awk: insert a new column into a .txt file without overwriting existing data, add a header for the new column, and convert to csv

I have a file vols.txt like this:

LabelID        Mean        StdD         Max         Min       Count     Vol(mm^3)        Extent(Vox)
    0       0.00000     0.00000     0.00000     0.00000    14121856   2973312.714    512   512    54
    1       1.00000     0.00000     1.00000     1.00000        2802       589.952     51    32    31
    2       2.00000     0.00000     2.00000     2.00000         127        26.739     11    14    18
    3       3.00000     0.00000     3.00000     3.00000        2379       500.891     34    21    27
    4       4.00000     0.00000     4.00000     4.00000         462        97.273     29    20    21
    5       5.00000     0.00000     5.00000     5.00000        2913       613.323     45    32     9
    8       8.00000     0.00000     8.00000     8.00000        5235      1102.213     49    31    31
    9       9.00000     0.00000     9.00000     9.00000        4483       943.882     47    43    22
   10      10.00000     0.00000    10.00000    10.00000        2492       524.683     38    27    23
   11      11.00000     0.00000    11.00000    11.00000       11424      2405.288     44    47    24
   12      12.00000     0.00000    12.00000    12.00000        1603       337.507     56    18     9

I want to add a new column to the beginning of the file to make vols_id.txt:

subjectID   LabelID        Mean        StdD         Max         Min       Count     Vol(mm^3)        Extent(Vox)
25000           0       0.00000     0.00000     0.00000     0.00000    14121856   2973312.714    512   512    54
25000           1       1.00000     0.00000     1.00000     1.00000        2802       589.952     51    32    31
25000           2       2.00000     0.00000     2.00000     2.00000         127        26.739     11    14    18
25000           3       3.00000     0.00000     3.00000     3.00000        2379       500.891     34    21    27
25000           4       4.00000     0.00000     4.00000     4.00000         462        97.273     29    20    21
25000           5       5.00000     0.00000     5.00000     5.00000        2913       613.323     45    32     9
25000           8       8.00000     0.00000     8.00000     8.00000        5235      1102.213     49    31    31
25000           9       9.00000     0.00000     9.00000     9.00000        4483       943.882     47    43    22
25000          10      10.00000     0.00000    10.00000    10.00000        2492       524.683     38    27    23
25000          11      11.00000     0.00000    11.00000    11.00000       11424      2405.288     44    47    24
25000          12      12.00000     0.00000    12.00000    12.00000        1603       337.507     56    18     9

Finally, I would like to convert this to a .csv file with a comma delimiter.

I have tried:

awk 'BEGIN { ORS = " " } {print 25000; for(i=2;i<=NF;++i) print $i}{print "\n"}' vols.txt >> vols_id.txt

but this replaces the original first column instead of adding a new one, and as a novice I am unsure how to modify it to include the header, or how to convert the result to a .csv file. Any help is appreciated!


Assigning to $1 forces awk to rebuild the record with the output field separator, so setting OFS to a comma converts each line to CSV while the new field is prepended:

$ awk -v OFS=, '{$1=(NR==1?"subjectID":25000) OFS $1}1' file


subjectID,LabelID,Mean,StdD,Max,Min,Count,Vol(mm^3),Extent(Vox)
25000,0,0.00000,0.00000,0.00000,0.00000,14121856,2973312.714,512,512,54
25000,1,1.00000,0.00000,1.00000,1.00000,2802,589.952,51,32,31
25000,2,2.00000,0.00000,2.00000,2.00000,127,26.739,11,14,18
25000,3,3.00000,0.00000,3.00000,3.00000,2379,500.891,34,21,27
25000,4,4.00000,0.00000,4.00000,4.00000,462,97.273,29,20,21
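
To write the result to a file, redirect the output (a sketch; the vols_id.csv name is just an example):

$ awk -v OFS=, '{$1=(NR==1?"subjectID":25000) OFS $1}1' vols.txt > vols_id.csv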

You can use this awk; the $1=$1 assignment forces awk to rebuild the record with OFS (a comma), and print then prepends the new first field:

awk -v OFS=, '{$1=$1; print (NR==1 ? "subjectID":25000), $0}' file

subjectID,LabelID,Mean,StdD,Max,Min,Count,Vol(mm^3),Extent(Vox)
25000,0,0.00000,0.00000,0.00000,0.00000,14121856,2973312.714,512,512,54
25000,1,1.00000,0.00000,1.00000,1.00000,2802,589.952,51,32,31
25000,2,2.00000,0.00000,2.00000,2.00000,127,26.739,11,14,18
25000,3,3.00000,0.00000,3.00000,3.00000,2379,500.891,34,21,27
25000,4,4.00000,0.00000,4.00000,4.00000,462,97.273,29,20,21
25000,5,5.00000,0.00000,5.00000,5.00000,2913,613.323,45,32,9
25000,8,8.00000,0.00000,8.00000,8.00000,5235,1102.213,49,31,31
25000,9,9.00000,0.00000,9.00000,9.00000,4483,943.882,47,43,22
25000,10,10.00000,0.00000,10.00000,10.00000,2492,524.683,38,27,23
25000,11,11.00000,0.00000,11.00000,11.00000,11424,2405.288,44,47,24
25000,12,12.00000,0.00000,12.00000,12.00000,1603,337.507,56,18,9
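
If the subject ID differs from run to run, one variation (a sketch, assuming you pass the ID in as an awk variable rather than hard-coding it) is:

awk -v OFS=, -v id=25000 '{$1=$1; print (NR==1 ? "subjectID" : id), $0}' vols.txt > vols_id.csv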

awk -v OFS="," '{$0=$1FS$0} NR==1 {$1="subjectID"} NR!=1 {$1=2500}1' input_file
subjectID,LabelID,Mean,StdD,Max,Min,Count,Vol(mm^3),Extent(Vox)
2500,0,0.00000,0.00000,0.00000,0.00000,14121856,2973312.714,512,512,54
2500,1,1.00000,0.00000,1.00000,1.00000,2802,589.952,51,32,31
2500,2,2.00000,0.00000,2.00000,2.00000,127,26.739,11,14,18
2500,3,3.00000,0.00000,3.00000,3.00000,2379,500.891,34,21,27
2500,4,4.00000,0.00000,4.00000,4.00000,462,97.273,29,20,21
2500,5,5.00000,0.00000,5.00000,5.00000,2913,613.323,45,32,9
2500,8,8.00000,0.00000,8.00000,8.00000,5235,1102.213,49,31,31
2500,9,9.00000,0.00000,9.00000,9.00000,4483,943.882,47,43,22
2500,10,10.00000,0.00000,10.00000,10.00000,2492,524.683,38,27,23
2500,11,11.00000,0.00000,11.00000,11.00000,11424,2405.288,44,47,24
2500,12,12.00000,0.00000,12.00000,12.00000,1603,337.507,56,18,9
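
If you also want the intermediate space-delimited vols_id.txt from the question as well as the .csv, one way (a sketch, not the only approach) is to add the column in one pass and convert the delimiter in a second:

awk '{print (NR==1 ? "subjectID" : 25000), $0}' vols.txt > vols_id.txt
awk -v OFS=, '{$1=$1}1' vols_id.txt > vols_id.csv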