# feat. Bob Techentin
#-----------------------------
if {[catch {file stat /usr/bin/vi entry} err]} {error "Couldn't stat /usr/bin/vi : $err"}
#-----------------------------
if {[catch {file stat /usr/bin entry} err]} {error "Couldn't stat /usr/bin : $err"}
#-----------------------------
# can't [file stat] a filehandle
#-----------------------------
file stat /usr/bin/vi inode
set ctime $inode(ctime)
set size $inode(size)
# or you can use specific [file] subcommands
set size [file size /usr/bin/vi]
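# A few more standard [file] subcommands read single stat fields directly
# (shown here as an additional illustration):
set mtime [file mtime /usr/bin/vi]     ;# last modification time
set atime [file atime /usr/bin/vi]     ;# last access time
set type  [file type  /usr/bin/vi]     ;# file, directory, link, ...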
#-----------------------------
#  You have to read the file to test for binary data
if {![catch {open $filename r} F]} {
        set data [read $F]
        close $F
        if {![string is ascii $data]} {
                error "$filename doesn't have text in it."
        }
} else {
        error "Opening $filename: $F"
}
#-----------------------------
set files [glob /usr/bin/*]
foreach f $files {
    puts "Inside /usr/bin is something called [file tail $f]"
}
#-----------------------------
# feat. Bob Techentin
#-----------------------------
set READTIME [file atime $filename]
set WRITETIME [file mtime $filename]
file atime $filename $NEWREADTIME
file mtime $filename $NEWWRITETIME
#-----------------------------
set atime [file atime $filename]
set mtime [file mtime $filename]
set atime [clock scan "- 1 week" -base $atime]
set mtime [clock scan "- 1 week" -base $mtime]
if {[catch {
        file atime $filename $atime
        file mtime $filename $mtime
} err]} {
        error "couldn't backdate $filename by a week w/ file (a|m)time: $err"
}
#-----------------------------
file atime $file [clock seconds]
#-----------------------------
#!/bin/sh
# uvi - vi a file without changing its access times
# the next line restarts using tclsh \
exec tclsh "$0" "$@"

if {[llength $argv] != 1} {
        error "usage: uvi filename"
}
set file [lindex $argv 0]
file stat $file statinfo
if {[info exists env(EDITOR)]} {
        exec $env(EDITOR) $file
} else {
        exec vi $file
}
file atime $file $statinfo(atime)
file mtime $file $statinfo(mtime)
#-----------------------------
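#  With Tcl 8.5 or later the backdating above can also be written with
#  [clock add], avoiding the free-form relative time string; a minimal sketch:
set atime [clock add [file atime $filename] -7 days]
set mtime [clock add [file mtime $filename] -7 days]
file atime $filename $atime
file mtime $filename $mtime
#-----------------------------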
# feat. Bob Techentin
#-----------------------------
file delete $filename
eval file delete $filenames
#-----------------------------
if {[catch {file delete $file}]} {
        error "Can't unlink $file"
}
#-----------------------------
if {[catch {eval file delete $filelist}]} {
        set remaining 0
        foreach f $filelist {
                if {[file exists $f]} {incr remaining}
        }
        set count [expr {[llength $filelist] - $remaining}]
        puts stderr "could only delete $count of [llength $filelist] files"
}
#  Tcl's [file delete] command doesn't return a count,
#  so we have to count files before and after deletion.
set existing 0
foreach f $filelist {
        if {[file exists $f]} {incr existing}
}
catch {eval file delete $filelist}
set remaining 0
foreach f $filelist {
        if {[file exists $f]} {incr remaining}
}
if {$remaining > 0} {
        set count [expr {$existing-$remaining}]
        puts stderr "could only delete $count of $existing files"
}
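#  A further sketch: delete one file at a time and count successes directly.
#  [file delete] silently ignores names that no longer exist, so the existence
#  test is what actually drives the count.
set count 0
foreach f $filelist {
        if {[file exists $f] && ![catch {file delete -- $f}]} {
                incr count
        }
}
if {$count != [llength $filelist]} {
        puts stderr "could only delete $count of [llength $filelist] files"
}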
#-----------------------------
# feat. Bob Techentin
#-----------------------------
file copy $oldfile $newfile
#-----------------------------
if {[catch {open $oldfile "r"} IN]}  {error "can't open $oldfile: $IN"}
if {[catch {open $newfile "w"} OUT]} {error "can't open $newfile: $OUT"}
#  copy a buffer at a time; binary translation preserves the data exactly
fconfigure $IN  -translation binary
fconfigure $OUT -translation binary
set blksize [fconfigure $IN -buffersize]
while {![eof $IN]} {
        fcopy $IN $OUT -size $blksize
}
close $IN
close $OUT
#-----------------------------
#  Tcl file operations are portable
file copy $oldfile $newfile
#-----------------------------
file copy datafile.dat datafile.bak
file rename -force datafile.new datafile.dat
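#  [file copy] also accepts several sources when the target is an existing
#  directory; a small sketch (backupdir is only an illustrative name):
file mkdir backupdir
file copy -force datafile.dat datafile.bak backupdir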
#-----------------------------
# feat. Bob Techentin
#-----------------------------
array unset seen
foreach filename $argv {
        file stat $filename statinfo
        set dev $statinfo(dev)
        set ino $statinfo(ino)
        if {![info exists seen($dev,$ino)]} {
                #  do something with $filename because
                #  we haven't seen it before
        }
        #  seen($dev,$ino) is a list of filenames for the same file
        lappend seen($dev,$ino) $filename
}
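#  Follow-on sketch: report sets of names that refer to the same underlying
#  file (more than one name per dev/inode pair)
foreach key [array names seen] {
        if {[llength $seen($key)] > 1} {
                puts "these names refer to the same file: $seen($key)"
        }
}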
#-----------------------------
# feat. Bob Techentin
#-----------------------------
foreach file [glob [file join $dirname "*"]] {
        # do something with $file
}
#-----------------------------
set dir /usr/local/bin
puts "Text files in $dir are:"
foreach file [glob [file join $dir "*"]] {
        set fp [open $file "r"]
        if {[string is ascii [read $fp]]} {
                puts $file
        }
        close $fp
}
#-----------------------------
foreach file [glob [file join $dir "*"]] {
        if {$file eq ".." || $file eq "."} continue
        # ...
}
#-----------------------------
proc plainfiles {dir} {
        set result [list]
        foreach file [lsort [glob [file join $dir "*"]]] {
                if {[string index [file tail $file] 0] eq "."} continue
                if {[file type $file] eq "file"} {
                        lappend result $file
                }
        }
        return $result
}
#-----------------------------
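#  glob can do most of this filtering itself; a minimal sketch using the
#  -types option (plain files only, sorted; "*" skips dot files by default;
#  the proc name is just illustrative)
proc plainfiles2 {dir} {
        return [lsort [glob -nocomplain -directory $dir -types f -- *]]
}
#-----------------------------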
# List of regular files in current directory - file names only in list
set files [glob -nocomplain -type f -- *.c]
# -------------
# As above, but with full path
set files [glob -directory [pwd] -nocomplain -type f -- *.c]
# -------------
# Same result as the previous example (which is more compact, so preferred), but showing
# use of 'file' to build full path names
set pwd [pwd] ; set files [glob -nocomplain -type f -- *.c]
# Assemble full path names from list entries
foreach f $files {
  puts [file nativename [file join "$pwd" "$f"]]
}
# -------------
# Variants of the *NIX, 'find', command from the 'fileutil' package
package require fileutil
# Set search path to current directory. Could also have used ~ (the user's HOME
# directory), or a relative / absolute path
set path .
# -----
# 1. Similar to, 'glob', but also allows 'regexp'-based globbing
set files [fileutil::findByPattern $path -glob -- *.c]
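# The -regexp form takes a regular expression instead of a glob pattern; an
# equivalent sketch for the same search:
set files [fileutil::findByPattern $path -regexp -- {\.c$}]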
# -----
# 2. Makes use of a 'filter' procedure
proc is_c {name} { return [string match *.c $name] }
set files [fileutil::find $path is_c]
# -----
# In both cases:
# * Search is recursive
# * Full path names printed
foreach f $files {
  puts $f
}
# -----------------------------
# Two lists generated, first one filtered by file extension, second one by file type
package require fileutil
proc is_c_or_h {name} { return [string match -nocase *.\[ch\] $name] }
set path . ; set files [fileutil::find $path is_c_or_h]
set textfiles [list]
foreach f $files {
  if {[string match "text*" [fileutil::fileType $f]]} { lappend textfiles $f }
}
foreach f $textfiles {
  puts $f
}
# -------------
# As above, but both file extension and file type considered in filter procedure, so
# only a single list is generated
package require fileutil
proc is_c_or_h_and_text {name} {
  if {[string match -nocase *.\[ch\] $name] && ![catch {fileutil::fileType $name} filetype]} {
    return [string match "text*" $filetype]
  }
  return 0
}
set path . ; set files [fileutil::find $path is_c_or_h_and_text]
foreach f $files {
  puts $f
}
# -----------------------------
# Sorted list of all subdirectories in the current directory which commence with the
# digits 0-9
set dirs [lsort [glob -directory [pwd] -nocomplain -type d -- \[0-9\]*]]
# The minimal-code approach to this problem is to generate a list of paths using the
# 'find' or 'findByPattern' commands of the 'fileutil' package, then traverse that
# list, processing each file in turn. A variation is to write a filter procedure for
# 'find' that processes each selected file whilst still retaining its expected
# behaviour. Whilst the latter is likely to perform better, it isn't generally
# recommended for a filter procedure to have side effects
package require fileutil
# Conventional filter procedures for use with fileutil::find
proc is_dir {name} { return [expr {[fileutil::test $name {d}] != 0}] }
# -----
# Generate list of directories in a directory
set path . ; set files [fileutil::find $path is_dir]
# -------------
# Side-effecting filter procedures
proc accum_filesize {name} {
  global filesize
  if {[fileutil::test $name {f}]} { set filesize [expr {$filesize + [file size $name]}] }
  return 0
}
proc biggest_file {name} {
  global biggest
  if {[fileutil::test $name {f}] && [file size $name] > $biggest} {
    set biggest [file size $name]
  }
  return 0
}
proc youngest_file {name} {
  global youngest
  if {[fileutil::test $name {f}] && [file mtime $name] > $youngest} {
    set youngest [file mtime $name]
  }
  return 0
}
# -----
# Obtain total size of all files in a directory and its subdirectories
set path . ; set filesize 0 ; set files [fileutil::find $path accum_filesize]
puts $filesize
# Find biggest size file in a directory and its subdirectories
set path . ; set biggest 0 ; set files [fileutil::find $path biggest_file]
puts $biggest
# Find most recent file in a directory and its subdirectories
set youngest 0 ; set files [fileutil::find $path youngest_file]
puts [clock format $youngest -format %D]
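# A variant sketch (the helper and variable names are only illustrative): also
# remember which file was the most recent, not just its modification time
proc youngest_named {name} {
  global youngest youngest_name
  if {[fileutil::test $name {f}] && [file mtime $name] > $youngest} {
    set youngest [file mtime $name] ; set youngest_name $name
  }
  return 0
}
set path . ; set youngest 0 ; set youngest_name ""
set files [fileutil::find $path youngest_named]
puts "$youngest_name ([clock format $youngest -format %D])"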
# Alternatively, one could implement a procedure that actually recurses through
# directories performing required processing. One approach would see the intermixing
# of recursing and processing code; another would see a generic recursing procedure
# passed the name of a processing procedure which is then applied to each selected
# file via the 'eval' command. The latter approach has the advantage of being flexible,
# though performance is hampered due to the use of 'eval'; the former approach is more
# 'one-shot' but, most likely, better performing
proc processDirectory {baseDir proc} {
  set pwd [pwd] ; if {[catch {cd $baseDir} result]} { return }
  foreach dir [glob -nocomplain -type d -- *] {
    processDirectory $dir $proc
  }
  # Build the command as a list so directory names containing spaces survive 'eval'
  eval [list $proc [pwd]] ; cd $pwd
}
# -------------
proc show {dir} { puts $dir }
proc accum_filesize {dir} {
  global filesize
  foreach file [glob -directory $dir -nocomplain -type f -- *] {
    set filesize [expr {$filesize + [file size $file]}]
  }
}
proc biggest_file {dir} {
  global biggest
  foreach file [glob -directory $dir -nocomplain -type f -- *] {
    if {[file size $file] > $biggest} { set biggest [file size $file]}
  }
}
proc youngest_file {dir} {
  global youngest
  foreach file [glob -directory $dir -nocomplain -type f -- *] {
    if {[file mtime $file] > $youngest} { set youngest [file mtime $file] }
  }
}
# -------------
# Obtain total size of all files in a directory and its subdirectories
set filesize 0 ; processDirectory [pwd] "accum_filesize" ; puts $filesize
# Find biggest size file in a directory and its subdirectories
set biggest 0 ; processDirectory [pwd] "biggest_file" ; puts $biggest
# Find most recent file in a directory and its subdirectories
set youngest 0 ; processDirectory [pwd] "youngest_file"
puts [clock format $youngest -format %D]
# -----------------------------
# Generate list of directories in a list of directories
if {$argc < 1} { puts stderr "usage: $argv0 dir..." ; exit 1 }
foreach dirname $argv {
  processDirectory $dirname "show"
}
# The 'file delete' command can:
# * Delete both files and subdirectories
# * Recursively delete the latter
# Therefore, it is not necessary to construct a tree-traversing [recursive or otherwise]
# procedure in order to remove a directory tree. It may, however, be useful to use such
# an approach should it be necessary to implement special processing [e.g. interactive
# prompting]. The use of a custom procedure that intermixes recursing and processing code
# [as shown in the previous section] is probably the simplest, best-performing approach
# to this latter task.
# 'rmtree1' - straightforward implementation 
if {$argc < 1} { puts stderr "usage: $argv0 dir..." ; exit 1 }
foreach dirname $argv {
  if {[catch {file delete -force -- $dirname} result]} {
    puts "Error deleting $dirname \[$result\]"
  }
}
# -----------------------------
# 'rmtree2' - recursive, tree-traversing implementation 
# Recurser - recursively traverses directory tree
proc rmtree_ {baseDir} {
  set pwd [pwd] ; if {[catch {cd $baseDir} result]} { return }
  foreach dir [glob -nocomplain -type d -- *] {
    rmtree_ $dir
  }
  # Let's delete the regular files in, 'baseDir'
  foreach filename [glob -nocomplain -type f -- *] {
    if {[catch {file delete -force -- $filename} result]} {
      puts "Error deleting $filename \[$result\]"
    }
  }
  # Let's move up, out of 'baseDir', so as to allow its deletion
  cd $pwd
  # Let's delete, 'baseDir'
  set dirname [file join $pwd $baseDir]
  if {[catch {file delete -force -- $dirname} result]} {
    puts "Error deleting $dirname \[$result\]"
  }
}
# -----
# Launcher - performs validation, then starts recursive routine
proc rmtree {baseDir} {
  if {![file exists $baseDir]} {
    puts stderr "Directory does not exist" ; return
  }
  if {[string match $baseDir* [pwd]]} {
    puts stderr "Cannot remove current directory or its parent" ; return
  }
  # Validation passed, so start recursing through subdirectories 
  return [rmtree_ $baseDir]
}
# -------------
if {$argc < 1} { puts stderr "usage: $argv0 dir..." ; exit 1 }
foreach dirname $argv {
  rmtree $dirname
}
set names [list x y z]
foreach file $names {
  # This deliberately attempts to rename an existing file to its own, same name,
  # thus forcing an error [unless the -force option is used]
  set newname $file
  # Error display mimics Perl example
  ## if [catch {file rename $file $newname} result] {
  ##   puts stderr "Couldn't rename $file to $newname"
  ## }
  # However, approach shown here is preferable as it furnishes more relevant
  # diagnostic message(s)
  if {[catch {file rename $file $newname} result]} {
    puts stderr $result
  }
}
# -----------------------------
#
# A modified implementation of Larry's Filename Fixer. Rather than passing
# a single expression, a 'from' regexp is passed; each match in the file
# name(s) is changed to the value of 'to'. It otherwise behaves the same
# 
if {$argc < 2} { puts stderr "usage: $argv0 from to \[files...\]" ; exit 1 }
set from [lindex $argv 0] ; set to [lindex $argv 1]
set argv [lrange $argv 2 end]
if {$argv == {}} {
  while {[gets stdin line] >= 0} {lappend argv $line}
} 
foreach f $argv {
  set was $f ; regsub $from $f $to file
  if {[string compare $was $file] != 0} {
    if {[catch {file rename $was $file} result]} {
      puts stderr $result
    }
  }
}
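# -----------------------------
# Example invocations, assuming the script above has been saved as 'rename.tcl'
# (the patterns and file names are only illustrative):
#   tclsh rename.tcl '\.orig$' '' *.orig        ;# strip a .orig suffix
#   tclsh rename.tcl 'htm$' 'html' *.htm        ;# .htm  -> .html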
set path {/usr/lib/libc.a}
# -----
set basename [file tail $path]
set dirname [file dirname $path]
# No equivalent to Perl's 'fileparse', so must do:
set base [file tail $path]
set dir [file dirname $path]
set ext [file extension $path]
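# A fileparse-like helper can be built from these subcommands; a minimal sketch
# (the proc name and the result order are just illustrative):
proc fileparse {path} {
  set dir  [file dirname $path]
  set ext  [file extension $path]
  set base [file rootname [file tail $path]]
  return [list $base $dir $ext]
}
foreach {base dir ext} [fileparse $path] break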
# -------------
set path {/usr/lib/libc.a}
# -----
set file [file tail $path]
set dir [file dirname $path]
puts "dir is $dir, file is $file"
# -----
set name [file tail $path]
set dir [file dirname $path]
set ext [file extension $path]
puts "dir is $dir, name is $name, extension is $ext"
# -------------
# According to the Tcl documentation, the 'file' command is platform-independent, so it
# should work correctly for platforms such as classic MacOS. The code below assumes this,
# but is otherwise untested
set path {Hard%20Drive:System%20Folder:README.txt}
# -----
set name [file tail $path]
set dir [file dirname $path]
set ext [file extension $path]
puts "dir is $dir, name is $name, extension is $ext" | 
# @@INCOMPLETE@@
# @@INCOMPLETE@@
# @@INCOMPLETE@@
# @@INCOMPLETE@@