#!/bin/sh
# The next line is executed by /bin/sh, but not tcl \
exec tclsh "$0" ${1+"$@"}
##
## by Aaron Hurt // Flex I.T., LLC
## Copyright 2008; All Rights Reserved
##
## positouch dbf parser for item sales
## parse dbf and generate file for import into costguard
#
namespace eval ::cgs {
## settings ##
variable AuthKey "9afgEuzFArZtRfoR89y0GRaCo24W5lJI2P56puA6"
## end settings ##
## we use alot of threads here
package require Thread
## we use sqlite for our backend
package require sqlite3
## read our config file
## Read the server configuration file and populate ::cgs settings.
##   conf - path to a plain-text config file of "KEY value ..." lines
## Unknown keys are silently ignored; a missing or unreadable file logs an
## error and leaves all settings untouched.
proc readConf {conf} {
## check file validity before attempting to open it
if {![file exists $conf] || ![file readable $conf]} {
::cgs::log "ERROR: Non-existent or unreadable config file: '$conf'"; return
}
## slurp the whole file, close the channel, then split into lines
set config [split [read [set fid [open $conf r]]] \n]; close $fid
foreach line $config {
## the first whitespace-delimited word on the line selects the setting
## NOTE(review): PORT/HOST keep only a single word ([lindex ... 1]) while
## every other key keeps the remainder of the line as a Tcl list
## ([lrange ... 1 end]) -- confirm consumers expect list-valued paths
switch -exact -- [lindex [split $line] 0] {
PORT {set ::cgs::listenPort [lindex [split $line] 1]}
HOST {set ::cgs::myAddr [lindex [split $line] 1]}
RESTRICT {set ::cgs::ipRestrict [lrange [split $line] 1 end]}
IPLIST {set ::cgs::ipList [lrange [split $line] 1 end]}
LOGT {set ::cgs::logType [lrange [split $line] 1 end]}
LOGF {set ::cgs::logFile [lrange [split $line] 1 end]}
SRVPATH {set ::cgs::srvPath [lrange [split $line] 1 end]}
SQLPATH {set ::cgs::sqlPath [lrange [split $line] 1 end]}
CSVPATH {set ::cgs::csvPath [lrange [split $line] 1 end]}
RPTPATH {set ::cgs::rptPath [lrange [split $line] 1 end]}
}
}
}
## handle our logging
## Write a timestamped log line according to ::cgs::logType:
##   1 = stdout only, 2 = log file only, 3 = both.
## Logging is disabled entirely when logType was never configured.  An
## existing but unwritable log file is skipped silently.
proc log {text} {
if {![info exists ::cgs::logType]} {return}
## timestamp prefix, e.g. "Mon Jan 01 07:00:00 UTC 2008 :: message"
set stamp [clock format [clock seconds] -format {%a %b %d %H:%M:%S %Z %Y}]
set entry "$stamp :: $text"
## console output
if {$::cgs::logType == 1 || $::cgs::logType == 3} {
puts $entry
}
## file output (the channel is opened in append mode and closed per line)
if {$::cgs::logType == 2 || $::cgs::logType == 3} {
if {![info exists ::cgs::logFile]} {return}
if {[file exists $::cgs::logFile] && ![file writable $::cgs::logFile]} {return}
set chan [open $::cgs::logFile a+]
puts $chan $entry
close $chan
}
}
## process our dbfs and generate sqlite dbs
## Parse a Positouch DBF file in a background worker thread and load its
## records into a per-store SQLite database.
##   ifname - path to the temporary dbf file (deleted after a successful parse)
##   tname  - destination SQL table name (e.g. itmsales, hrsales)
##   addr   - client IP address, used to derive the sqlite db file name
## The heavy lifting runs inside a freshly created thread so the main event
## loop is never blocked; the thread releases itself when finished.
proc dbfThread {ifname tname addr} {
## check file validity
if {![file exists $ifname] || ![file readable $ifname]} {
::cgs::log "ERROR: Non-existent or unreadable file: '$ifname'"; return
}
## start a thread for the dbf processing...it may take awhile and we dont want to wait
set ptid [thread::id]; set tid [thread::create {
## we need this here
package require sqlite3
## logging inside the thread: forward messages to the parent's ::cgs::log
proc log {text} {thread::send -async $::_ptid [list ::cgs::log "([thread::id]) $text"]}
## sqlite error handling wrapper: logs failures and closes the db handle.
## NOTE: on success [catch] stores the command's result in err, so this
## also doubles as a "run and return result" helper.
proc sql {cmd} {
if {[catch {eval $cmd} err] != 0} {
::log "ERROR: SQLite command '$cmd' failed: $err"
catch {[lindex $cmd 0] close}
}; return $err
}
## our dbf processor: reads the dbf header, field descriptors and records,
## then replays everything into sqlite as one batched SQL script
proc dbIt {ifname tname addr} {
## log it...and get to work...
::log "started parsing dbf file $ifname"
## open file and configure channel for raw binary reads
set fid [open $ifname r]; fconfigure $fid -encoding binary -translation binary
## read the 32-byte dBASE-style header: version byte, last-update Y/M/D
## bytes, 32-bit record count, 16-bit header and record lengths
## (little-endian per the binary scan i/s specifiers)
if {![binary scan [read $fid 32] cccciss verNum yy mm dd numRecs hdrLen recLen]} {
::log "ERROR: Unable to read header...cannot continue"
thread::release [thread::id]; return
}
## dbf sanity checks...record length must be plausible
if {!($recLen > 1) || !($recLen < 4000)} {
::log "ERROR: Record length inconsistency...corrupt DBF structure"
thread::release [thread::id]; return
}
if {!($numRecs >= 0)} {
::log "ERROR: Invalid number of records...corrupt DBF structure"
thread::release [thread::id]; return
}
## dbf sanity checks...derive field count from header length (each field
## descriptor is 32 bytes)
if {!([set numFields [expr {int(($hdrLen-1)/32)-1}]] >= 1)} {
::log "ERROR: Invalid number of fields...corrupt DBF structure"
thread::release [thread::id]; return
}
## dbf sanity checks...actual file size must match header+records+EOF byte
if {[set asize [file size $ifname]] != [set calcSize [expr {($hdrLen+1)+($numRecs*$recLen)}]]} {
::log "ERROR: Invalid file size $asize...should be $calcSize...corrupt DBF structure";
thread::release [thread::id]; return
}
## initialize the record scan map; the leading 1-char "A" field (v0)
## consumes each record's status/deleted-flag byte
set recFmt A; set scanMap v0
## populate our initial field data array (pseudo-field 0 = status byte)
array set fieldData [list nam,0 {} typ,0 "C" ofs,0 0 len,0 1 dcnt,0 0 flg,0 0]
## read all field descriptors
for {set i 1} {$i <= $numFields} {incr i} {
## scan in our binary field data: 11-byte name, type char, offset,
## length, decimal count, flags
if {![binary scan [read $fid 32] A11aiccc fieldData(nam,$i) fieldData(typ,$i) fieldData(ofs,$i) \
fieldData(len,$i) fieldData(dcnt,$i) fieldData(flg,$i)]} {
::log "ERROR: Unable to read data field...cannot continue"
thread::release [thread::id]; return
}
## convert field names to lower case and strip cr/nl
array set fieldData [list nam,$i [string tolower [string map "\r {} \n {}" $fieldData(nam,$i)]]]
## build record format map...this will not work for some field types
## however..we only have C, D and N type fields in our DBFs...fix if you have other types
append recFmt A$fieldData(len,$i); lappend scanMap v$i
}
## progress update....
::log "generating SQL tables..."
## build create table statement: map each dbf field type to a SQL column
for {set i 1} {$i <= $numFields} {incr i} {
switch -exact -- $fieldData(typ,$i) {
C {lappend temps "$fieldData(nam,$i) varchar\($fieldData(len,$i)\)"}
D {lappend temps "$fieldData(nam,$i) date"}
N {
## int or numeric? (zero decimal count means an integer column)
if {$fieldData(dcnt,$i) == 0} {
lappend temps "$fieldData(nam,$i) int"
} else {
lappend temps "$fieldData(nam,$i) numeric\($fieldData(len,$i),$fieldData(dcnt,$i)\)"
}
}
default {
::log "ERROR: Unhandled data type \($fieldData(typ,$i)\) encountered!"
thread::release [thread::id]; return
}
}
}
## set our initial sql output buffer...
lappend outPut "CREATE TABLE IF NOT EXISTS $tname \([join $temps {, }]\)\;"; unset temps
## create indexes for specific tables so INSERT OR REPLACE can de-dupe
switch -exact -- $tname {
itmsales {
lappend outPut "CREATE UNIQUE INDEX IF NOT EXISTS itmsales_idx ON $tname (date,inv_num,counts);"
}
hrsales {
lappend outPut "CREATE UNIQUE INDEX IF NOT EXISTS hrsales_idx ON $tname (date,cost_centr,hour);"
}
}
## log progress...
::log "preparing SQL statements and inserting into database..."
## read last bit of headers into bit heaven...
set lhBit [read $fid 1]; unset lhBit
## alright..almost there let's read the records...
for {set i 1} {$i <= $numRecs} {incr i} {
## scan one fixed-width record into the v1..vN variables
eval binary scan [list [read $fid $recLen]] $recFmt $scanMap
## start building output buffer..skip the first status byte (v0)...
## NOTE(review): values are wrapped in bare single quotes -- a field value
## containing ' would break the statement; confirm source data cannot
foreach var [lrange $scanMap 1 end] {
lappend temps "\'[string trim [set [set var]]]\'"
}
## build the rest of our sql buffer
lappend outPut "INSERT OR REPLACE INTO $tname VALUES \([join $temps {, }]\)\;"; unset temps
}
## we are almost done...close our dbf file and delete it...we don't need it anymore
close $fid; file delete -force -- $ifname; ::log "closed and removed temp dbf file..."
## generate sqlite db name (one db per client address) and open the database
sqlite3 [set dbc db-$addr] [set dbname [file join $::_fpath "sqlite-[join [split $addr {.}] {-}].db"]]
## setup a timeout to avoid locking crashes with threads
::sql [list $dbc timeout 30000]
## set some sqlite pramga
::sql [list $dbc eval {PRAGMA default_synchronous = OFF;}]
::sql [list $dbc eval {PRAGMA synchronous = OFF;}]
## run the whole buffered DDL/DML script into sqlite in one eval
::sql [list $dbc eval [join $outPut]]
## close up sqlite...and release thread...we are done....
::sql [list $dbc close]; ::log "sqlite db $dbname updated...releasing thread [thread::id]"
thread::release [thread::id]
}
## enter thread event loop and wait for the ::dbIt send from the parent
thread::wait
}]
## log it..
::cgs::log "created thread $tid for dbf processing ([llength [thread::names]] threads running)"
## copy important variables to thread and start the processing....
thread::send -async $tid [list set _ptid $ptid]; thread::send -async $tid [list set _fpath $::cgs::sqlPath]
thread::send -async $tid [list ::dbIt $ifname $tname $addr]
}
## Start the TCP listener, schedule the daily report job, and enter the
## event loop.  Reads ::cgs::listenPort / ::cgs::myAddr / ::cgs::ipRestrict /
## ::cgs::ipList as populated by readConf.  Never returns in normal
## operation: vwait blocks on a variable that is never written again.
proc init {} {
## bind to a specific address only when HOST was configured
set haveAddr [expr {[info exists ::cgs::myAddr] && [string length $::cgs::myAddr]}]
if {$haveAddr} {
set ::cgs::_servSock [socket -server ::cgs::_connect -myaddr $::cgs::myAddr $::cgs::listenPort]
} else {set ::cgs::_servSock [socket -server ::cgs::_connect $::cgs::listenPort]}
::cgs::log "listening on port $::cgs::listenPort"
if {$haveAddr} {
::cgs::log "bound to host $::cgs::myAddr"
} else {::cgs::log "bound to host 0.0.0.0"}
if {[info exists ::cgs::ipRestrict] && $::cgs::ipRestrict && [info exists ::cgs::ipList]} {
::cgs::log "ip restriction active - allowed IPs: $::cgs::ipList"
} else {::cgs::log "ip restriction NOT ACTIVE (please activate for increased security)"}
::cgs::log "server launched socket ($::cgs::_servSock) opened"
## schedule our daily jobs...7am every day
::cgs::at 0700 ::cgs::daily
## BUG FIX: log BEFORE entering the event loop -- the original logged after
## vwait, which never returns, making the message unreachable
::cgs::log "waiting for connections..."
set forever 1; vwait forever
}
## schedule transfer to worker thread
## Server-socket accept callback: defer the real work to the event loop so
## the accept path returns immediately.
proc _connect {sock addr cport} {
set handoff [list ::cgs::connect $sock $addr $cport]
after idle $handoff
}
## handle incoming connections
## Handle one accepted client connection.  Enforces the optional IP allow
## list, then hands the socket off to a dedicated worker thread that reads
## the +HEADER line, authenticates against ::cgs::AuthKey, receives the dbf
## payload via fcopy, and forwards the file to the dbf processor.
##   sock/addr/cport - the accepted channel and the client's address/port
proc connect {sock addr cport} {
## check for authority against the configured wildcard IP list...
if {[info exists ::cgs::ipRestrict] && $::cgs::ipRestrict==1 && [info exists ::cgs::ipList]} {
foreach ip $::cgs::ipList {if {[string match $ip $addr]} {set allow 1; break}}
} else {set allow 1}
if {![info exists allow]} {
::cgs::log "closing unauthorized connect \($sock\) from $addr\:$cport"
catch {close $sock}; return
}; ::cgs::log "accepted $sock from $addr\:$cport"
## start threads for client connections
set ptid [thread::id]; set tid [thread::create {
## logging inside the thread: forward to the parent's ::cgs::log
proc log {text} {thread::send -async $::_ptid [list ::cgs::log "([thread::id]) $text"]}
## bye bye client socket...close it and release this worker thread
proc cs {addr sock cport} {
::log "closing connection \($sock\) from $addr\:$cport and releasing thread"
catch {close $sock}; thread::release [thread::id]
}
## fcopy completion callback: log stats and pass the file to the parent
## NOTE(review): ::cs already releases this thread, and thread::release is
## called again below -- confirm the double release is harmless here
proc readDone {addr sock cport out fname tname stime bytes {error {}}} {
## we are done...cleanup...set performance counters
set etime [expr {[clock seconds] - $stime}]; ::cs $addr $sock $cport; catch {close $out}
## check for error or eof on input file
if {[string length $error]} {
## oops...we had an error...log it...set done..cleanup array...and close file channels
::log "ERROR: error during file copy: $error"; return
}
## compute approximate transfer speed (guard against zero elapsed time)
if {$etime != 0} {
set speed [format %0.2f [expr {($bytes / $etime) / 1024.0}]]
} else {set speed 0}
::log "done: transfered $fname $bytes bytes in $etime seconds ($speed Kbps)"
## pass off to threaded dbf processor and release client thread
::log "passing $fname to dbf processor..."
set temps [file join $::_sqlPath $fname]; ::thread::send $::_ptid \
[list ::cgs::dbfThread $temps $tname $addr]
## release client thread
::log "releasing client thread [thread::id]"; thread::release [thread::id]
}
## accept incoming data on socket connections
proc readIt {sock addr cport} {
## clear readable event -- we only want to handle the header once
fileevent $sock readable {}
## check for eof and get header
## BUG FIX: return after closing -- the original fell through and went on
## to dereference $line (possibly unset) and close/release a second time
if {[eof $sock] || [catch {gets $sock line}]} {
::cs $addr $sock $cport; return
}
## examine and parse the "+HEADER authkey:filename" line
if {[string equal [lindex [split $line] 0] {+HEADER}]} {
set header [split [lindex [split $line] 1] :]
if {[string equal [lindex $header 0] $::_AuthKey]} {set auth 1}
if {![string equal {} [lindex $header 1]]} {
set tname [string tolower [file rootname [lindex $header 1]]]
set fname "$addr-[lindex $header 1].tmps"
} else {set auth 0}
}
## confirm authentication of client
if {![info exists auth] || $auth != 1} {
::log "unauthorized connection \($sock\) from $addr\:$cport"
::cs $addr $sock $cport; return
}
## log acceptance...
::log "accepting file $fname from $addr\:$cport"
## switch socket to binary mode for the payload
fconfigure $sock -buffering full -buffersize 2048 -encoding binary -translation binary
## open and configure destination file
if {[catch {set out [open [file join $::_sqlPath $fname] w]} err] != 0} {
::log "ERROR: error opening output file [file join $::_sqlPath $fname]: $err"
catch {close $out}; ::cs $addr $sock $cport; return
}
fconfigure $out -buffering full -buffersize 1024 -encoding binary -translation binary
## set performance counters
set stime [clock seconds]
## start async copy; readDone fires with byte count (and any error) at eof
fcopy $sock $out -command [list ::readDone $addr $sock $cport $out $fname $tname $stime]
}
## enter thread event loop
thread::wait
}]
## log it...
::cgs::log "created thread $tid \($sock\) $addr\:$cport ([llength [thread::names]] threads running)"
## release socket from main thread so the worker can attach it
thread::detach $sock
## copy important variables into client thread
foreach var {sock addr cport ptid} {
thread::send -async $tid [list set _$var [set [set var]]]
}
thread::send -async $tid [list set _sqlPath $::cgs::sqlPath]
thread::send -async $tid [list set _AuthKey $::cgs::AuthKey]
## finish socket setup in client thread
thread::send -async $tid {
thread::attach $_sock; fconfigure $_sock -buffering line
fileevent $_sock readable [list ::readIt $_sock $_addr $_cport]
}
}
## handle our timed events
## Schedule a command to run at the next occurrence of a wall-clock time.
##   stime - a time spec understood by [clock scan], e.g. 0700
##   args  - the command, given either as one list argument or spread words
proc at {stime args} {
## normalize the command into a single script
set cmd [lindex $args 0]
if {[llength $args] != 1} {set cmd $args}
## target time today; roll forward one day if it has already passed
set when [clock scan $stime]
if {$when <= [clock seconds]} {incr when 86400}
## log the scheduled run time...
::cgs::log "Scheduled '$cmd' @ [clock format $when -format {%a %b %d %H:%M:%S %Z %Y}]"
## convert the remaining seconds to milliseconds for [after]
after [expr {($when-[clock seconds])*1000}] $cmd
}
## generate daily csvs and sales reports
## Generate the daily (and, on Mondays, weekly) costguard CSV exports and
## the 16-week sales Excel report from every per-store sqlite database,
## then reschedule itself for 7am.  Runs in a dedicated worker thread so
## the main event loop stays responsive.
proc daily {} {
## start a worker...we don't want to block the main thread
set ptid [thread::id]; set tid [thread::create {
## we need this here
package require sqlite3
## logging inside the thread: forward to the parent's ::cgs::log
proc log {text} {thread::send -async $::_ptid [list ::cgs::log "([thread::id]) $text"]}
## sqlite error handling wrapper; on success [catch] stores the command's
## result in err, so this also returns query results
proc sql {cmd} {
if {[catch {eval $cmd} err] != 0} {
::log "ERROR: SQLite command '$cmd' failed: $err"
catch {[lindex $cmd 0] close}
}; return $err
}
## csv and sales reporting generator
##   srvpath - server directory (where excel.tcl lives)
##   sqlpath - directory holding the sqlite-*.db store databases
##   csvpath - destination root for costguard csv exports
##   rptpath - destination directory for the 16-week xml workbook
proc rptIt {srvpath sqlpath csvpath rptpath} {
## excel package from -> http://wiki.tcl.tk/14468
## assume it is in same directory we are...as it should be
if {[lsearch -exact [package names] excel] == -1} {
if {[file exists [set xlf [file join $srvpath excel.tcl]]] && [file readable $xlf]} {
catch {source $xlf}; unset xlf
} else {
## BUG FIX: the original referenced the undefined variable $exname
## here, which would itself raise an error; $xlf is always set above
::log "ERROR: unreadable or non-existent $xlf package"
thread::release [thread::id]; return
}
}
## log it...we're starting...
::log "daily reporting started...opening databases..."
## loop through and open our sqlite dbs
## NOTE(review): the db command name is derived from element 3 of the path
## split on "-", i.e. the third address octet -- confirm store addresses
## cannot collide on that octet (and that sqlpath contains no "-")
foreach db [glob -nocomplain -directory $sqlpath -type f -- sqlite-*.db] {
sqlite3 [set dbc db[lindex [split $db {-}] 3]] $db; lappend dbcs $dbc
}
## opening done...log it..
::log "running queries...generating files..."
## set a value for yesterday...positouch data only available for the previous day
set d1 [clock format [clock scan yesterday] -format %Y%m%d]
## is this monday?...if so we need to select the whole week
if {[clock format [clock seconds] -format %u] == 1} {
## set types and generate value for this day last week
set types "daily weekly"; set d2 [clock format [clock scan "last week"] -format %Y%m%d]
} else {set types daily}
## set our current numeric day of week (0-7) and week of year (00-53)
set dow [clock format [clock scan $d1 -format %Y%m%d] -format %u]
set woy [clock format [clock scan $d1 -format %Y%m%d] -format %V]
## create/setup our sales excel workbook
set wbk [excel::createWorkbook]
set wks [excel::createWorkSheet $wbk "16 Week Sales"]
set head [excel::createStyle $wbk -background \#D8D8D8]
set head2 [excel::createStyle $wbk -background \#D8D8D8 -custom {}]
set head3 [excel::createStyle $wbk -background \#D8D8D8 -center]
set cash [excel::createStyle $wbk -custom {}]
## set initial column header and week list
lappend chead [list [clock format [clock scan "7 $woy" -format {%u %V}]\
-format {%G-%m-%dT00:00:00.000}] DateTime $head2]; lappend weeks $woy
## build remainder of the worksheet column header and week list
for {set x 1} {$x<=16} {incr x} {
## handle negations of the week (wrap across the year boundary)
if {[set woy2 [expr {$woy-$x}]]<0} {set woy2 [expr {$woy2+53}]}
## build column name header
lappend chead [list [clock format [clock scan "7 $woy2" -format {%u %V}] \
-format {%G-%m-%dT00:00:00.000}] DateTime $head2] [list "+/- WTD" String $head3]
## build our week list so we only have to run this loop once
lappend weeks $woy2
};
## collect data and write our reports...
foreach dbc $dbcs {
## set our store number based on our database file...it's in there...
set store [string trimleft $dbc db]
## csvs first...loop through our types...if it's a monday we'll have a weekly and a daily
foreach type $types {
## check directory structure...create if not found...
if {![file exists [set p [file join $csvpath $type $store]]]} {
if {[catch {file mkdir $p} err] != 0} {
::log "ERROR: could not create path $p: $err"; continue
}
}
## set filename and query based on type...
switch -exact -- $type {
weekly {
set fname $store-week-$d1-$d2.csv
set q "SELECT inv_num,counts,date FROM itmsales WHERE date BETWEEN $d2 AND $d1;"
}
daily {
set fname $store-night-$d1.csv
set q "SELECT inv_num,counts,date FROM itmsales WHERE date=$d1;"
}
}
## generate a file buffer (sql returns a flat list of row values)
foreach {inv_num counts date} [::sql [list $dbc eval $q]] {
lappend outs $inv_num,$counts,$date
}
## make sure we got some data...
if {[info exists outs]} {
## write the csv...
if {[catch {puts [set fid [open [file join $p $fname] w]] [join $outs \n]} err] != 0} {
::log "ERROR: creating [file join $p $fname]: $err"; continue
} else {catch {close $fid}}
} else {
## oops...nothing in our 'outs'...log it and continue
::log "ERROR: creating [file join $p $fname] query returned empty result"; continue
}
## log it...and clean up our output variable for the next file...
::log "created file [file join $p $fname] for costguard import"; unset outs
}
## weee...sales reports...start our worksheet creation loop
for {set dow2 1} {$dow2<=$dow} {incr dow2} {
## translate number into day of week and convert 0 day
switch -exact -- $dow2 {
1 {set fdow Monday}
2 {set fdow Tuesday}
3 {set fdow Wednesday}
4 {set fdow Thursday}
5 {set fdow Friday}
6 {set fdow Saturday}
7 {set fdow Sunday}
}
## pull our sales info from the database
set x 0; foreach week $weeks {
## build the row header...
if {![info exists outs($dow2)]} {lappend outs($dow2) [list $fdow String $head]}
## build our query and pull the rest of our row...
set q "SELECT sum(tot_sales) FROM hrsales WHERE date ==\
[set d3 [clock format [clock scan [list $dow2 $week] -format {%u %V}] -format %Y%m%d]]\
and cost_centr between 1 and 3;"
## blank data...damn positouch...set any blanks to 0
if {![string length [set res [join [::sql [list $dbc eval $q]]]]]} {set res 0}
lappend outs($dow2) [list [format %0.2f $res] Number $cash]
## only write percentages on weeks past the current
if {$x != 0} {
lappend outs($dow2) [list "=\(RC\[-1\]-RC\[-[expr {$x*2}]\]\)" Number $cash]
}; incr x
## debug logging....
::log "DEBUG: $d3 -> res\($dow2\,$week) == $res"
}
}
## add column header with store number to worksheet
eval excel::addRow $wks [concat [list "\#$store String $head"] $chead]
## build the worksheet in week order (1-7) with data from our array
foreach row [lsort -integer -increasing [array names outs]] {
eval excel::addRow $wks $outs($row)
}
## build a totals line to the worksheet
for {set x 1} {$x<=16} {incr x} {
lappend temps [list "=SUM\(R\[-[llength [array names outs]]\]C:R\[-1\]C\)" Number $cash]\
[list "=\(RC\[-1\]-RC\[-[expr {$x*2}]\]\)" Number $cash]
}
## complete the line and add it to the worksheet
eval excel::addRow $wks [concat [list "TOTALS String $head"]\
[list [list "=SUM\(R\[-[llength [array names outs]]\]C:R\[-1\]C\)" Number $cash]] $temps]
## add a blank line to the worksheet and cleanup our variables
excel::addRow $wks [list "" String]; array unset outs; unset temps
}
## close databases...we have everything we need...
foreach dbc $dbcs {::sql [list $dbc close]}
## aight..we built it...now let's output it...show daddy that workbook
set fname [file join $rptpath "[clock format [clock scan [list 7 $woy] -format {%u %V}]\
-format %Y%m%d]-16wks.xml"]
if {[catch {puts [set fid [open $fname w]] [excel::asXml $wbk]} err] != 0} {
::log "ERROR: creating $fname: $err"
} else {catch {close $fid}}
## destroy the workbook...
excel::deleteWorkbook $wbk
## log it...we're almost done...
::log "created 16 week sales report: $fname"
## wooo...hooo...we're done...log it and take a bow
::log "daily reporting finished..closing databases and releasing thread [thread::id]"
## reschedule our batch for 7am and release the thread...
thread::send -async $::_ptid [list ::cgs::at 0700 ::cgs::daily]; thread::release [thread::id]
}
## enter thread event loop
thread::wait
}]
## log it..
::cgs::log "created thread $tid for daily processing ([llength [thread::names]] threads running)"
## copy important variables to thread and start the processing....
thread::send -async $tid [list set _ptid $ptid]
thread::send -async $tid [list ::rptIt $::cgs::srvPath $::cgs::sqlPath $::cgs::csvPath $::cgs::rptPath]
}
}
## handle background errors
## Global handler for uncaught event-loop errors: log the message plus the
## full stack trace from ::errorInfo, then terminate the process.
proc ::bgerror {text} {
::cgs::log "ERROR: unhandled error: $text"
foreach traceLine [split $::errorInfo \n] {
::cgs::log $traceLine
}
exit 0
}
## handle thread errors
## Handler for errors escaping worker threads (registered via
## thread::errorproc): log each line of the error info, then shut down.
proc ::tderror {tid einfo} {
foreach infoLine [split $einfo \n] {
::cgs::log "ERROR: in thread $tid: $infoLine"
}
exit 0
}
## register the thread error handler before any worker threads are created
thread::errorproc ::tderror
## check our argument numbers: exactly one config-file path is required
if {$argc != 1} {
## BUG FIX: the usage line omitted the required config-file argument
puts "Usage: $argv0 <configFile>\n"
puts "Config Example:"
puts "\tPORT 65520"
puts "\tHOST 10.0.90.10"
puts "\tRESTRICT 1"
puts "\tIPLIST 10.0.90.*"
puts "\tLOGT 3"
puts "\tLOGF /path/to/logfile.log"
puts "\tSRVPATH /path/to/server/directory"
puts "\tSQLPATH /where/to/store/sqlite_dbs"
puts "\tCSVPATH /where/to/store/csv_files"
puts "\tRPTPATH /where/to/store/report_files"; return
}
## we're good...let's read our conf
::cgs::readConf [lindex $argv 0]
## finished reading conf...start our listener
::cgs::init