Class ::github::ActivityMonitor (public)
::nx::Class ::github::ActivityMonitor
Defined in packages/xowiki/tcl/github-activity-monitor-procs.tcl
- Testcases:
- No testcase defined.
Source code:
:property {organization OpenACS}
:property {api_base "https://api.github.com"}
:property {api_token}

#--- get -------------------------------------------------------------------------
:public method get {path {query_args {}}} {
    #
    # Issue a GET request against the GitHub REST API for the
    # configured organization and return the raw response body.
    #
    # @param path       API path, e.g. "/orgs/openacs/events"
    # @param query_args flat key/value list of extra query parameters;
    #                   a default of "per_page 100" is always included
    #                   (caller-supplied values override it)
    # @return response body (JSON text) on HTTP 2xx
    # @error  raises "GitHub API error $status" on any non-2xx status
    #
    set url "${:api_base}${path}"

    # Default query arg; entries from $query_args may override it.
    dict set query per_page 100
    foreach {k v} $query_args {
        dict set query $k $v
    }

    # NOTE(review): query values are not URL-encoded; this is fine for
    # the simple numeric values used here, but would break on values
    # containing "&" or "=".
    set urlDict [ns_parseurl $url]
    dict set urlDict query [join [lmap {k v} $query {string cat $k=$v}] &]

    set headers [ns_set create headers \
                     Authorization "Bearer ${:api_token}" \
                     Accept "application/vnd.github+json" \
                     User-Agent "openacs-activity-dashboard/1.0" \
                    ]
    ns_log notice request: [list ns_http run -method GET -headers $headers [ns_joinurl $urlDict]]

    set result [ns_http run -method GET -headers $headers [ns_joinurl $urlDict]]
    set status [dict get $result status]
    set body   [dict get $result body]
    if {$status < 200 || $status >= 300} {
        ns_log Error "GitHub GET $url failed: status $status, body: $body"
        error "GitHub API error $status"
    }
    return $body
}

if 0 {
    # Ad-hoc usage example (never executed; kept for interactive testing).
    set jsonText [::github-activity-monitor::github::get "/orgs/openacs/events" [list page 1]]
    # util::json2dict
    set result [[dom parse -json -- $jsonText] asTclValue]
}

#--- fetch_new_events ------------------------------------------------------------
:method fetch_new_events {} {
    #
    # Fetch events newer than the last processed event ID, reading at
    # most 100 pages in a single run.
    #
    # @return list of event dicts in oldest-first order, ready for insertion
    #

    # Last event we processed, if any
    set last_event_id [xo::dc get_value last_id {
        select max(event_id) from github_activity
    }]

    set events {}
    set page 1
    set done 0
    while {!$done} {
        # Use the configured organization property rather than a
        # hard-coded org name (GitHub treats org logins in URL paths
        # case-insensitively).
        set body [:get "/orgs/${:organization}/events" [list page $page]]
        set batch [::util::json2dict $body]
        if {[llength $batch] == 0} {
            break
        }
        foreach ev $batch {
            set ev_id [dict get $ev id]
            # Events are newest-first; once we reach an older or equal ID, stop.
            if {$last_event_id ne "" && $ev_id <= $last_event_id} {
                set done 1
                break
            }
            lappend events $ev
        }
        incr page
        if {$page > 100} {
            ns_log Warning "GitHub: reached page limit while fetching events; stopping at page $page"
            break
        }
        if {$done} {
            break
        }
    }

    # We collected in newest-first order; reverse to oldest-first for insertion.
    return [lreverse $events]
}

#--- summarize_push_event --------------------------------------------------------
:public method summarize_push_event {ev} {
    #
    # Fetch commit details and build a summary for PushEvents.
    #
    # @param ev event dict as returned by the GitHub events API
    # @return dict with fields of the push event, or "" when the event
    #         is not a PushEvent or lacks repo/head information
    #
    set type [dict get $ev type]
    if {$type ne "PushEvent"} {
        return ""
    }

    set repo_full   [dict get $ev repo name]    ;# e.g. "openacs/openacs-core"
    set ref         [dict get $ev payload ref]  ;# "refs/heads/main"
    set branch      [lindex [split $ref "/"] end]
    set head_sha    [dict get $ev payload head]
    set created_at  [dict get $ev created_at]
    set actor_login [dict get $ev actor login]

    if {$repo_full eq "" || $head_sha eq ""} {
        return ""
    }

    # Fetch commit details
    set commit_body [:get "/repos/$repo_full/commits/$head_sha"]
    set c [::util::json2dict $commit_body]

    set commit      [dict get $c commit]
    set author_dict [dict get $commit author]
    set author_name [dict get $author_dict name]
    set commit_date [dict get $author_dict date] ;# ISO8601 string

    set message_full [dict get $commit message]
    set title        [lindex [split $message_full "\n"] 0]

    set stats     [dict get $c stats]
    set additions [dict get $stats additions]
    set deletions [dict get $stats deletions]

    set files         [dict get $c files]
    set files_changed [llength $files]

    return [list \
                event_id      [dict get $ev id] \
                repo          $repo_full \
                branch        $branch \
                sha           [dict get $c sha] \
                author_name   $author_name \
                author_login  $actor_login \
                commit_date   $commit_date \
                created_at    $created_at \
                title         $title \
                url           [dict get $c html_url] \
                files_changed $files_changed \
                additions     $additions \
                deletions     $deletions \
                raw_payload   $commit_body \
               ]
}

#--- sync_from_github ------------------------------------------------------------
:public method sync_from_github {} {
    #
    # Sync events from GitHub: fetch, summarize, insert.
    #
    ns_log Notice "GitHub: refresh_activity start"

    set new_events [:fetch_new_events]
    if {[llength $new_events] == 0} {
        ns_log Notice "GitHub: no new events"
        return
    }

    ::xo::dc transaction {
        foreach ev $new_events {
            set summary [:summarize_push_event $ev]
            if {$summary eq ""} {
                continue
            }
            dict with summary {
                # Avoid duplicate insert if raced with another run
                xo::dc dml insert_activity {
                    insert into github_activity (
                        event_id, repo, branch, sha,
                        author_name, author_login,
                        commit_date, created_at,
                        title, url,
                        files_changed, additions, deletions,
                        raw_payload
                    ) values (
                        :event_id, :repo, :branch, :sha,
                        :author_name, :author_login,
                        :commit_date, :created_at,
                        :title, :url,
                        :files_changed, :additions, :deletions,
                        :raw_payload
                    ) on conflict (repo, sha) do nothing
                }
            }
        }
    }
    ns_log Notice "GitHub: refresh_activity done; processed [llength $new_events] events"
}

#--- next_backfill_event_id ------------------------------------------------------
:method next_backfill_event_id {} {
    #
    # Helper for backfill_repo_history. Returns the next negative
    # (synthetic) event_id to use, one below the smallest existing one.
    #

    # smallest existing event_id (could be null, positive, or already negative)
    set min_id [xo::dc get_value min_event_id {
        select min(event_id) from github_activity
    }]
    if {$min_id eq "" || $min_id >= 0} {
        # no rows, or only positive IDs so far -> start at -1
        return -1
    } else {
        # already have negative IDs -> one step lower
        return [expr {$min_id - 1}]
    }
}

#--- backfill_repo_history -------------------------------------------------------
:public method backfill_repo_history {-repo {-branch main} {-start_page 1} {-max_pages 50}} {
    #
    # Backfill commit history for a single repo/branch using
    # /repos/{repo}/commits. Uses synthetic negative event_id values so
    # backfilled rows never collide with real GitHub event IDs.
    #
    # @param repo       e.g. "openacs/openacs-core"
    # @param branch     e.g. "main" or "oacs-5-10"
    # @param start_page first result page to fetch
    # @param max_pages  maximum number of pages to process in this run
    #
    ns_log Notice "GitHub backfill_repo_history: $repo ($branch)"

    # We'll assign event_id from the next negative value downwards.
    set next_event_id [:next_backfill_event_id]

    set page $start_page
    set pages_done 0
    set done 0
    set count 0
    while {!$done} {
        #
        # Off-by-one fixed: ">=" processes exactly max_pages pages
        # (the previous ">" processed max_pages + 1).
        #
        if {$pages_done >= $max_pages} {
            ns_log notice "GitHub backfill_repo_history: reached" "max_pages=$max_pages for $repo ($branch)"
            break
        }

        # List commits, newest first
        set body [:get "/repos/$repo/commits" [list sha $branch page $page per_page 100]]
        set batch [::util::json2dict $body]
        if {[llength $batch] == 0} {
            ns_log notice "GitHub backfill_repo_history: no more commits" at page $page for $repo ($branch)
            break
        }

        ::xo::dc transaction {
            foreach summary $batch {
                set sha [dict get $summary sha]

                # Skip commits we already have (from events or prior backfill).
                set exists [xo::dc 0or1row exists_commit {
                    select 1 from github_activity
                    where repo = :repo and sha = :sha
                    limit 1
                }]
                if {$exists} {
                    ns_log notice "GitHub backfill_repo_history: found existing commit" "$repo $sha, skipping (page $page)"
                    #set done 1
                    #break
                    continue
                }
                incr count

                #
                # Fetch full commit details (stats, files, etc.),
                # similar to summarize_push_event.
                #
                set commit_body [:get "/repos/$repo/commits/$sha"]
                set c [::util::json2dict $commit_body]
                set commit [dict get $c commit]
                #ns_log notice "commit keys: [dict keys $commit]"
                ns_log notice "(page $page, count $count) commit.message($count):" [dict get $commit message]

                set author_dict [dict get $commit author]
                set author_name [dict get $author_dict name]
                set commit_date [dict get $author_dict date]

                set stats     [dict get $c stats]
                set additions [dict get $stats additions]
                set deletions [dict get $stats deletions]

                set files         [dict get $c files]
                set files_changed [llength $files]

                set message_full [dict get $commit message]
                if {$message_full eq ""} {
                    # Fallback: don't leave title NULL, DB wants NOT NULL
                    set title "(no commit message)"
                } else {
                    set title [lindex [split [string trimleft $message_full] \n] 0]
                }
                #ns_log notice "commit.title: $title"

                if {[dict exists $c html_url]} {
                    set url [dict get $c html_url]
                } else {
                    # construct GitHub web URL as a fallback
                    set url "https://github.com/$repo/commit/$sha"
                }

                # Synthetic negative event_id
                set event_id $next_event_id
                incr next_event_id -1

                set created_at $commit_date ;# for backfill, we just reuse commit time

                ::xo::dc dml insert_backfill {
                    insert into github_activity (
                        event_id, repo, branch, sha,
                        author_name, author_login,
                        commit_date, created_at,
                        title, url,
                        files_changed, additions, deletions,
                        raw_payload
                    ) values (
                        :event_id, :repo, :branch, :sha,
                        :author_name, NULL,
                        :commit_date, :created_at,
                        :title, :url,
                        :files_changed, :additions, :deletions,
                        :commit_body
                    ) on conflict (repo, sha) do nothing
                }
            }
        }
        if {$done} {
            break
        }
        incr page
        incr pages_done
    }
    ns_log notice "GitHub backfill_repo_history: done for $repo ($branch)"
}XQL Not present: Generic, PostgreSQL, Oracle
![[i]](/resources/acs-subsite/ZoomIn16.gif)