commit a96336ab97
4 changed files with 178 additions and 131 deletions

@@ -1,5 +1,7 @@
-Travis-CI build status:
-[](https://travis-ci.org/stepcode/stepcode) [](https://ci.appveyor.com/project/mpictor/stepcode)
+Travis-CI | AppVeyor CI
+:-------------:|:---------------:
+Linux, OSX (LLVM) | Windows (MSVC)
+[](https://travis-ci.org/stepcode/stepcode) | [](https://ci.appveyor.com/project/mpictor/stepcode)

 ***********************************************************************
 STEPcode v0.8 -- stepcode.org, github.com/stepcode/stepcode

@@ -1,5 +1,7 @@
 //summarize MSVC errors from an appveyor log
 // compile with 'go build summarize-appveyor-log.go'
+// takes 0 or 1 args; with 0, gets log from latest
+// build. with 1, uses that file as raw json-like log
 package main

 import (
@@ -17,22 +19,29 @@ import (
 const (
     headerKey = "Authorization"
     headerVal = "Bearer %s"
-    projUrl = "https://ci.appveyor.com/api/projects/mpictor/stepcode"
+    projUrl = "https://ci.appveyor.com/api/projects/mpictor/stepcode"
-    //"https://ci.appveyor.com/api/buildjobs/2rjxdv1rnb8jcg8y/log"
-    logUrl = "https://ci.appveyor.com/api/buildjobs/%s/log"
+    logUrl = "https://ci.appveyor.com/api/buildjobs/%s/log"
+    consoleUrl = "https://ci.appveyor.com/api/buildjobs/%s/console"
 )

 //uses stdin and stdout
 func main() {
-    rawlog, build, err := getLog()
+    var rawlog io.ReadCloser
+    var build string
+    var err error
+    if len(os.Args) == 2 {
+        rawlog, build, err = processArgv()
+    } else {
+        rawlog, build, err = getLog()
+    }
     if err != nil {
         fmt.Fprintf(os.Stderr, "ERROR: %s\n", err)
         return
     }
     defer rawlog.Close()
-    log := unwrap(rawlog)
+    log := decodeConsole(rawlog)
     warns, errs := countMessages(log)
-    fi, err := os.Create(fmt.Sprintf("appveyor-%d.smy", build))
+    fi, err := os.Create(fmt.Sprintf("appveyor-%s.smy", build))
     if err != nil {
         fmt.Fprintf(os.Stderr, "ERROR: %s\n", err)
         return
@@ -46,20 +55,19 @@ func main() {

 /* categorizes warnings and errors based upon the MSVC message number (i.e. C4244)
 * the regex will match lines like
-[ 00:03:42] c:\projects\stepcode\src\base\sc_benchmark.h(45): warning C4251: 'benchmark::descr' : class 'std::basic_string<char,std::char_traits<char>,std::allocator<char>>' needs to have dll-interface to be used by clients of class 'benchmark' [C:\projects\STEPcode\build\src\base\base.vcxproj]
+c:\projects\stepcode\src\base\sc_benchmark.h(45): warning C4251: 'benchmark::descr' : class 'std::basic_string<char,std::char_traits<char>,std::allocator<char>>' needs to have dll-interface to be used by clients of class 'benchmark' [C:\projects\STEPcode\build\src\base\base.vcxproj]
 [00:03:48] C:\projects\STEPcode\src\base\sc_benchmark.cc(61): warning C4244: '=' : conversion from 'SIZE_T' to 'long', possible loss of data [C:\projects\STEPcode\build\src\base\base.vcxproj]*
 */
 func countMessages(log []string) (warns, errs map[string][]string) {
     warns = make(map[string][]string)
     errs = make(map[string][]string)
-    tstamp := `\[\d\d:\d\d:\d\d\] `
     fname := " *(.*)" // $1
     fline := `(?:\((\d+)\)| ): ` // $2 - either line number in parenthesis or a space, followed by a colon
     msgNr := `([A-Z]+\d+): ` // $3 - C4251, LNK2005, etc
     msgTxt := `([^\[]*) ` // $4
     tail := `\[[^\[\]]*\]`
-    warnRe := regexp.MustCompile(tstamp + fname + fline + `warning ` + msgNr + msgTxt + tail)
-    errRe := regexp.MustCompile(tstamp + fname + fline + `(?:fatal )?error ` + msgNr + msgTxt + tail)
+    warnRe := regexp.MustCompile(fname + fline + `warning ` + msgNr + msgTxt + tail)
+    errRe := regexp.MustCompile(fname + fline + `(?:fatal )?error ` + msgNr + msgTxt + tail)
     for _, line := range log {
         if warnRe.MatchString(line) {
             key := warnRe.ReplaceAllString(line, "$3")
@@ -125,45 +133,102 @@ func printMessages(typ string, m map[string][]string, w io.Writer) {
     }
 }

-//
-func unwrap(r io.Reader) (log []string) {
-    startNewLine := true
-    unwrapScanner := bufio.NewScanner(r)
-    var lineOut string
-    for unwrapScanner.Scan() {
-        lastNewline := startNewLine
-        lineIn := unwrapScanner.Text()
-        startNewLine = (len(lineIn) < 240) || strings.HasSuffix(lineIn, "vcxproj]")
-        if !lastNewline {
-            lineOut += lineIn[11:]
-        } else {
-            lineOut = lineIn
-        }
-        if startNewLine {
-            log = append(log, lineOut)
-            lineOut = ""
-        }
-    }
-    if len(lineOut) > 0 {
-        log = append(log, lineOut)
-    }
-    if err := unwrapScanner.Err(); err != nil {
-        fmt.Fprintln(os.Stderr, "Error reading appveyor log:", err)
-    }
-    return
-}
-//structs from http://json2struct.mervine.net/

+//http://json2struct.mervine.net/
+//{"values":[{"i":0,"t":"Specify a project or solution file. The directory does not contain a project or solution file.\r\n","dt":"00:00:04","bg":12,"fg":15}]}
+type AppVeyorConsoleLines struct {
+    Values []struct {
+        I int `json:"i"`
+        Text string `json:"t"`
+        DateTime string `json:"dt"`
+        BgColor int `json:"bg"`
+        FgColor int `json:"fg"`
+    }
+}
 type AppVeyorBuild struct {
     Build struct {
-        BuildNumber int `json:"buildNumber"`
-        Jobs []struct {
+        /*BuildNumber int `json:"buildNumber"`*/
+        Version string `json:"version"`
+        Jobs []struct {
             JobID string `json:"jobId"`
         } `json:"jobs"`
     } `json:"build"`
 }

-func getLog() (log io.ReadCloser, build int, err error) {
+func splitAppend(log *[]string, blob string) {
+    //blob = strings.Replace(blob,"\r\n", "\n",-1)
+    blob = strings.Replace(blob, "\\", "/", -1)
+    r := strings.NewReader(blob)
+    unwrapScanner := bufio.NewScanner(r)
+    for unwrapScanner.Scan() {
+        txt := unwrapScanner.Text()
+        //fmt.Printf("%s\n", txt)
+        *log = append(*log, txt)
+    }
+}
+
+//calculate length of string without escape chars
+// func escapeLen(s string)(l int) {
+// //s = strings.Replace(s,"\\\\", "/",-1)
+// s = strings.Replace(s,"\\\"", "",-1)
+// s = strings.Replace(s,"\r\n", "RN",-1)
+// return len(s)
+// }
+
+
+//decode the almost-JSON console data from appveyor
+func decodeConsole(r io.Reader) (log []string) {
+    wrapper := Wrap(r)
+    dec := json.NewDecoder(wrapper)
+    var consoleLines AppVeyorConsoleLines
+    var err error
+    var txtBlob string
+    err = dec.Decode(&consoleLines)
+    if err == io.EOF {
+        err = nil
+    }
+    if err == nil {
+        for _, l := range consoleLines.Values {
+            txtBlob += l.Text
+            //el := escapeLen(l.Text)
+            //something inserts newlines at 229 chars (+\n\r == 231) (found in CMake output)
+            lenTwoThreeOne := len(l.Text) == 231
+            if lenTwoThreeOne {
+                txtBlob = strings.TrimSuffix(txtBlob, "\r\n")
+            }
+            //something else starts new log lines at 1024 chars without inserting newlines (found in CTest error output)
+            if len(l.Text) != 1024 && !lenTwoThreeOne {
+                //fmt.Printf("sa for l %d, el %d\n", len(l.Text),el)
+                splitAppend(&log, txtBlob)
+                txtBlob = ""
+            }
+        }
+    } else {
+        fmt.Printf("decode err %s\n", err)
+    }
+    if len(txtBlob) > 0 {
+        splitAppend(&log, txtBlob)
+    }
+    return
+}
+
+func processArgv() (log io.ReadCloser, build string, err error) {
+    fname := os.Args[1]
+    if len(fname) < 14 {
+        err = fmt.Errorf("Name arg '%s' too short. Run as '%s appveyor-NNN.log'", fname, os.Args[0])
+        return
+    }
+    buildRe := regexp.MustCompile(`appveyor-(.+).log`)
+    build = buildRe.ReplaceAllString(fname, "$1")
+    if len(build) == 0 {
+        err = fmt.Errorf("No build id in %s", fname)
+        return
+    }
+    log, err = os.Open(fname)
+    return
+}
+
+func getLog() (log io.ReadCloser, build string, err error) {
     client := &http.Client{}
     req, err := http.NewRequest("GET", projUrl, nil)
     if err != nil {
@@ -171,22 +236,23 @@ func getLog() (log io.ReadCloser, build int, err error) {
     }
     apikey := os.Getenv("APPVEYOR_API_KEY")
+    //api key isn't necessary for read-only queries on public projects
+    //if len(apikey) < 1 {
+    if len(apikey) > 0 {
+        req.Header.Add(headerKey, fmt.Sprintf(headerVal, apikey))
+    } //else {
+    // fmt.Printf("Env var APPVEYOR_API_KEY is not set.")
+    //}
-    req.Header.Add(headerKey, fmt.Sprintf(headerVal,apikey))
     resp, err := client.Do(req)
     if err != nil {
         return
     }

-    build, job := decode(resp.Body)
-    fmt.Printf("build #%d, jobId %s\n", build, job)
-    resp, err = http.Get(fmt.Sprintf(logUrl, job))
+    build, job := decodeProjInfo(resp.Body)
+    fmt.Printf("build #%s, jobId %s\n", build, job)
+    resp, err = http.Get(fmt.Sprintf(consoleUrl, job))
     if err != nil {
         return
     }
-    logName := fmt.Sprintf("appveyor-%d.log", build)
+    logName := fmt.Sprintf("appveyor-%s.log", build)
     fi, err := os.Create(logName)
     if err != nil {
         return
@@ -202,7 +268,7 @@ func getLog() (log io.ReadCloser, build int, err error) {
     return
 }

-func decode(r io.Reader) (num int, job string) {
+func decodeProjInfo(r io.Reader) (vers string, job string) {
     dec := json.NewDecoder(r)
     var av AppVeyorBuild
     err := dec.Decode(&av)
@@ -213,9 +279,52 @@ func decode(r io.Reader) (num int, job string) {
     if len(av.Build.Jobs) != 1 {
         return
     }
-    num = av.Build.BuildNumber
+    vers = av.Build.Version
     job = av.Build.Jobs[0].JobID
     return
 }

+//wrap a reader, modifying content to make the json decoder happy
+//only tested with data from appveyor console
+type jsonWrapper struct {
+    source io.Reader
+    begin bool
+    end bool
+}
+
+func Wrap(r io.Reader) *jsonWrapper {
+    return &jsonWrapper{
+        source: r,
+        begin: true,
+    }
+}
+
+// func nonNeg(n int) (int) {
+// if n < 0 {
+// return 0
+// }
+// return n
+// }
+
+func (w *jsonWrapper) Read(p []byte) (n int, err error) {
+    if w.end {
+        return 0, io.EOF
+    }
+    if w.begin {
+        w.begin = false
+        n = copy(p, []byte(`{"values":[`))
+    }
+    m, err := w.source.Read(p[n:])
+    n += m
+    if err == io.EOF {
+        w.end = true
+        if n < len(p) {
+            n = copy(p, []byte(`{"dummy":"data"}]}`))
+        } else {
+            err = fmt.Errorf("No room to terminate JSON struct with '}'\n")
+        }
+    }
+    return
+}
+
 // kate: indent-width 8; space-indent off; replace-tabs off; replace-tabs-save off; replace-trailing-space-save on; remove-trailing-space on; tab-intent on; tab-width 8; show-tabs off;
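
The countMessages hunk above keys every warning and error on its MSVC message number (capture group $3 of the assembled regex). Below is a minimal, self-contained sketch of that matching, reusing the regex fragments and the sample C4251 line quoted in the comment; how the matches are collected here is illustrative, not the original's exact bookkeeping.

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // regex fragments as in the new countMessages (the tstamp prefix is gone,
    // since the console-based log no longer carries an [hh:mm:ss] timestamp)
    fname := " *(.*)"            // $1 - source file path
    fline := `(?:\((\d+)\)| ): ` // $2 - line number in parentheses, or a space, then a colon
    msgNr := `([A-Z]+\d+): `     // $3 - C4251, LNK2005, etc
    msgTxt := `([^\[]*) `        // $4 - message text, up to the trailing [project] tag
    tail := `\[[^\[\]]*\]`
    warnRe := regexp.MustCompile(fname + fline + `warning ` + msgNr + msgTxt + tail)

    line := `c:\projects\stepcode\src\base\sc_benchmark.h(45): warning C4251: 'benchmark::descr' : class 'std::basic_string<char,std::char_traits<char>,std::allocator<char>>' needs to have dll-interface to be used by clients of class 'benchmark' [C:\projects\STEPcode\build\src\base\base.vcxproj]`

    warns := make(map[string][]string)
    if warnRe.MatchString(line) {
        // the message number becomes the category key, as in countMessages
        key := warnRe.ReplaceAllString(line, "$3")
        warns[key] = append(warns[key], line)
    }
    for key, lines := range warns {
        fmt.Printf("%s: %d occurrence(s)\n", key, len(lines)) // prints: C4251: 1 occurrence(s)
    }
}
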
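The console endpoint returns almost-JSON: per the comments above, a comma-separated run of {"i":...,"t":...,"dt":...,"bg":...,"fg":...} objects with a trailing comma rather than a complete document. The sketch below shows the Wrap/decodeConsole idea in a self-contained form: a wrapping reader prepends '{"values":[' and, at EOF, appends a dummy object plus ']}' so one json.Decoder.Decode call can consume the whole stream. Type and variable names and the sample payload are illustrative, and the wrapper assumes the read buffer always has room for the short prefix and suffix (json.Decoder's buffer does).

package main

import (
    "encoding/json"
    "fmt"
    "io"
    "strings"
)

// consoleWrapper massages the raw console stream into valid JSON on the fly.
type consoleWrapper struct {
    src   io.Reader
    begin bool
    end   bool
}

func (w *consoleWrapper) Read(p []byte) (n int, err error) {
    if w.end {
        return 0, io.EOF
    }
    if w.begin {
        w.begin = false
        n = copy(p, `{"values":[`) // open the wrapping object and array
    }
    m, err := w.src.Read(p[n:])
    n += m
    if err == io.EOF {
        w.end = true
        err = nil
        // the dummy entry absorbs the trailing comma, then the array and object are closed
        n += copy(p[n:], `{"dummy":"data"}]}`)
    }
    return
}

func main() {
    // hypothetical two-entry console fragment, comma-terminated as AppVeyor sends it
    raw := `{"i":0,"t":"first line\r\n","dt":"00:00:01","bg":12,"fg":15},` +
        `{"i":1,"t":"second line\r\n","dt":"00:00:02","bg":12,"fg":15},`

    var doc struct { // trimmed-down AppVeyorConsoleLines: only the text field matters here
        Values []struct {
            Text string `json:"t"`
        } `json:"values"`
    }
    dec := json.NewDecoder(&consoleWrapper{src: strings.NewReader(raw), begin: true})
    if err := dec.Decode(&doc); err != nil {
        fmt.Println("decode err", err)
        return
    }
    for _, v := range doc.Values {
        fmt.Print(v.Text) // "first line", "second line"; the dummy entry decodes to an empty Text
    }
}

The committed Read keeps io.EOF and reuses the start of the buffer for the terminator; the version here clears the error and appends after any bytes already read, which is the same idea with slightly different bookkeeping. Either way, decodeConsole still has to rejoin entries that arrive split at 231 or 1024 characters before handing lines to countMessages.
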
@@ -13,13 +13,15 @@
 #include <Registry.h>
 #include "sc_memmgr.h"

-const TypeDescriptor * t_sdaiINTEGER = NULL;
-const TypeDescriptor * t_sdaiREAL = NULL;
-const TypeDescriptor * t_sdaiNUMBER = NULL;
-const TypeDescriptor * t_sdaiSTRING = NULL;
-const TypeDescriptor * t_sdaiBINARY = NULL;
-const TypeDescriptor * t_sdaiBOOLEAN = NULL;
-const TypeDescriptor * t_sdaiLOGICAL = NULL;
+/* these may be shared between multiple Registry instances, so don't create/destroy in Registry ctor/dtor
+ * Name, FundamentalType, Originating Schema, Description */
+const TypeDescriptor * const t_sdaiINTEGER = new TypeDescriptor( "INTEGER", sdaiINTEGER, 0, "INTEGER" );
+const TypeDescriptor * const t_sdaiREAL = new TypeDescriptor( "REAL", sdaiREAL, 0, "Real" );
+const TypeDescriptor * const t_sdaiNUMBER = new TypeDescriptor( "NUMBER", sdaiNUMBER, 0, "Number" );
+const TypeDescriptor * const t_sdaiSTRING = new TypeDescriptor( "STRING", sdaiSTRING, 0, "String" );
+const TypeDescriptor * const t_sdaiBINARY = new TypeDescriptor( "BINARY", sdaiBINARY, 0, "Binary" );
+const TypeDescriptor * const t_sdaiBOOLEAN = new TypeDescriptor( "BOOLEAN", sdaiBOOLEAN, 0, "Boolean" );
+const TypeDescriptor * const t_sdaiLOGICAL = new TypeDescriptor( "LOGICAL", sdaiLOGICAL, 0, "Logical" );

 static int uniqueNames( const char *, const SchRename * );

@@ -30,43 +32,6 @@ Registry::Registry( CF_init initFunct )
     active_schemas = SC_HASHcreate( 10 );
     active_types = SC_HASHcreate( 100 );

-    if( !t_sdaiINTEGER ) {
-        t_sdaiINTEGER = new TypeDescriptor( "INTEGER", // Name
-                                            sdaiINTEGER, // FundamentalType
-                                            0, // Originating Schema
-                                            "INTEGER" ); // Description;
-    }
-    if( !t_sdaiREAL ) {
-        t_sdaiREAL = new TypeDescriptor( "REAL", sdaiREAL,
-                                         0, // Originating Schema
-                                         "Real" );
-    }
-    if( !t_sdaiSTRING ) {
-        t_sdaiSTRING = new TypeDescriptor( "STRING", sdaiSTRING,
-                                           0, // Originating Schema
-                                           "String" );
-    }
-    if( !t_sdaiBINARY ) {
-        t_sdaiBINARY = new TypeDescriptor( "BINARY", sdaiBINARY,
-                                           0, // Originating Schema
-                                           "Binary" );
-    }
-    if( !t_sdaiBOOLEAN ) {
-        t_sdaiBOOLEAN = new TypeDescriptor( "BOOLEAN", sdaiBOOLEAN,
-                                            0, // Originating Schema
-                                            "Boolean" );
-    }
-    if( !t_sdaiLOGICAL ) {
-        t_sdaiLOGICAL = new TypeDescriptor( "LOGICAL", sdaiLOGICAL,
-                                            0, // Originating Schema
-                                            "Logical" );
-    }
-    if( !t_sdaiNUMBER ) {
-        t_sdaiNUMBER = new TypeDescriptor( "NUMBER", sdaiNUMBER,
-                                           0, // Originating Schema
-                                           "Number" );
-    }
-
     initFunct( *this );
     SC_HASHlistinit( active_types, &cur_type );
     SC_HASHlistinit( primordialSwamp, &cur_entity ); // initialize cur's
@@ -80,35 +45,6 @@ Registry::~Registry() {
     SC_HASHdestroy( active_schemas );
     SC_HASHdestroy( active_types );
     delete col;
-
-    if( t_sdaiINTEGER ) {
-        delete t_sdaiINTEGER;
-        t_sdaiINTEGER = NULL;
-    }
-    if( t_sdaiREAL ) {
-        delete t_sdaiREAL;
-        t_sdaiREAL = NULL;
-    }
-    if( t_sdaiSTRING ) {
-        delete t_sdaiSTRING;
-        t_sdaiSTRING = NULL;
-    }
-    if( t_sdaiBINARY ) {
-        delete t_sdaiBINARY;
-        t_sdaiBINARY = NULL;
-    }
-    if( t_sdaiBOOLEAN ) {
-        delete t_sdaiBOOLEAN;
-        t_sdaiBOOLEAN = NULL;
-    }
-    if( t_sdaiLOGICAL ) {
-        delete t_sdaiLOGICAL;
-        t_sdaiLOGICAL = NULL;
-    }
-    if( t_sdaiNUMBER ) {
-        delete t_sdaiNUMBER;
-        t_sdaiNUMBER = NULL;
-    }
 }

 void Registry::DeleteContents() {

@@ -21,13 +21,13 @@


 // defined and created in Registry.cc
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiINTEGER;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiREAL;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiNUMBER;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiSTRING;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiBINARY;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiBOOLEAN;
-extern SC_CORE_EXPORT const TypeDescriptor * t_sdaiLOGICAL;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiINTEGER;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiREAL;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiNUMBER;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiSTRING;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiBINARY;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiBOOLEAN;
+extern SC_CORE_EXPORT const TypeDescriptor * const t_sdaiLOGICAL;

 typedef struct Hash_Table * HashTable;