Dataset schema (one row per source file):
- filename: string (length 4 to 198)
- content: string (length 25 to 939k)
- environment: list
- variablearg: list
- constarg: list
- variableargjson: string (1 class)
- constargjson: string (length 2 to 3.9k)
- lang: string (3 values)
- constargcount: float64 (range 0 to 129)
- variableargcount: float64 (range 0 to 0)
- sentence: string (1 class)
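The rows below each pair a source file with the environment variables it reads or writes. For orientation, here is a minimal sketch of how such rows might be iterated once exported; the JSON Lines layout and the file name env_vars.jsonl are assumptions for illustration, not something this dump specifies.

import json

# Iterate rows of the (assumed) JSON Lines export and decode the
# constargjson field, which serializes the constant environment-variable
# names as a JSON list, e.g. '["CUDA_VISIBLE_DEVICES"]'.
with open("env_vars.jsonl") as f:
    for line in f:
        row = json.loads(line)
        const_args = json.loads(row["constargjson"])
        print(row["filename"], row["lang"], row["constargcount"], const_args)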
src/system/pose_2d/interface.py
import time
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
import os
import cv2
import numpy as np
from src.utils.body_cover import BodyCover
from src.utils.drawer import Drawer
from src.utils.pose import Pose2D, PoseConfig
import matplotlib.image as mpimg


class Pose2DInterface:

    def __init__(self, session, protograph, post_processing, input_size, subject_padding,
                 input_node_name, output_node_name):

        os.environ['CUDA_VISIBLE_DEVICES'] = ''

        with tf.gfile.GFile(protograph, "rb") as f:
            restored_graph_def = tf.GraphDef()
            restored_graph_def.ParseFromString(f.read())

        tf.import_graph_def(
            restored_graph_def,
            input_map=None,
            return_elements=None,
            name=""
        )

        self.session = session
        self.graph = tf.get_default_graph()
        self.image = self.graph.get_tensor_by_name(input_node_name)
        self.output = self.graph.get_tensor_by_name(output_node_name)
        self.input_size = input_size
        self.post_processing = post_processing
        self.subject_padding = subject_padding
        self.body_cover = BodyCover(self.subject_padding)

    """
    If the model only outputs heatmaps, this post-processing transforms the
    resulting heatmaps into the 2D pose.
    (defined for the post_processing attribute in the init method)
    """
    @staticmethod
    def standard_heatmap_postprocessing(heatmaps, cropped_image_bbox, input_size):

        aligned_heatmaps = heatmaps.reshape(heatmaps.shape[0] * heatmaps.shape[1], -1)

        max_ids = np.argmax(aligned_heatmaps, axis=0)
        confidences = [aligned_heatmaps[max_ids[i], i] for i in range(len(max_ids))]

        xPos = np.remainder(max_ids, heatmaps.shape[1]).reshape((-1, 1))
        yPos = np.divide(max_ids, heatmaps.shape[1]).astype(np.uint8).reshape((-1, 1))

        res = np.hstack([xPos, yPos]).astype(np.float32)
        res[:, 0] /= heatmaps.shape[1]
        res[:, 1] /= heatmaps.shape[0]

        newPose = Pose2D(res).to_absolute_coordinate_from(cropped_image_bbox).clamp(0.0, 1.0)

        return newPose, confidences

    """
    If the model outputs heatmaps + offset vectors, this post-processing
    transforms the resulting output into the 2D pose.
    (defined for the post_processing attribute in the init method)
    """
    @staticmethod
    def our_approach_postprocessing(network_out, subject_bbox, input_size):

        total_joints = PoseConfig.get_total_joints()

        heatmap = network_out[:, :, :total_joints]
        xOff = network_out[:, :, total_joints:(total_joints * 2)]
        yOff = network_out[:, :, (total_joints * 2):]

        confidences = []
        joints = np.zeros((total_joints, 2)) - 1

        for jointId in range(total_joints):

            inlined_pix = heatmap[:, :, jointId].reshape(-1)
            pixId = np.argmax(inlined_pix)
            confidence = inlined_pix[pixId]

            # if max confidence below 0.01 => inactive joint
            if inlined_pix[pixId] < 0.01:
                confidences.append(confidence)
                continue

            outX = pixId % heatmap.shape[1]
            outY = pixId // heatmap.shape[1]

            x = outX / heatmap.shape[1] * input_size + xOff[outY, outX, jointId]
            y = outY / heatmap.shape[0] * input_size + yOff[outY, outX, jointId]

            x = x / input_size
            y = y / input_size

            joints[jointId, 0] = x
            joints[jointId, 1] = y
            confidences.append(confidence)

        return Pose2D(joints).to_absolute_coordinate_from(subject_bbox), confidences

    i = 0

    """
    Pose 2D inference

    Args:
        * img : the image to annotate
        * subject_bboxes : the bboxes without padding, in %
        * prev_poses : used by the body cover to hide strangers when people
          are standing side by side

    Return:
        * a list of src.utils.pose.Pose2D and a list of confidences
          (per person, per joint)
    """
    def predict(self, img, subject_bboxes, prev_poses=[]):

        Pose2DInterface.i += 1

        if len(subject_bboxes) == 0:
            return [], []

        cropped_images = []

        # filter bboxes having no size, ensuring the cropped image is not empty
        filtered_bbox, filtered_poses = [], []

        for subject_id in range(len(subject_bboxes)):

            subject_bbox = subject_bboxes[subject_id]
            subject_bbox_padded = subject_bbox.to_squared(img, self.subject_padding)

            width = int(subject_bbox_padded.get_width() * img.shape[1])
            height = int(subject_bbox_padded.get_height() * img.shape[0])

            if width > 0 and height > 0:
                filtered_bbox.append(subject_bboxes[subject_id])
                if subject_id < len(prev_poses):
                    filtered_poses.append(prev_poses[subject_id])

        subject_bboxes, prev_poses = filtered_bbox, filtered_poses

        # crop images and hide stranger bodies
        for subject_id in range(len(subject_bboxes)):

            subject_bbox = subject_bboxes[subject_id]
            subject_bbox_padded = subject_bbox.to_squared(img, self.subject_padding)

            ada_bboxes, adaPoses, subject_id_to_keep = [], [], subject_id

            for i in range(len(subject_bboxes)):

                curr_bbox = subject_bboxes[i]
                curr_bbox = curr_bbox.intersect(subject_bbox_padded)

                if curr_bbox is None:  # intersection is empty
                    if i < subject_id:
                        subject_id_to_keep -= 1
                    continue

                curr_bbox = curr_bbox.translate(-subject_bbox_padded.get_min_x(),
                                                -subject_bbox_padded.get_min_y())
                curr_bbox = curr_bbox.scale(1.0 / subject_bbox_padded.get_width(),
                                            1.0 / subject_bbox_padded.get_height())
                ada_bboxes.append(curr_bbox)

                if i < len(prev_poses) and prev_poses[i] is not None:
                    adaPoses.append(prev_poses[i].to_relative_coordinate_into(subject_bbox_padded))
                else:
                    adaPoses.append(None)

            cropped_img = subject_bbox_padded.crop(img)
            cropped_img = self.body_cover.hide_strangers(cropped_img, ada_bboxes,
                                                         subject_id_to_keep, adaPoses)
            cropped_img = cv2.resize(cropped_img, (self.input_size, self.input_size),
                                     interpolation=cv2.INTER_AREA)
            cropped_img = cropped_img.astype(np.float32) / (255.0 / 2.0) - 1.0

            cropped_images.append(cropped_img)

        # infer the cropped images
        out = np.zeros((0, PoseConfig.get_total_joints() * 3))
        if len(cropped_images) > 0:
            out = self.session.run(self.output, feed_dict={self.image: cropped_images})

        # decode outputs
        poses_2d, confidences = [], []

        for subject_id in range(out.shape[0]):

            # 1.- recover the pose inside the cropped image from the confidence heatmaps
            curr_heatmaps = out[subject_id, :, :, :]
            cropped_image_bbox = subject_bboxes[subject_id].to_squared(img, self.subject_padding)
            curr_pose_2d, curr_confidences = self.post_processing(curr_heatmaps,
                                                                  cropped_image_bbox,
                                                                  self.input_size)

            poses_2d.append(curr_pose_2d)
            confidences.append(curr_confidences)

        return poses_2d, confidences
[]
[]
[ "CUDA_VISIBLE_DEVICES" ]
[]
["CUDA_VISIBLE_DEVICES"]
python
1
0
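The row above pairs interface.py with constarg ["CUDA_VISIBLE_DEVICES"] and constargcount 1. A plausible reading, judging from the code, is that constarg records environment accesses whose variable name is a string literal, while variablearg would record names computed at runtime; the sketch below illustrates that distinction. The variable-name example is hypothetical and does not appear in interface.py.

import os

# Constant-argument access: the name is a string literal, so a static
# analyzer can record "CUDA_VISIBLE_DEVICES" directly (this is the access
# counted in the row above).
os.environ['CUDA_VISIBLE_DEVICES'] = ''

# Variable-argument access (hypothetical): the name is computed at runtime,
# so it cannot be resolved to a constant string.
suffix = '_VISIBLE_DEVICES'
value = os.environ.get('CUDA' + suffix, '')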
tidb-server/main.go
// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"context"
	"flag"
	"fmt"
	"io/ioutil"
	"os"
	"runtime"
	"strconv"
	"strings"
	"sync/atomic"
	"time"

	"github.com/opentracing/opentracing-go"
	"github.com/pingcap/errors"
	"github.com/pingcap/log"
	"github.com/pingcap/parser/mysql"
	"github.com/pingcap/parser/terror"
	"github.com/pingcap/pd/client"
	pumpcli "github.com/pingcap/tidb-tools/tidb-binlog/pump_client"
	"github.com/pingcap/tidb/bindinfo"
	"github.com/pingcap/tidb/config"
	"github.com/pingcap/tidb/ddl"
	"github.com/pingcap/tidb/domain"
	"github.com/pingcap/tidb/kv"
	"github.com/pingcap/tidb/metrics"
	plannercore "github.com/pingcap/tidb/planner/core"
	"github.com/pingcap/tidb/plugin"
	"github.com/pingcap/tidb/privilege/privileges"
	"github.com/pingcap/tidb/server"
	"github.com/pingcap/tidb/session"
	"github.com/pingcap/tidb/sessionctx/binloginfo"
	"github.com/pingcap/tidb/sessionctx/variable"
	"github.com/pingcap/tidb/statistics"
	"github.com/pingcap/tidb/statistics/handle"
	kvstore "github.com/pingcap/tidb/store"
	"github.com/pingcap/tidb/store/mockstore"
	"github.com/pingcap/tidb/store/tikv"
	"github.com/pingcap/tidb/store/tikv/gcworker"
	"github.com/pingcap/tidb/util/domainutil"
	"github.com/pingcap/tidb/util/logutil"
	"github.com/pingcap/tidb/util/memory"
	"github.com/pingcap/tidb/util/printer"
	"github.com/pingcap/tidb/util/signal"
	"github.com/pingcap/tidb/util/storeutil"
	"github.com/pingcap/tidb/util/sys/linux"
	"github.com/pingcap/tidb/util/systimemon"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/push"
	"go.uber.org/automaxprocs/maxprocs"
	"go.uber.org/zap"
	"google.golang.org/grpc/grpclog"
)

// Flag Names
const (
	nmVersion          = "V"
	nmConfig           = "config"
	nmConfigCheck      = "config-check"
	nmConfigStrict     = "config-strict"
	nmStore            = "store"
	nmStorePath        = "path"
	nmHost             = "host"
	nmAdvertiseAddress = "advertise-address"
	nmPort             = "P"
	nmCors             = "cors"
	nmSocket           = "socket"
	nmEnableBinlog     = "enable-binlog"
	nmRunDDL           = "run-ddl"
	nmLogLevel         = "L"
	nmLogFile          = "log-file"
	nmLogSlowQuery     = "log-slow-query"
	nmReportStatus     = "report-status"
	nmStatusHost       = "status-host"
	nmStatusPort       = "status"
	nmMetricsAddr      = "metrics-addr"
	nmMetricsInterval  = "metrics-interval"
	nmDdlLease         = "lease"
	nmTokenLimit       = "token-limit"
	nmPluginDir        = "plugin-dir"
	nmPluginLoad       = "plugin-load"
	nmRepairMode       = "repair-mode"
	nmRepairList       = "repair-list"

	nmProxyProtocolNetworks      = "proxy-protocol-networks"
	nmProxyProtocolHeaderTimeout = "proxy-protocol-header-timeout"
	nmAffinityCPU                = "affinity-cpus"
)

var (
	version      = flagBoolean(nmVersion, false, "print version information and exit")
	configPath   = flag.String(nmConfig, "", "config file path")
	configCheck  = flagBoolean(nmConfigCheck, false, "check config file validity and exit")
	configStrict = flagBoolean(nmConfigStrict, false, "enforce config file validity")

	// Base
	store            = flag.String(nmStore, "mocktikv", "registered store name, [tikv, mocktikv]")
	storePath        = flag.String(nmStorePath, "/tmp/tidb", "tidb storage path")
	host             = flag.String(nmHost, "0.0.0.0", "tidb server host")
	advertiseAddress = flag.String(nmAdvertiseAddress, "", "tidb server advertise IP")
	port             = flag.String(nmPort, "4000", "tidb server port")
	cors             = flag.String(nmCors, "", "tidb server allow cors origin")
	socket           = flag.String(nmSocket, "", "The socket file to use for connection.")
	enableBinlog     = flagBoolean(nmEnableBinlog, false, "enable generate binlog")
	runDDL           = flagBoolean(nmRunDDL, true, "run ddl worker on this tidb-server")
	ddlLease         = flag.String(nmDdlLease, "45s", "schema lease duration, very dangerous to change only if you know what you do")
	tokenLimit       = flag.Int(nmTokenLimit, 1000, "the limit of concurrent executed sessions")
	pluginDir        = flag.String(nmPluginDir, "/data/deploy/plugin", "the folder that hold plugin")
	pluginLoad       = flag.String(nmPluginLoad, "", "wait load plugin name(separated by comma)")
	affinityCPU      = flag.String(nmAffinityCPU, "", "affinity cpu (cpu-no. separated by comma, e.g. 1,2,3)")
	repairMode       = flagBoolean(nmRepairMode, false, "enable admin repair mode")
	repairList       = flag.String(nmRepairList, "", "admin repair table list")

	// Log
	logLevel     = flag.String(nmLogLevel, "info", "log level: info, debug, warn, error, fatal")
	logFile      = flag.String(nmLogFile, "", "log file path")
	logSlowQuery = flag.String(nmLogSlowQuery, "", "slow query file path")

	// Status
	reportStatus    = flagBoolean(nmReportStatus, true, "If enable status report HTTP service.")
	statusHost      = flag.String(nmStatusHost, "0.0.0.0", "tidb server status host")
	statusPort      = flag.String(nmStatusPort, "10080", "tidb server status port")
	metricsAddr     = flag.String(nmMetricsAddr, "", "prometheus pushgateway address, leaves it empty will disable prometheus push.")
	metricsInterval = flag.Uint(nmMetricsInterval, 15, "prometheus client push interval in second, set \"0\" to disable prometheus push.")

	// PROXY Protocol
	proxyProtocolNetworks      = flag.String(nmProxyProtocolNetworks, "", "proxy protocol networks allowed IP or *, empty mean disable proxy protocol support")
	proxyProtocolHeaderTimeout = flag.Uint(nmProxyProtocolHeaderTimeout, 5, "proxy protocol header read timeout, unit is second.")
)

var (
	storage  kv.Storage
	dom      *domain.Domain
	svr      *server.Server
	graceful bool
)

func main() {
	flag.Parse()
	if *version {
		fmt.Println(printer.GetTiDBInfo())
		os.Exit(0)
	}
	registerStores()
	registerMetrics()
	config.InitializeConfig(*configPath, *configCheck, *configStrict, reloadConfig, overrideConfig)
	if err := config.GetGlobalConfig().Valid(); err != nil {
		fmt.Fprintln(os.Stderr, "invalid config", err)
		os.Exit(1)
	}
	if *configCheck {
		fmt.Println("config check successful")
		os.Exit(0)
	}
	setGlobalVars()
	setCPUAffinity()
	setupLog()
	setupTracing() // Should before createServer and after setup config.
	printInfo()
	setupBinlogClient()
	setupMetrics()
	createStoreAndDomain()
	createServer()
	signal.SetupSignalHandler(serverShutdown)
	runServer()
	cleanup()
	syncLog()
}

func exit() {
	syncLog()
	os.Exit(0)
}

func syncLog() {
	if err := log.Sync(); err != nil {
		fmt.Fprintln(os.Stderr, "sync log err:", err)
		os.Exit(1)
	}
}

func setCPUAffinity() {
	if affinityCPU == nil || len(*affinityCPU) == 0 {
		return
	}
	var cpu []int
	for _, af := range strings.Split(*affinityCPU, ",") {
		af = strings.TrimSpace(af)
		if len(af) > 0 {
			c, err := strconv.Atoi(af)
			if err != nil {
				fmt.Fprintf(os.Stderr, "wrong affinity cpu config: %s", *affinityCPU)
				exit()
			}
			cpu = append(cpu, c)
		}
	}
	err := linux.SetAffinity(cpu)
	if err != nil {
		fmt.Fprintf(os.Stderr, "set cpu affinity failure: %v", err)
		exit()
	}
	runtime.GOMAXPROCS(len(cpu))
}

func registerStores() {
	err := kvstore.Register("tikv", tikv.Driver{})
	terror.MustNil(err)
	tikv.NewGCHandlerFunc = gcworker.NewGCWorker
	err = kvstore.Register("mocktikv", mockstore.MockDriver{})
	terror.MustNil(err)
}

func registerMetrics() {
	metrics.RegisterMetrics()
}

func createStoreAndDomain() {
	cfg := config.GetGlobalConfig()
	fullPath := fmt.Sprintf("%s://%s", cfg.Store, cfg.Path)
	var err error
	storage, err = kvstore.New(fullPath)
	terror.MustNil(err)
	// Bootstrap a session to load information schema.
	dom, err = session.BootstrapSession(storage)
	terror.MustNil(err)
}

func setupBinlogClient() {
	cfg := config.GetGlobalConfig()
	if !cfg.Binlog.Enable {
		return
	}

	if cfg.Binlog.IgnoreError {
		binloginfo.SetIgnoreError(true)
	}

	var (
		client *pumpcli.PumpsClient
		err    error
	)

	securityOption := pd.SecurityOption{
		CAPath:   cfg.Security.ClusterSSLCA,
		CertPath: cfg.Security.ClusterSSLCert,
		KeyPath:  cfg.Security.ClusterSSLKey,
	}

	if len(cfg.Binlog.BinlogSocket) == 0 {
		client, err = pumpcli.NewPumpsClient(cfg.Path, cfg.Binlog.Strategy, parseDuration(cfg.Binlog.WriteTimeout), securityOption)
	} else {
		client, err = pumpcli.NewLocalPumpsClient(cfg.Path, cfg.Binlog.BinlogSocket, parseDuration(cfg.Binlog.WriteTimeout), securityOption)
	}

	terror.MustNil(err)

	err = pumpcli.InitLogger(cfg.Log.ToLogConfig())
	terror.MustNil(err)

	binloginfo.SetPumpsClient(client)
	log.Info("tidb-server", zap.Bool("create pumps client success, ignore binlog error", cfg.Binlog.IgnoreError))
}

// Prometheus push.
const zeroDuration = time.Duration(0)

// pushMetric pushes metrics in background.
func pushMetric(addr string, interval time.Duration) {
	if interval == zeroDuration || len(addr) == 0 {
		log.Info("disable Prometheus push client")
		return
	}
	log.Info("start prometheus push client", zap.String("server addr", addr), zap.String("interval", interval.String()))
	go prometheusPushClient(addr, interval)
}

// prometheusPushClient pushes metrics to Prometheus Pushgateway.
func prometheusPushClient(addr string, interval time.Duration) {
	// TODO: TiDB do not have uniq name, so we use host+port to compose a name.
	job := "tidb"
	pusher := push.New(addr, job)
	pusher = pusher.Gatherer(prometheus.DefaultGatherer)
	pusher = pusher.Grouping("instance", instanceName())
	for {
		err := pusher.Push()
		if err != nil {
			log.Error("could not push metrics to prometheus pushgateway", zap.String("err", err.Error()))
		}
		time.Sleep(interval)
	}
}

func instanceName() string {
	cfg := config.GetGlobalConfig()
	hostname, err := os.Hostname()
	if err != nil {
		return "unknown"
	}
	return fmt.Sprintf("%s_%d", hostname, cfg.Port)
}

// parseDuration parses lease argument string.
func parseDuration(lease string) time.Duration {
	dur, err := time.ParseDuration(lease)
	if err != nil {
		dur, err = time.ParseDuration(lease + "s")
	}
	if err != nil || dur < 0 {
		log.Fatal("invalid lease duration", zap.String("lease", lease))
	}
	return dur
}

func flagBoolean(name string, defaultVal bool, usage string) *bool {
	if !defaultVal {
		// Fix #4125, golang do not print default false value in usage, so we append it.
		usage = fmt.Sprintf("%s (default false)", usage)
		return flag.Bool(name, defaultVal, usage)
	}
	return flag.Bool(name, defaultVal, usage)
}

func reloadConfig(nc, c *config.Config) {
	// Just a part of config items need to be reload explicitly.
	// Some of them like OOMAction are always used by getting from global config directly
	// like config.GetGlobalConfig().OOMAction.
	// These config items will become available naturally after the global config pointer
	// is updated in function ReloadGlobalConfig.
	if nc.Performance.MaxMemory != c.Performance.MaxMemory {
		plannercore.PreparedPlanCacheMaxMemory.Store(nc.Performance.MaxMemory)
	}
	if nc.Performance.CrossJoin != c.Performance.CrossJoin {
		plannercore.AllowCartesianProduct.Store(nc.Performance.CrossJoin)
	}
	if nc.Performance.FeedbackProbability != c.Performance.FeedbackProbability {
		statistics.FeedbackProbability.Store(nc.Performance.FeedbackProbability)
	}
	if nc.Performance.QueryFeedbackLimit != c.Performance.QueryFeedbackLimit {
		handle.MaxQueryFeedbackCount.Store(int64(nc.Performance.QueryFeedbackLimit))
	}
	if nc.Performance.PseudoEstimateRatio != c.Performance.PseudoEstimateRatio {
		statistics.RatioOfPseudoEstimate.Store(nc.Performance.PseudoEstimateRatio)
	}
	if nc.TiKVClient.StoreLimit != c.TiKVClient.StoreLimit {
		storeutil.StoreLimit.Store(nc.TiKVClient.StoreLimit)
	}
}

// overrideConfig considers command arguments and overrides some config items in the Config.
func overrideConfig(cfg *config.Config) {
	actualFlags := make(map[string]bool)
	flag.Visit(func(f *flag.Flag) {
		actualFlags[f.Name] = true
	})

	// Base
	if actualFlags[nmHost] {
		cfg.Host = *host
	}
	if actualFlags[nmAdvertiseAddress] {
		cfg.AdvertiseAddress = *advertiseAddress
	}
	if len(cfg.AdvertiseAddress) == 0 {
		cfg.AdvertiseAddress = cfg.Host
	}
	var err error
	if actualFlags[nmPort] {
		var p int
		p, err = strconv.Atoi(*port)
		terror.MustNil(err)
		cfg.Port = uint(p)
	}
	if actualFlags[nmCors] {
		fmt.Println(*cors)
		cfg.Cors = *cors
	}
	if actualFlags[nmStore] {
		cfg.Store = *store
	}
	if actualFlags[nmStorePath] {
		cfg.Path = *storePath
	}
	if actualFlags[nmSocket] {
		cfg.Socket = *socket
	}
	if actualFlags[nmEnableBinlog] {
		cfg.Binlog.Enable = *enableBinlog
	}
	if actualFlags[nmRunDDL] {
		cfg.RunDDL = *runDDL
	}
	if actualFlags[nmDdlLease] {
		cfg.Lease = *ddlLease
	}
	if actualFlags[nmTokenLimit] {
		cfg.TokenLimit = uint(*tokenLimit)
	}
	if actualFlags[nmPluginLoad] {
		cfg.Plugin.Load = *pluginLoad
	}
	if actualFlags[nmPluginDir] {
		cfg.Plugin.Dir = *pluginDir
	}
	if actualFlags[nmRepairMode] {
		cfg.RepairMode = *repairMode
	}
	if actualFlags[nmRepairList] {
		if cfg.RepairMode {
			cfg.RepairTableList = stringToList(*repairList)
		}
	}

	// Log
	if actualFlags[nmLogLevel] {
		cfg.Log.Level = *logLevel
	}
	if actualFlags[nmLogFile] {
		cfg.Log.File.Filename = *logFile
	}
	if actualFlags[nmLogSlowQuery] {
		cfg.Log.SlowQueryFile = *logSlowQuery
	}

	// Status
	if actualFlags[nmReportStatus] {
		cfg.Status.ReportStatus = *reportStatus
	}
	if actualFlags[nmStatusHost] {
		cfg.Status.StatusHost = *statusHost
	}
	if actualFlags[nmStatusPort] {
		var p int
		p, err = strconv.Atoi(*statusPort)
		terror.MustNil(err)
		cfg.Status.StatusPort = uint(p)
	}
	if actualFlags[nmMetricsAddr] {
		cfg.Status.MetricsAddr = *metricsAddr
	}
	if actualFlags[nmMetricsInterval] {
		cfg.Status.MetricsInterval = *metricsInterval
	}

	// PROXY Protocol
	if actualFlags[nmProxyProtocolNetworks] {
		cfg.ProxyProtocol.Networks = *proxyProtocolNetworks
	}
	if actualFlags[nmProxyProtocolHeaderTimeout] {
		cfg.ProxyProtocol.HeaderTimeout = *proxyProtocolHeaderTimeout
	}
}

func setGlobalVars() {
	cfg := config.GetGlobalConfig()

	ddlLeaseDuration := parseDuration(cfg.Lease)
	session.SetSchemaLease(ddlLeaseDuration)
	runtime.GOMAXPROCS(int(cfg.Performance.MaxProcs))
	statsLeaseDuration := parseDuration(cfg.Performance.StatsLease)
	session.SetStatsLease(statsLeaseDuration)
	bindinfo.Lease = parseDuration(cfg.Performance.BindInfoLease)
	domain.RunAutoAnalyze = cfg.Performance.RunAutoAnalyze
	statistics.FeedbackProbability.Store(cfg.Performance.FeedbackProbability)
	handle.MaxQueryFeedbackCount.Store(int64(cfg.Performance.QueryFeedbackLimit))
	statistics.RatioOfPseudoEstimate.Store(cfg.Performance.PseudoEstimateRatio)
	ddl.RunWorker = cfg.RunDDL
	if cfg.SplitTable {
		atomic.StoreUint32(&ddl.EnableSplitTableRegion, 1)
	}
	plannercore.AllowCartesianProduct.Store(cfg.Performance.CrossJoin)
	privileges.SkipWithGrant = cfg.Security.SkipGrantTable
	kv.TxnTotalSizeLimit = cfg.Performance.TxnTotalSizeLimit

	priority := mysql.Str2Priority(cfg.Performance.ForcePriority)
	variable.ForcePriority = int32(priority)
	variable.SysVars[variable.TiDBForcePriority].Value = mysql.Priority2Str[priority]

	variable.SysVars[variable.TIDBMemQuotaQuery].Value = strconv.FormatInt(cfg.MemQuotaQuery, 10)
	variable.SysVars["lower_case_table_names"].Value = strconv.Itoa(cfg.LowerCaseTableNames)
	variable.SysVars[variable.LogBin].Value = variable.BoolToIntStr(config.GetGlobalConfig().Binlog.Enable)

	variable.SysVars[variable.Port].Value = fmt.Sprintf("%d", cfg.Port)
	variable.SysVars[variable.Socket].Value = cfg.Socket
	variable.SysVars[variable.DataDir].Value = cfg.Path
	variable.SysVars[variable.TiDBSlowQueryFile].Value = cfg.Log.SlowQueryFile

	// For CI environment we default enable prepare-plan-cache.
	plannercore.SetPreparedPlanCache(config.CheckTableBeforeDrop || cfg.PreparedPlanCache.Enabled)
	if plannercore.PreparedPlanCacheEnabled() {
		plannercore.PreparedPlanCacheCapacity = cfg.PreparedPlanCache.Capacity
		plannercore.PreparedPlanCacheMemoryGuardRatio = cfg.PreparedPlanCache.MemoryGuardRatio
		if plannercore.PreparedPlanCacheMemoryGuardRatio < 0.0 || plannercore.PreparedPlanCacheMemoryGuardRatio > 1.0 {
			plannercore.PreparedPlanCacheMemoryGuardRatio = 0.1
		}
		plannercore.PreparedPlanCacheMaxMemory.Store(cfg.Performance.MaxMemory)
		total, err := memory.MemTotal()
		terror.MustNil(err)
		if plannercore.PreparedPlanCacheMaxMemory.Load() > total || plannercore.PreparedPlanCacheMaxMemory.Load() <= 0 {
			plannercore.PreparedPlanCacheMaxMemory.Store(total)
		}
	}

	tikv.CommitMaxBackoff = int(parseDuration(cfg.TiKVClient.CommitTimeout).Seconds() * 1000)
	tikv.RegionCacheTTLSec = int64(cfg.TiKVClient.RegionCacheTTL)
	domainutil.RepairInfo.SetRepairMode(cfg.RepairMode)
	domainutil.RepairInfo.SetRepairTableList(cfg.RepairTableList)
}

func setupLog() {
	cfg := config.GetGlobalConfig()
	err := logutil.InitZapLogger(cfg.Log.ToLogConfig())
	terror.MustNil(err)

	err = logutil.InitLogger(cfg.Log.ToLogConfig())
	terror.MustNil(err)

	// Disable automaxprocs log
	nopLog := func(string, ...interface{}) {}
	_, err = maxprocs.Set(maxprocs.Logger(nopLog))
	terror.MustNil(err)

	if len(os.Getenv("GRPC_DEBUG")) > 0 {
		grpclog.SetLoggerV2(grpclog.NewLoggerV2WithVerbosity(os.Stderr, os.Stderr, os.Stderr, 999))
	} else {
		grpclog.SetLoggerV2(grpclog.NewLoggerV2(ioutil.Discard, ioutil.Discard, os.Stderr))
	}
}

func printInfo() {
	// Make sure the TiDB info is always printed.
	level := log.GetLevel()
	log.SetLevel(zap.InfoLevel)
	printer.PrintTiDBInfo()
	log.SetLevel(level)
}

func createServer() {
	cfg := config.GetGlobalConfig()
	driver := server.NewTiDBDriver(storage)
	var err error
	svr, err = server.NewServer(cfg, driver)
	// Both domain and storage have started, so we have to clean them before exiting.
	terror.MustNil(err, closeDomainAndStorage)
	svr.SetDomain(dom)
	go dom.ExpensiveQueryHandle().SetSessionManager(svr).Run()
	dom.InfoSyncer().SetSessionManager(svr)
}

func serverShutdown(isgraceful bool) {
	if isgraceful {
		graceful = true
	}
	svr.Close()
}

func setupMetrics() {
	cfg := config.GetGlobalConfig()
	// Enable the mutex profile, 1/10 of mutex blocking event sampling.
	runtime.SetMutexProfileFraction(10)
	systimeErrHandler := func() {
		metrics.TimeJumpBackCounter.Inc()
	}
	callBackCount := 0
	successCallBack := func() {
		callBackCount++
		// It is called back by the monitor every second; we increase metrics.KeepAliveCounter every 5s.
		if callBackCount >= 5 {
			callBackCount = 0
			metrics.KeepAliveCounter.Inc()
		}
	}
	go systimemon.StartMonitor(time.Now, systimeErrHandler, successCallBack)

	pushMetric(cfg.Status.MetricsAddr, time.Duration(cfg.Status.MetricsInterval)*time.Second)
}

func setupTracing() {
	cfg := config.GetGlobalConfig()
	tracingCfg := cfg.OpenTracing.ToTracingConfig()
	tracingCfg.ServiceName = "TiDB"
	tracer, _, err := tracingCfg.NewTracer()
	if err != nil {
		log.Fatal("setup jaeger tracer failed", zap.String("error message", err.Error()))
	}
	opentracing.SetGlobalTracer(tracer)
}

func runServer() {
	err := svr.Run()
	terror.MustNil(err)
}

func closeDomainAndStorage() {
	atomic.StoreUint32(&tikv.ShuttingDown, 1)
	dom.Close()
	err := storage.Close()
	terror.Log(errors.Trace(err))
}

func cleanup() {
	if graceful {
		svr.GracefulDown(context.Background(), nil)
	} else {
		svr.TryGracefulDown()
	}
	plugin.Shutdown(context.Background())
	closeDomainAndStorage()
}

func stringToList(repairString string) []string {
	if len(repairString) <= 0 {
		return []string{}
	}
	if repairString[0] == '[' && repairString[len(repairString)-1] == ']' {
		repairString = repairString[1 : len(repairString)-1]
	}
	return strings.FieldsFunc(repairString, func(r rune) bool {
		return r == ',' || r == ' ' || r == '"'
	})
}
[ "\"GRPC_DEBUG\"" ]
[]
[ "GRPC_DEBUG" ]
[]
["GRPC_DEBUG"]
go
1
0
db_test.go
// Copyright (c) 2019 Faye Amacker. All rights reserved.
// Use of this source code is governed by Apache License 2.0 found in the LICENSE file.

package main

import (
	"bytes"
	"context"
	"database/sql"
	"os"
	"reflect"
	"sort"
	"testing"
	"time"

	"github.com/stretchr/testify/suite"
)

type DBTestSuite struct {
	suite.Suite
	dbStore
}

type credentialsByID []credential

func (c credentialsByID) Len() int      { return len(c) }
func (c credentialsByID) Swap(i, j int) { c[i], c[j] = c[j], c[i] }
func (c credentialsByID) Less(i, j int) bool {
	return bytes.Compare(c[i].CredentialID, c[j].CredentialID) <= 0
}

type usersByID []user

func (u usersByID) Len() int      { return len(u) }
func (u usersByID) Swap(i, j int) { u[i], u[j] = u[j], u[i] }
func (u usersByID) Less(i, j int) bool {
	return bytes.Compare(u[i].UserID, u[j].UserID) <= 0
}

var (
	userNotExist = user{
		UserName: "user_not_exist",
	}
	// User with one credential
	user1 = user{
		UserID:      []byte{117, 115, 101, 104, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		UserName:    "User1",
		DisplayName: "User1 display name",
		CredentialIDs: [][]byte{
			{99, 114, 101, 100, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		},
	}
	// User with two credentials
	user2 = user{
		UserID:      []byte{117, 115, 101, 104, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		UserName:    "User2",
		DisplayName: "User2 display name",
		CredentialIDs: [][]byte{
			{99, 114, 101, 100, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
			{99, 114, 101, 100, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
		},
	}
	credentialNotExist = credential{
		CredentialID: []byte{99, 114, 101, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		UserID:       []byte{117, 115, 101, 104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
	}
	credential1 = credential{
		CredentialID: []byte{99, 114, 101, 100, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		UserID:       []byte{117, 115, 101, 104, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		Counter:      1,
		CoseKey:      []byte{1, 2, 3},
	}
	credential2 = credential{
		CredentialID: []byte{99, 114, 101, 100, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		UserID:       []byte{117, 115, 101, 104, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		Counter:      2,
		CoseKey:      []byte{1, 2, 3},
	}
	credential3 = credential{
		CredentialID: []byte{99, 114, 101, 100, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
		UserID:       []byte{117, 115, 101, 104, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		Counter:      3,
		CoseKey:      []byte{1, 2, 3},
	}

	users       = []user{user1, user2}
	credentials = []credential{credential1, credential2, credential3}
)

func (suite *DBTestSuite) SetupSuite() {
	connString := os.Getenv("DB_CONNSTRING")
	if connString == "" {
		panic("Missing env variable DB_CONNSTRING")
	}
	db, err := sql.Open("postgres", connString)
	if err != nil {
		panic(err)
	}
	if err = db.Ping(); err != nil {
		panic(err)
	}
	suite.dbStore.DB = db
}

func (suite *DBTestSuite) TearDownSuite() {
	suite.dbStore.Close()
}

func (suite *DBTestSuite) SetupTest() {
	_, err := suite.dbStore.Exec("DELETE FROM credentials")
	if err != nil {
		panic(err)
	}
	_, err = suite.dbStore.Exec("DELETE FROM users")
	if err != nil {
		panic(err)
	}
}

func (suite *DBTestSuite) seedUserCredentialTables(ctx context.Context) {
	insertUserStmt, err := suite.dbStore.PrepareContext(ctx, "INSERT INTO users (id, username, display_name) VALUES ($1, $2, $3)")
	if err != nil {
		panic(err)
	}
	insertCredentialStmt, err := suite.dbStore.PrepareContext(ctx, "INSERT INTO credentials (id, user_id, counter, cose_key, registered_at, loggedin_at) VALUES ($1, $2, $3, $4, $5, $6)")
	if err != nil {
		panic(err)
	}
	for _, u := range users {
		_, err := insertUserStmt.ExecContext(ctx, u.UserID, u.UserName, u.DisplayName)
		if err != nil {
			panic(err)
		}
	}
	for _, c := range credentials {
		_, err := insertCredentialStmt.ExecContext(ctx, c.CredentialID, c.UserID, c.Counter, c.CoseKey, time.Now(), time.Now())
		if err != nil {
			panic(err)
		}
	}
}

func (suite *DBTestSuite) queryUserCredentialTables(ctx context.Context) ([]user, []credential) {
	var users []user
	rows, err := suite.dbStore.Query("SELECT id, username, display_name FROM users")
	if err != nil {
		panic(err)
	}
	defer rows.Close()
	for rows.Next() {
		var u user
		if err := rows.Scan(&u.UserID, &u.UserName, &u.DisplayName); err != nil {
			panic(err)
		}
		users = append(users, u)
	}
	if err := rows.Err(); err != nil {
		panic(err)
	}

	var credentials []credential
	rows, err = suite.dbStore.Query("SELECT id, user_id, counter, cose_key FROM credentials")
	if err != nil {
		panic(err)
	}
	defer rows.Close()
	for rows.Next() {
		var c credential
		if err := rows.Scan(&c.CredentialID, &c.UserID, &c.Counter, &c.CoseKey); err != nil {
			panic(err)
		}
		credentials = append(credentials, c)
		for i := 0; i < len(users); i++ {
			if bytes.Equal(c.UserID, users[i].UserID) {
				users[i].CredentialIDs = append(users[i].CredentialIDs, c.CredentialID)
			}
		}
	}
	if err := rows.Err(); err != nil {
		panic(err)
	}
	return users, credentials
}

func (suite *DBTestSuite) TestGetUser() {
	ctx := context.Background()
	suite.seedUserCredentialTables(ctx)

	user, err := suite.dbStore.getUser(ctx, userNotExist.UserName)
	if err == nil || err != errNoRecords {
		suite.T().Errorf("(*dbstore).getUser(%s) returns error %q, want error %q", userNotExist.UserName, err, errNoRecords)
	}
	if user != nil {
		suite.T().Errorf("(*dbstore).getUser(%s) returns user %+v, want nil", userNotExist.UserName, user)
	}

	for _, expectedUser := range users {
		user, err := suite.dbStore.getUser(ctx, expectedUser.UserName)
		if err != nil {
			suite.T().Errorf("(*dbstore).getUser(%s) returns error %q", expectedUser.UserName, err)
		}
		if !reflect.DeepEqual(*user, expectedUser) {
			suite.T().Errorf("(*dbstore).getUser(%s) returns user %+v, want %+v", expectedUser.UserName, user, expectedUser)
		}
	}
}

func (suite *DBTestSuite) TestGetCredential() {
	ctx := context.Background()
	suite.seedUserCredentialTables(ctx)

	c, err := suite.dbStore.getCredential(ctx, credentialNotExist.UserID, credentialNotExist.CredentialID)
	if err == nil || err != errNoRecords {
		suite.T().Errorf("(*dbstore).getCredential(%v, %v) returns error %q, want error %q", credentialNotExist.UserID, credentialNotExist.CredentialID, err, errNoRecords)
	}
	if c != nil {
		suite.T().Errorf("(*dbstore).getCredential(%v, %v) returns credential %+v, want nil", credentialNotExist.UserID, credentialNotExist.CredentialID, c)
	}

	for _, expectedCredential := range credentials {
		c, err := suite.dbStore.getCredential(ctx, expectedCredential.UserID, expectedCredential.CredentialID)
		if err != nil {
			suite.T().Errorf("(*dbstore).getCredential(%v, %v) returns error %q", expectedCredential.UserID, expectedCredential.CredentialID, err)
		}
		if !reflect.DeepEqual(*c, expectedCredential) {
			suite.T().Errorf("(*dbstore).getCredential(%v, %v) returns credential %+v, want %+v", expectedCredential.UserID, expectedCredential.CredentialID, c, expectedCredential)
		}
	}
}

func (suite *DBTestSuite) TestAddUserCredential() {
	ctx := context.Background()

	// User does not exist, add user record and credential record
	if err := suite.dbStore.addUserCredential(ctx, &user2, &credential2); err != nil {
		suite.T().Errorf("(*dbstore).addUserAndCredential(%+v, %+v) returns error %q", user2, credential2, err)
		return
	}
	// User exists, add credential record
	if err := suite.dbStore.addUserCredential(ctx, &user2, &credential3); err != nil {
		suite.T().Errorf("(*dbstore).addUserAndCredential(%+v, %+v) returns error %q", user2, credential3, err)
		return
	}
	// User and credential exist, return error
	if err := suite.dbStore.addUserCredential(ctx, &user2, &credential3); err == nil || err != errRecordExists {
		suite.T().Errorf("(*dbstore).addUserAndCredential(%+v, %+v) returns error %q, want error %q", user2, credential3, err, errRecordExists)
		return
	}

	usersFromDB, credentialsFromDB := suite.queryUserCredentialTables(ctx)
	c := []credential{credential2, credential3}
	sort.Sort(credentialsByID(credentialsFromDB))
	sort.Sort(credentialsByID(c))
	if len(usersFromDB) != 1 {
		suite.T().Errorf("Got %d user records, want %d records", len(usersFromDB), 1)
	}
	if !reflect.DeepEqual(user2, usersFromDB[0]) {
		suite.T().Errorf("Got user %+v, want %+v", usersFromDB[0], user2)
	}
	if len(credentialsFromDB) != 2 {
		suite.T().Errorf("Got %d credential records, want %d records", len(credentialsFromDB), 2)
	}
	if !reflect.DeepEqual(c, credentialsFromDB) {
		suite.T().Errorf("Got credential %+v, want %+v", credentialsFromDB, c)
	}
}

func (suite *DBTestSuite) TestUpdateCredential() {
	ctx := context.Background()
	suite.seedUserCredentialTables(ctx)

	err := suite.dbStore.updateCredential(ctx, &credentialNotExist)
	if err == nil || err != errNoRecords {
		suite.T().Errorf("(*dbstore).updateCredential(%v, %v) returns error %q, want error %q", credentialNotExist.UserID, credentialNotExist.CredentialID, err, errNoRecords)
	}

	newCredentials := make([]credential, len(credentials))
	copy(newCredentials, credentials)
	for i := 0; i < len(newCredentials); i++ {
		newCredentials[i].Counter++
		err := suite.dbStore.updateCredential(ctx, &newCredentials[i])
		if err != nil {
			suite.T().Errorf("(*dbstore).updateCredential(%v) returns error %q", credentials[i], err)
		}
	}

	_, credentialsFromDB := suite.queryUserCredentialTables(ctx)
	sort.Sort(credentialsByID(credentialsFromDB))
	sort.Sort(credentialsByID(newCredentials))
	if !reflect.DeepEqual(credentialsFromDB, newCredentials) {
		suite.T().Errorf("Got credential %+v, want %+v", credentialsFromDB, newCredentials)
	}
}

func TestDBTestSuite(t *testing.T) {
	suite.Run(t, new(DBTestSuite))
}
[ "\"DB_CONNSTRING\"" ]
[]
[ "DB_CONNSTRING" ]
[]
["DB_CONNSTRING"]
go
1
0
src/digimix/utils/debug/gstreamer.py
# based on https://github.com/voc/voctomix/blob/3156f3546890e6ae8d379df17e5cc718eee14b15/vocto/debug.py
import logging
import os

from digimix.audio import Gst

log = logging.getLogger('vocto.debug')


def gst_generate_dot(pipeline, name):
    if 'GST_DEBUG_DUMP_DOT_DIR' in os.environ:
        dotfile = os.path.join(os.environ['GST_DEBUG_DUMP_DOT_DIR'], "%s.dot" % name)
        log.debug("Generating DOT image of pipeline '{name}' into '{file}'".format(name=name, file=dotfile))
        Gst.debug_bin_to_dot_file(pipeline, Gst.DebugGraphDetails(15), name)


gst_log_messages_lastmessage = None
gst_log_messages_lastlevel = None
gst_log_messages_repeat = 0


def gst_log_messages(level):
    gst_log = logging.getLogger('Gst')

    def log(level, msg):
        if level == Gst.DebugLevel.WARNING:
            gst_log.warning(msg)
        if level == Gst.DebugLevel.FIXME:
            gst_log.warning(msg)
        elif level == Gst.DebugLevel.ERROR:
            gst_log.error(msg)
        elif level == Gst.DebugLevel.INFO:
            gst_log.info(msg)
        elif level == Gst.DebugLevel.DEBUG:
            gst_log.debug(msg)

    def log_function(category, level, file, function, line, object, message, *user_data):
        global gst_log_messages_lastmessage, gst_log_messages_lastlevel, gst_log_messages_repeat

        msg = message.get()
        if gst_log_messages_lastmessage != msg:
            if gst_log_messages_repeat > 2:
                log(gst_log_messages_lastlevel,
                    "%s [REPEATING %d TIMES]" % (gst_log_messages_lastmessage, gst_log_messages_repeat))

            gst_log_messages_lastmessage = msg
            gst_log_messages_repeat = 0
            gst_log_messages_lastlevel = level
            log(level,
                "%s: %s (in function %s() in file %s:%d)" % (object.name if object else "", msg, function, file, line))
        else:
            gst_log_messages_repeat += 1

    Gst.debug_remove_log_function(None)
    Gst.debug_add_log_function(log_function, None)
    Gst.debug_set_default_threshold(level)
    Gst.debug_set_active(True)
[]
[]
[ "GST_DEBUG_DUMP_DOT_DIR" ]
[]
["GST_DEBUG_DUMP_DOT_DIR"]
python
1
0
cinder/tests/test_migrations.py
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010-2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""
Tests for database migrations. This test case reads the configuration
file test_migrations.conf for database connection settings to use in the
tests. For each connection found in the config file, the test case runs a
series of test cases to ensure that migrations work properly both
upgrading and downgrading, and that no data loss occurs if possible.
"""

import ConfigParser
import os
import subprocess
import urlparse
import uuid

from migrate.versioning import repository
import sqlalchemy
import testtools

import cinder.db.migration as migration
import cinder.db.sqlalchemy.migrate_repo
from cinder.db.sqlalchemy.migration import versioning_api as migration_api
from cinder.openstack.common import log as logging
from cinder import test

LOG = logging.getLogger('cinder.tests.test_migrations')


def _get_connect_string(backend,
                        user="openstack_citest",
                        passwd="openstack_citest",
                        database="openstack_citest"):
    """Return connect string.

    Try to get a connection with a very specific set of values, if we get
    these then we'll run the tests, otherwise they are skipped.
    """
    if backend == "postgres":
        backend = "postgresql+psycopg2"

    return ("%(backend)s://%(user)s:%(passwd)s@localhost/%(database)s" %
            {'backend': backend, 'user': user, 'passwd': passwd,
             'database': database})


def _is_mysql_avail(**kwargs):
    return _is_backend_avail('mysql', **kwargs)


def _is_backend_avail(backend,
                      user="openstack_citest",
                      passwd="openstack_citest",
                      database="openstack_citest"):
    try:
        if backend == "mysql":
            connect_uri = _get_connect_string("mysql", user=user,
                                              passwd=passwd,
                                              database=database)
        elif backend == "postgres":
            connect_uri = _get_connect_string("postgres", user=user,
                                              passwd=passwd,
                                              database=database)
        engine = sqlalchemy.create_engine(connect_uri)
        connection = engine.connect()
    except Exception:
        # intentionally catch all to handle exceptions even if we don't
        # have any backend code loaded.
        LOG.exception("Backend %s is not available", backend)
        return False
    else:
        connection.close()
        engine.dispose()
        return True


def _have_mysql():
    present = os.environ.get('NOVA_TEST_MYSQL_PRESENT')
    if present is None:
        return _is_backend_avail('mysql')
    return present.lower() in ('', 'true')


def get_table(engine, name):
    """Returns an sqlalchemy table dynamically from db.

    Needed because the models don't work for us in migrations
    as models will be far out of sync with the current data.
    """
    metadata = sqlalchemy.schema.MetaData()
    metadata.bind = engine
    return sqlalchemy.Table(name, metadata, autoload=True)


class TestMigrations(test.TestCase):
    """Test sqlalchemy-migrate migrations."""

    DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__),
                                       'test_migrations.conf')
    # Test machines can set the CINDER_TEST_MIGRATIONS_CONF variable
    # to override the location of the config file for migration testing
    CONFIG_FILE_PATH = os.environ.get('CINDER_TEST_MIGRATIONS_CONF',
                                      DEFAULT_CONFIG_FILE)
    MIGRATE_FILE = cinder.db.sqlalchemy.migrate_repo.__file__
    REPOSITORY = repository.Repository(
        os.path.abspath(os.path.dirname(MIGRATE_FILE)))

    def setUp(self):
        super(TestMigrations, self).setUp()

        self.snake_walk = False
        self.test_databases = {}

        # Load test databases from the config file. Only do this
        # once. No need to re-run this on each test...
        LOG.debug('config_path is %s' % TestMigrations.CONFIG_FILE_PATH)
        if not self.test_databases:
            if os.path.exists(TestMigrations.CONFIG_FILE_PATH):
                cp = ConfigParser.RawConfigParser()
                try:
                    cp.read(TestMigrations.CONFIG_FILE_PATH)
                    defaults = cp.defaults()
                    for key, value in defaults.items():
                        self.test_databases[key] = value
                    self.snake_walk = cp.getboolean('walk_style',
                                                    'snake_walk')
                except ConfigParser.ParsingError as e:
                    self.fail("Failed to read test_migrations.conf config "
                              "file. Got error: %s" % e)
            else:
                self.fail("Failed to find test_migrations.conf config "
                          "file.")

        self.engines = {}
        for key, value in self.test_databases.items():
            self.engines[key] = sqlalchemy.create_engine(value)

        # We start each test case with a completely blank slate.
        self._reset_databases()

    def tearDown(self):
        # We destroy the test data store between each test case,
        # and recreate it, which ensures that we have no side-effects
        # from the tests
        self._reset_databases()
        super(TestMigrations, self).tearDown()

    def _reset_databases(self):
        def execute_cmd(cmd=None):
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT, shell=True)
            output = proc.communicate()[0]
            LOG.debug(output)
            self.assertEqual(0, proc.returncode)

        for key, engine in self.engines.items():
            conn_string = self.test_databases[key]
            conn_pieces = urlparse.urlparse(conn_string)
            engine.dispose()
            if conn_string.startswith('sqlite'):
                # We can just delete the SQLite database, which is
                # the easiest and cleanest solution
                db_path = conn_pieces.path.strip('/')
                if os.path.exists(db_path):
                    os.unlink(db_path)
                # No need to recreate the SQLite DB. SQLite will
                # create it for us if it's not there...
            elif conn_string.startswith('mysql'):
                # We can execute the MySQL client to destroy and re-create
                # the MYSQL database, which is easier and less error-prone
                # than using SQLAlchemy to do this via MetaData...trust me.
                database = conn_pieces.path.strip('/')
                loc_pieces = conn_pieces.netloc.split('@')
                host = loc_pieces[1]
                auth_pieces = loc_pieces[0].split(':')
                user = auth_pieces[0]
                password = ""
                if len(auth_pieces) > 1:
                    if auth_pieces[1].strip():
                        password = "-p\"%s\"" % auth_pieces[1]
                sql = ("drop database if exists %(database)s; create database "
                       "%(database)s;") % {'database': database}
                cmd = ("mysql -u \"%(user)s\" %(password)s -h %(host)s "
                       "-e \"%(sql)s\"") % {'user': user, 'password': password,
                                            'host': host, 'sql': sql}
                execute_cmd(cmd)
            elif conn_string.startswith('postgresql'):
                database = conn_pieces.path.strip('/')
                loc_pieces = conn_pieces.netloc.split('@')
                host = loc_pieces[1]

                auth_pieces = loc_pieces[0].split(':')
                user = auth_pieces[0]
                password = ""
                if len(auth_pieces) > 1:
                    password = auth_pieces[1].strip()
                # note(krtaylor): File creation problems with tests in
                # venv using .pgpass authentication, changed to
                # PGPASSWORD environment variable which is no longer
                # planned to be deprecated
                os.environ['PGPASSWORD'] = password
                os.environ['PGUSER'] = user
                # note(boris-42): We must create and drop database, we can't
                # drop database which we have connected to, so for such
                # operations there is a special database template1.
                sqlcmd = ("psql -w -U %(user)s -h %(host)s -c"
                          " '%(sql)s' -d template1")
                sql = ("drop database if exists %(database)s;") % {'database': database}
                droptable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
                execute_cmd(droptable)
                sql = ("create database %(database)s;") % {'database': database}
                createtable = sqlcmd % {'user': user, 'host': host, 'sql': sql}
                execute_cmd(createtable)
                os.unsetenv('PGPASSWORD')
                os.unsetenv('PGUSER')

    def test_walk_versions(self):
        """Test walk versions.

        Walks all version scripts for each tested database, ensuring
        that there are no errors in the version scripts for each engine
        """
        for key, engine in self.engines.items():
            self._walk_versions(engine, self.snake_walk)

    def test_mysql_connect_fail(self):
        """Test for mysql connection failure.

        Test that we can trigger a mysql connection failure and we fail
        gracefully to ensure we don't break people without mysql
        """
        if _is_mysql_avail(user="openstack_cifail"):
            self.fail("Shouldn't have connected")

    @testtools.skipUnless(_have_mysql(), "mysql not available")
    def test_mysql_innodb(self):
        """Test that table creation on mysql only builds InnoDB tables."""
        # add this to the global lists to make reset work with it, it's removed
        # automatically in tearDown so no need to clean it up here.
        connect_string = _get_connect_string('mysql')
        engine = sqlalchemy.create_engine(connect_string)
        self.engines["mysqlcitest"] = engine
        self.test_databases["mysqlcitest"] = connect_string

        # build a fully populated mysql database with all the tables
        self._reset_databases()
        self._walk_versions(engine, False, False)

        uri = _get_connect_string('mysql', database="information_schema")
        connection = sqlalchemy.create_engine(uri).connect()

        # sanity check
        total = connection.execute("SELECT count(*) "
                                   "from information_schema.TABLES "
                                   "where TABLE_SCHEMA='openstack_citest'")
        self.assertGreater(total.scalar(), 0,
                           msg="No tables found. Wrong schema?")

        noninnodb = connection.execute("SELECT count(*) "
                                       "from information_schema.TABLES "
                                       "where TABLE_SCHEMA='openstack_citest' "
                                       "and ENGINE!='InnoDB' "
                                       "and TABLE_NAME!='migrate_version'")
        count = noninnodb.scalar()
        self.assertEqual(count, 0, "%d non InnoDB tables created" % count)

    def test_postgresql_connect_fail(self):
        """Test connection failure on PostgreSQL.

        Test that we can trigger a postgres connection failure and we fail
        gracefully to ensure we don't break people without postgres.
        """
        if _is_backend_avail('postgres', user="openstack_cifail"):
            self.fail("Shouldn't have connected")

    @testtools.skipUnless(_is_backend_avail('postgres'),
                          "postgresql not available")
    def test_postgresql_opportunistically(self):
        # add this to the global lists to make reset work with it, it's removed
        # automatically in tearDown so no need to clean it up here.
        connect_string = _get_connect_string("postgres")
        engine = sqlalchemy.create_engine(connect_string)
        self.engines["postgresqlcitest"] = engine
        self.test_databases["postgresqlcitest"] = connect_string

        # build a fully populated postgresql database with all the tables
        self._reset_databases()
        self._walk_versions(engine, False, False)

    def _walk_versions(self, engine=None, snake_walk=False, downgrade=True):
        # Determine latest version script from the repo, then
        # upgrade from 1 through to the latest, with no data
        # in the databases. This just checks that the schema itself
        # upgrades successfully.

        # Place the database under version control
        migration_api.version_control(engine,
                                      TestMigrations.REPOSITORY,
                                      migration.db_initial_version())
        self.assertEqual(migration.db_initial_version(),
                         migration_api.db_version(engine,
                                                  TestMigrations.REPOSITORY))

        migration_api.upgrade(engine, TestMigrations.REPOSITORY,
                              migration.db_initial_version() + 1)

        LOG.debug('latest version is %s' % TestMigrations.REPOSITORY.latest)

        for version in xrange(migration.db_initial_version() + 2,
                              TestMigrations.REPOSITORY.latest + 1):
            # upgrade -> downgrade -> upgrade
            self._migrate_up(engine, version, with_data=True)
            if snake_walk:
                self._migrate_down(engine, version - 1)
                self._migrate_up(engine, version)

        if downgrade:
            # Now walk it back down to 0 from the latest, testing
            # the downgrade paths.
            for version in reversed(
                    xrange(migration.db_initial_version() + 1,
                           TestMigrations.REPOSITORY.latest)):
                # downgrade -> upgrade -> downgrade
                self._migrate_down(engine, version)
                if snake_walk:
                    self._migrate_up(engine, version + 1)
                    self._migrate_down(engine, version)

    def _migrate_down(self, engine, version):
        migration_api.downgrade(engine,
                                TestMigrations.REPOSITORY,
                                version)
        self.assertEqual(version,
                         migration_api.db_version(engine,
                                                  TestMigrations.REPOSITORY))

    def _migrate_up(self, engine, version, with_data=False):
        """Migrate up to a new version of the db.

        We allow for data insertion and post checks at every
        migration version with special _prerun_### and
        _check_### functions in the main test.
        """
        # NOTE(sdague): try block is here because it's impossible to debug
        # where a failed data migration happens otherwise
        try:
            if with_data:
                data = None
                prerun = getattr(self, "_prerun_%3.3d" % version, None)
                if prerun:
                    data = prerun(engine)

            migration_api.upgrade(engine,
                                  TestMigrations.REPOSITORY,
                                  version)
            self.assertEqual(
                version,
                migration_api.db_version(engine,
                                         TestMigrations.REPOSITORY))

            if with_data:
                check = getattr(self, "_check_%3.3d" % version, None)
                if check:
                    check(engine, data)
        except Exception:
            LOG.error("Failed to migrate to version %s on engine %s" %
                      (version, engine))
            raise

    # migration 004 - change volume types to UUID
    def _prerun_004(self, engine):
        data = {
            'volumes': [{'id': str(uuid.uuid4()), 'host': 'test1',
                         'volume_type_id': 1},
                        {'id': str(uuid.uuid4()), 'host': 'test2',
                         'volume_type_id': 1},
                        {'id': str(uuid.uuid4()), 'host': 'test3',
                         'volume_type_id': 3},
                        ],
            'volume_types': [{'name': 'vtype1'},
                             {'name': 'vtype2'},
                             {'name': 'vtype3'},
                             ],
            'volume_type_extra_specs': [{'volume_type_id': 1,
                                         'key': 'v1',
                                         'value': 'hotep',
                                         },
                                        {'volume_type_id': 1,
                                         'key': 'v2',
                                         'value': 'bending rodrigez',
                                         },
                                        {'volume_type_id': 2,
                                         'key': 'v3',
                                         'value': 'bending rodrigez',
                                         },
                                        ]}

        volume_types = get_table(engine, 'volume_types')
        for vtype in data['volume_types']:
            r = volume_types.insert().values(vtype).execute()
            vtype['id'] = r.inserted_primary_key[0]

        volume_type_es = get_table(engine, 'volume_type_extra_specs')
        for vtes in data['volume_type_extra_specs']:
            r = volume_type_es.insert().values(vtes).execute()
            vtes['id'] = r.inserted_primary_key[0]

        volumes = get_table(engine, 'volumes')
        for vol in data['volumes']:
            r = volumes.insert().values(vol).execute()
            vol['id'] = r.inserted_primary_key[0]

        return data

    def _check_004(self, engine, data):
        volumes = get_table(engine, 'volumes')
        v1 = volumes.select(volumes.c.id ==
                            data['volumes'][0]['id']).execute().first()
        v2 = volumes.select(volumes.c.id ==
                            data['volumes'][1]['id']).execute().first()
        v3 = volumes.select(volumes.c.id ==
                            data['volumes'][2]['id']).execute().first()

        volume_types = get_table(engine, 'volume_types')
        vt1 = volume_types.select(volume_types.c.name ==
                                  data['volume_types'][0]['name']
                                  ).execute().first()
        vt2 = volume_types.select(volume_types.c.name ==
                                  data['volume_types'][1]['name']
                                  ).execute().first()
        vt3 = volume_types.select(volume_types.c.name ==
                                  data['volume_types'][2]['name']
                                  ).execute().first()

        vtes = get_table(engine, 'volume_type_extra_specs')
        vtes1 = vtes.select(vtes.c.key ==
                            data['volume_type_extra_specs'][0]['key']
                            ).execute().first()
        vtes2 = vtes.select(vtes.c.key ==
                            data['volume_type_extra_specs'][1]['key']
                            ).execute().first()
        vtes3 = vtes.select(vtes.c.key ==
                            data['volume_type_extra_specs'][2]['key']
                            ).execute().first()

        self.assertEqual(v1['volume_type_id'], vt1['id'])
        self.assertEqual(v2['volume_type_id'], vt1['id'])
        self.assertEqual(v3['volume_type_id'], vt3['id'])

        self.assertEqual(vtes1['volume_type_id'], vt1['id'])
        self.assertEqual(vtes2['volume_type_id'], vt1['id'])
        self.assertEqual(vtes3['volume_type_id'], vt2['id'])

    def test_migration_005(self):
        """Test that adding source_volid column works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 4)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 5)
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertIsInstance(volumes.c.source_volid.type,
                                  sqlalchemy.types.VARCHAR)

    def _metadatas(self, upgrade_to, downgrade_to=None):
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine,
                                  TestMigrations.REPOSITORY,
                                  upgrade_to)

            if downgrade_to is not None:
                migration_api.downgrade(
                    engine, TestMigrations.REPOSITORY, downgrade_to)

            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            yield metadata

    def metadatas_upgraded_to(self, revision):
        return self._metadatas(revision)

    def metadatas_downgraded_from(self, revision):
        return self._metadatas(revision, revision - 1)

    def test_upgrade_006_adds_provider_location(self):
        for metadata in self.metadatas_upgraded_to(6):
            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            self.assertIsInstance(snapshots.c.provider_location.type,
                                  sqlalchemy.types.VARCHAR)

    def test_downgrade_006_removes_provider_location(self):
        for metadata in self.metadatas_downgraded_from(6):
            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            self.assertNotIn('provider_location', snapshots.c)

    def test_upgrade_007_adds_fk(self):
        for metadata in self.metadatas_upgraded_to(7):
            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            fkey, = snapshots.c.volume_id.foreign_keys
            self.assertEqual(volumes.c.id, fkey.column)

    def test_downgrade_007_removes_fk(self):
        for metadata in self.metadatas_downgraded_from(7):
            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            self.assertEqual(0, len(snapshots.c.volume_id.foreign_keys))

    def test_migration_008(self):
        """Test that adding and removing the backups table works correctly"""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 7)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 8)

            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     "backups"))
            backups = sqlalchemy.Table('backups', metadata, autoload=True)

            self.assertIsInstance(backups.c.created_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(backups.c.updated_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(backups.c.deleted_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(backups.c.deleted.type,
                                  sqlalchemy.types.BOOLEAN)
            self.assertIsInstance(backups.c.id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.volume_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.user_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.project_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.host.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.availability_zone.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.display_name.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.display_description.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.container.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.status.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.fail_reason.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.service_metadata.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.service.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(backups.c.size.type,
                                  sqlalchemy.types.INTEGER)
            self.assertIsInstance(backups.c.object_count.type,
                                  sqlalchemy.types.INTEGER)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 7)

            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      "backups"))

    def test_migration_009(self):
        """Test adding snapshot_metadata table works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 8)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 9)

            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     "snapshot_metadata"))
            snapshot_metadata = sqlalchemy.Table('snapshot_metadata',
                                                 metadata,
                                                 autoload=True)

            self.assertIsInstance(snapshot_metadata.c.created_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(snapshot_metadata.c.updated_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(snapshot_metadata.c.deleted_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(snapshot_metadata.c.deleted.type,
                                  sqlalchemy.types.BOOLEAN)
            self.assertIsInstance(snapshot_metadata.c.deleted.type,
                                  sqlalchemy.types.BOOLEAN)
            self.assertIsInstance(snapshot_metadata.c.id.type,
                                  sqlalchemy.types.INTEGER)
            self.assertIsInstance(snapshot_metadata.c.snapshot_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(snapshot_metadata.c.key.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(snapshot_metadata.c.value.type,
                                  sqlalchemy.types.VARCHAR)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 8)

            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      "snapshot_metadata"))

    def test_migration_010(self):
        """Test adding transfers table works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 9)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 10)

            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     "transfers"))
            transfers = sqlalchemy.Table('transfers', metadata, autoload=True)

            self.assertIsInstance(transfers.c.created_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(transfers.c.updated_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(transfers.c.deleted_at.type,
                                  sqlalchemy.types.DATETIME)
            self.assertIsInstance(transfers.c.deleted.type,
                                  sqlalchemy.types.BOOLEAN)
            self.assertIsInstance(transfers.c.id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(transfers.c.volume_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(transfers.c.display_name.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(transfers.c.salt.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(transfers.c.crypt_hash.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(transfers.c.expires_at.type,
                                  sqlalchemy.types.DATETIME)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 9)

            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      "transfers"))

    def test_migration_011(self):
        """Test adding bootable column to volumes works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 10)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            volumes_v10 = sqlalchemy.Table('volumes', metadata, autoload=True)

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 11)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     "volumes"))
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)

            # Make sure we didn't miss any columns in the upgrade
            for column in volumes_v10.c:
                self.assertTrue(volumes.c.__contains__(column.name))

            self.assertIsInstance(volumes.c.bootable.type,
                                  sqlalchemy.types.BOOLEAN)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 10)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertNotIn('bootable', volumes.c)

            # Make sure we put all the columns back
            for column in volumes_v10.c:
                self.assertTrue(volumes.c.__contains__(column.name))

    def test_migration_012(self):
        """Test that adding attached_host column works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 11)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 12)
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertIsInstance(volumes.c.attached_host.type,
                                  sqlalchemy.types.VARCHAR)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 11)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertNotIn('attached_host', volumes.c)

    def test_migration_013(self):
        """Test that adding provider_geometry column works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 12)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 13)
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertIsInstance(volumes.c.provider_geometry.type,
                                  sqlalchemy.types.VARCHAR)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 12)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertNotIn('provider_geometry', volumes.c)

    def test_migration_014(self):
        """Test that adding _name_id column works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 13)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 14)
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertIsInstance(volumes.c._name_id.type,
                                  sqlalchemy.types.VARCHAR)

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 13)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertNotIn('_name_id', volumes.c)

    def test_migration_015(self):
        """Test removing migrations table works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 15)

            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      "migrations"))

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 14)

            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     "migrations"))

    def test_migration_016(self):
        """Test that dropping xen storage manager tables works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 15)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 16)
            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      'sm_flavors'))
            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      'sm_backend_config'))
            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      'sm_volume'))

            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 15)
            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     'sm_flavors'))
            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     'sm_backend_config'))
            self.assertTrue(engine.dialect.has_table(engine.connect(),
                                                     'sm_volume'))

    def test_migration_017(self):
        """Test that added encryption information works correctly."""
        # upgrade schema
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 16)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 17)

            # encryption key UUID
            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertIn('encryption_key_id', volumes.c)
            self.assertIsInstance(volumes.c.encryption_key_id.type,
                                  sqlalchemy.types.VARCHAR)

            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            self.assertIn('encryption_key_id', snapshots.c)
            self.assertIsInstance(snapshots.c.encryption_key_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIn('volume_type_id', snapshots.c)
            self.assertIsInstance(snapshots.c.volume_type_id.type,
                                  sqlalchemy.types.VARCHAR)

            # encryption types table
            encryption = sqlalchemy.Table('encryption',
                                          metadata,
                                          autoload=True)
            self.assertIsInstance(encryption.c.volume_type_id.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(encryption.c.cipher.type,
                                  sqlalchemy.types.VARCHAR)
            self.assertIsInstance(encryption.c.key_size.type,
                                  sqlalchemy.types.INTEGER)
            self.assertIsInstance(encryption.c.provider.type,
                                  sqlalchemy.types.VARCHAR)

            # downgrade schema
            migration_api.downgrade(engine, TestMigrations.REPOSITORY, 16)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            volumes = sqlalchemy.Table('volumes', metadata, autoload=True)
            self.assertNotIn('encryption_key_id', volumes.c)

            snapshots = sqlalchemy.Table('snapshots', metadata, autoload=True)
            self.assertNotIn('encryption_key_id', snapshots.c)

            self.assertFalse(engine.dialect.has_table(engine.connect(),
                                                      'encryption'))

    def test_migration_018(self):
        """Test that added qos_specs table works correctly."""
        for (key, engine) in self.engines.items():
            migration_api.version_control(engine,
                                          TestMigrations.REPOSITORY,
                                          migration.db_initial_version())
            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 17)
            metadata = sqlalchemy.schema.MetaData()
            metadata.bind = engine

            migration_api.upgrade(engine, TestMigrations.REPOSITORY, 18)
            self.assertTrue(engine.dialect.has_table(
                engine.connect(), "quality_of_service_specs"))
            qos_specs = sqlalchemy.Table('quality_of_service_specs', metadata,
autoload=True) self.assertIsInstance(qos_specs.c.created_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(qos_specs.c.updated_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(qos_specs.c.deleted_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(qos_specs.c.deleted.type, sqlalchemy.types.BOOLEAN) self.assertIsInstance(qos_specs.c.id.type, sqlalchemy.types.VARCHAR) self.assertIsInstance(qos_specs.c.specs_id.type, sqlalchemy.types.VARCHAR) self.assertIsInstance(qos_specs.c.key.type, sqlalchemy.types.VARCHAR) self.assertIsInstance(qos_specs.c.value.type, sqlalchemy.types.VARCHAR) migration_api.downgrade(engine, TestMigrations.REPOSITORY, 17) self.assertFalse(engine.dialect.has_table( engine.connect(), "quality_of_service_specs")) def test_migration_019(self): """Test that adding migration_status column works correctly.""" for (key, engine) in self.engines.items(): migration_api.version_control(engine, TestMigrations.REPOSITORY, migration.db_initial_version()) migration_api.upgrade(engine, TestMigrations.REPOSITORY, 18) metadata = sqlalchemy.schema.MetaData() metadata.bind = engine migration_api.upgrade(engine, TestMigrations.REPOSITORY, 19) volumes = sqlalchemy.Table('volumes', metadata, autoload=True) self.assertIsInstance(volumes.c.migration_status.type, sqlalchemy.types.VARCHAR) migration_api.downgrade(engine, TestMigrations.REPOSITORY, 18) metadata = sqlalchemy.schema.MetaData() metadata.bind = engine volumes = sqlalchemy.Table('volumes', metadata, autoload=True) self.assertNotIn('migration_status', volumes.c) def test_migration_020(self): """Test adding volume_admin_metadata table works correctly.""" for (key, engine) in self.engines.items(): migration_api.version_control(engine, TestMigrations.REPOSITORY, migration.db_initial_version()) migration_api.upgrade(engine, TestMigrations.REPOSITORY, 19) metadata = sqlalchemy.schema.MetaData() metadata.bind = engine migration_api.upgrade(engine, TestMigrations.REPOSITORY, 20) self.assertTrue(engine.dialect.has_table(engine.connect(), "volume_admin_metadata")) volume_admin_metadata = sqlalchemy.Table('volume_admin_metadata', metadata, autoload=True) self.assertIsInstance(volume_admin_metadata.c.created_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(volume_admin_metadata.c.updated_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(volume_admin_metadata.c.deleted_at.type, sqlalchemy.types.DATETIME) self.assertIsInstance(volume_admin_metadata.c.deleted.type, sqlalchemy.types.BOOLEAN) self.assertIsInstance(volume_admin_metadata.c.deleted.type, sqlalchemy.types.BOOLEAN) self.assertIsInstance(volume_admin_metadata.c.id.type, sqlalchemy.types.INTEGER) self.assertIsInstance(volume_admin_metadata.c.volume_id.type, sqlalchemy.types.VARCHAR) self.assertIsInstance(volume_admin_metadata.c.key.type, sqlalchemy.types.VARCHAR) self.assertIsInstance(volume_admin_metadata.c.value.type, sqlalchemy.types.VARCHAR) migration_api.downgrade(engine, TestMigrations.REPOSITORY, 19) self.assertFalse(engine.dialect.has_table(engine.connect(), "volume_admin_metadata")) def test_migration_021(self): """Test adding default data for quota classes works correctly.""" for (key, engine) in self.engines.items(): migration_api.version_control(engine, TestMigrations.REPOSITORY, migration.db_initial_version()) migration_api.upgrade(engine, TestMigrations.REPOSITORY, 20) metadata = sqlalchemy.schema.MetaData() metadata.bind = engine migration_api.upgrade(engine, TestMigrations.REPOSITORY, 21) quota_class_metadata = 
sqlalchemy.Table('quota_classes', metadata, autoload=True) num_defaults = quota_class_metadata.count().\ where(quota_class_metadata.c.class_name == 'default').\ execute().scalar() self.assertEqual(3, num_defaults) migration_api.downgrade(engine, TestMigrations.REPOSITORY, 20) # Defaults should not be deleted during downgrade num_defaults = quota_class_metadata.count().\ where(quota_class_metadata.c.class_name == 'default').\ execute().scalar() self.assertEqual(3, num_defaults)
[]
[]
[ "CINDER_TEST_MIGRATIONS_CONF", "PGUSER", "PGPASSWORD", "NOVA_TEST_MYSQL_PRESENT" ]
[]
["CINDER_TEST_MIGRATIONS_CONF", "PGUSER", "PGPASSWORD", "NOVA_TEST_MYSQL_PRESENT"]
python
4
0
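Each migration test above follows the same loop: version-control the engine, upgrade to the revision under test, reflect the table, assert the column types, then downgrade and assert the schema change is undone. A minimal, self-contained sketch of the reflect-and-assert step, using plain SQLAlchemy (1.4+) against an in-memory SQLite engine rather than cinder's sqlalchemy-migrate repository; the table and column names here are illustrative only:

import sqlalchemy

engine = sqlalchemy.create_engine("sqlite://")
with engine.connect() as conn:
    conn.execute(sqlalchemy.text("CREATE TABLE volumes (id VARCHAR PRIMARY KEY)"))
    # "upgrade": add the new column
    conn.execute(sqlalchemy.text("ALTER TABLE volumes ADD COLUMN attached_host VARCHAR"))
    # reflect the live schema and assert the column type, as the tests above do
    volumes = sqlalchemy.Table("volumes", sqlalchemy.MetaData(), autoload_with=conn)
    assert isinstance(volumes.c.attached_host.type, sqlalchemy.types.VARCHAR)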
tools/train_net.py
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. r""" Basic training script for PyTorch """ # Set up custom environment before nearly anything else is imported # NOTE: this should be the first import (do not reorder) from fcos_core.utils.env import setup_environment # noqa F401 isort:skip import argparse import os import torch from fcos_core.config import cfg from fcos_core.data import make_data_loader from fcos_core.solver import make_lr_scheduler from fcos_core.solver import make_optimizer from fcos_core.engine.inference import inference from fcos_core.engine.trainer import do_train from fcos_core.modeling.detector import build_detection_model from fcos_core.utils.checkpoint import DetectronCheckpointer from fcos_core.utils.collect_env import collect_env_info from fcos_core.utils.comm import synchronize, \ get_rank, is_pytorch_1_1_0_or_later from fcos_core.utils.imports import import_file from fcos_core.utils.logger import setup_logger from fcos_core.utils.miscellaneous import mkdir, save_config def train(cfg, local_rank, distributed): model = build_detection_model(cfg) device = torch.device(cfg.MODEL.DEVICE) model.to(device) if cfg.MODEL.USE_SYNCBN: assert is_pytorch_1_1_0_or_later(), \ "SyncBatchNorm is only available in pytorch >= 1.1.0" model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model) optimizer = make_optimizer(cfg, model) scheduler = make_lr_scheduler(cfg, optimizer) if distributed: model = torch.nn.parallel.DistributedDataParallel( model, device_ids=[local_rank], output_device=local_rank, # this should be removed if we update BatchNorm stats broadcast_buffers=False, ) arguments = {} arguments["iteration"] = 0 output_dir = cfg.OUTPUT_DIR save_to_disk = get_rank() == 0 checkpointer = DetectronCheckpointer(cfg, model, optimizer, scheduler, output_dir, save_to_disk) extra_checkpoint_data = checkpointer.load(cfg.MODEL.WEIGHT) arguments.update(extra_checkpoint_data) data_loader = make_data_loader( cfg, is_train=True, is_distributed=distributed, start_iter=arguments["iteration"], ) checkpoint_period = cfg.SOLVER.CHECKPOINT_PERIOD do_train( model, data_loader, optimizer, scheduler, checkpointer, device, checkpoint_period, arguments, ) return model def run_test(cfg, model, distributed): if distributed: model = model.module torch.cuda.empty_cache() # TODO check if it helps iou_types = ("bbox", ) if cfg.MODEL.MASK_ON: iou_types = iou_types + ("segm", ) if cfg.MODEL.KEYPOINT_ON: iou_types = iou_types + ("keypoints", ) output_folders = [None] * len(cfg.DATASETS.TEST) dataset_names = cfg.DATASETS.TEST if cfg.OUTPUT_DIR: for idx, dataset_name in enumerate(dataset_names): output_folder = os.path.join(cfg.OUTPUT_DIR, "inference", dataset_name) mkdir(output_folder) output_folders[idx] = output_folder data_loaders_val = make_data_loader(cfg, is_train=False, is_distributed=distributed) for output_folder, dataset_name, data_loader_val in zip(output_folders, dataset_names, data_loaders_val): inference( model, data_loader_val, dataset_name=dataset_name, iou_types=iou_types, box_only=False if cfg.MODEL.FCOS_ON or cfg.MODEL.RETINANET_ON else cfg.MODEL.RPN_ONLY, device=cfg.MODEL.DEVICE, expected_results=cfg.TEST.EXPECTED_RESULTS, expected_results_sigma_tol=cfg.TEST.EXPECTED_RESULTS_SIGMA_TOL, output_folder=output_folder, ) synchronize() def main(): parser = argparse.ArgumentParser(description="PyTorch Object Detection Training") parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file", type=str) parser.add_argument("--netname",
default="mpprcnn", help="name of the network", type=str) parser.add_argument("--date", help="datetime of training", type=str) parser.add_argument("--local_rank", type=int, default=0) parser.add_argument("--skip-test", dest="skip_test", help="Do not test the final model", action="store_true") parser.add_argument("opts", default=None, nargs=argparse.REMAINDER, \ help="Modify config options using the command-line") args = parser.parse_args() num_gpus = int(os.environ["WORLD_SIZE"]) if "WORLD_SIZE" in os.environ else 1 args.distributed = num_gpus > 1 if args.distributed: torch.cuda.set_device(args.local_rank) torch.distributed.init_process_group(backend="nccl", init_method="env://") synchronize() cfg.merge_from_file(args.config_file) cfg.merge_from_list(args.opts) output_dir = os.path.join(cfg.OUTPUT_DIR, args.netname, args.date + "/") cfg.OUTPUT_DIR = output_dir cfg.freeze() if output_dir: mkdir(output_dir) logger = setup_logger("fcos_core", output_dir, get_rank()) logger.info("Using {} GPUs".format(num_gpus)) logger.info(args) logger.info("Collecting env info (might take some time)") logger.info("\n" + collect_env_info()) logger.info("Loaded configuration file {}".format(args.config_file)) with open(args.config_file, "r") as cf: config_str = "\n" + cf.read() logger.info(config_str) logger.info("Running with config:\n{}".format(cfg)) output_config_path = os.path.join(output_dir, 'config.yml') logger.info("Saving config into: {}".format(output_config_path)) # save overloaded model config in the output directory save_config(cfg, output_config_path) model = train(cfg, args.local_rank, args.distributed) if not args.skip_test: run_test(cfg, model, args.distributed) if __name__ == "__main__": main()
[]
[]
[ "WORLD_SIZE" ]
[]
["WORLD_SIZE"]
python
1
0
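The script above decides between single-GPU and distributed mode purely from the WORLD_SIZE variable exported by the torch.distributed launcher. A reduced sketch of just that bootstrap, under the assumption that the launcher also exports LOCAL_RANK (the script above takes --local_rank as a CLI argument instead):

import os
import torch

def bootstrap_distributed():
    # torch.distributed.launch / torchrun export WORLD_SIZE per worker process
    num_gpus = int(os.environ.get("WORLD_SIZE", "1"))
    if num_gpus <= 1:
        return False
    local_rank = int(os.environ.get("LOCAL_RANK", "0"))  # assumed launcher env var
    torch.cuda.set_device(local_rank)
    torch.distributed.init_process_group(backend="nccl", init_method="env://")
    return True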
dremio_client/cli.py
# -*- coding: utf-8 -*- # # Copyright (c) 2019 Ryan Murray. # # This file is part of Dremio Client # (see https://github.com/rymurr/dremio_client). # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # """Console script for dremio_client.""" import sys import os import click import simplejson as json from .conf import get_base_url_token from .util.query import run from .error import DremioNotFoundException from .model.endpoints import sql as _sql from .model.endpoints import job_status as _job_status from .model.endpoints import job_results as _job_results from .model.endpoints import catalog as _catalog from .model.endpoints import catalog_item as _catalog_item from .model.endpoints import reflections as _reflections from .model.endpoints import reflection as _reflection from .model.endpoints import wlm_rules as _wlm_rules from .model.endpoints import wlm_queues as _wlm_queues from .model.endpoints import votes as _votes from .model.endpoints import group as _group from .model.endpoints import user as _user from .model.endpoints import personal_access_token as _pat from .model.endpoints import collaboration_wiki as _collaboration_wiki from .model.endpoints import collaboration_tags as _collaboration_tags from .model.endpoints import set_catalog as _set_catalog from .model.endpoints import delete_catalog as _delete_catalog from .model.endpoints import update_catalog as _update_catalog from .model.endpoints import refresh_pds as _refresh_pds from .model.endpoints import delete_personal_access_token as _delete_personal_access_token from .model.endpoints import set_personal_access_token as _set_personal_access_token @click.group() @click.option('--config', type=click.Path(exists=True, dir_okay=True, file_okay=False), help='Custom config directory.') @click.option('-h', '--hostname', help='Hostname if different from config file') @click.option('-p', '--port', type=int, help='Port if different from config file') @click.option('--ssl', is_flag=True, help='Use SSL if different from config file') @click.option('-u', '--username', help='username if different from config file') @click.option('-p', '--password', help='password if different from config file') @click.option('--skip-verify', is_flag=True, help='skip verification of ssl cert') @click.pass_context def cli(ctx, config, hostname, port, ssl, username, password, skip_verify): if config: os.environ['DREMIO_CLIENTDIR'] = config ctx.obj = dict() if hostname: ctx.obj['hostname'] = hostname if port: ctx.obj['port'] = port if ssl: ctx.obj['ssl'] = ssl if username: ctx.obj['auth.username'] = username if password: ctx.obj['auth.password'] = password if skip_verify: ctx.obj['ssl_verify'] = not skip_verify else: ctx.obj['ssl_verify'] = True @cli.command() @click.option('--sql', help='sql query to execute.', required=True) @click.pass_obj
def query(args, sql): """ execute a query given by sql and print results """ base_url, token = get_base_url_token(args) results = list() for x in run(token, base_url, sql, ssl_verify=args.get('ssl_verify', True)): results.extend(x['rows']) click.echo(json.dumps(results)) @cli.command() @click.argument('sql-query', nargs=-1, required=True) @click.option('--context', help='context in which the sql query should execute.') @click.pass_obj def sql(args, sql_query, context): """ Execute sql statement and return job id """ base_url, token = get_base_url_token(args) x = _sql(token, base_url, ' '.join(sql_query), context, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('jobid', nargs=1, required=True) @click.pass_obj def job_status(args, jobid): """ Return status of job for a given job id """ base_url, token = get_base_url_token(args) x = _job_status(token, base_url, jobid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('jobid', nargs=1, required=True) @click.option('-o', '--offset', type=int, default=0, help="offset of first result") @click.option('-l', '--limit', type=int, default=100, help="number of results to return") @click.pass_obj def job_results(args, jobid, offset, limit): """ return results for a given job id paginated with offset and limit """ base_url, token = get_base_url_token(args) x = _job_results(token, base_url, jobid, offset, limit, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.pass_obj def catalog(args): """ return the root catalog """ base_url, token = get_base_url_token(args) x = _catalog(token, base_url, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.option('-c', '--cid', help="id of a given catalog item") @click.option('-p', '--path', help="path of a given catalog item") @click.pass_obj def catalog_item(args, cid, path): """ return the details of a given catalog item if cid and path are both specified id is used if neither are specified it causes an error """ base_url, token = get_base_url_token(args) x = _catalog_item(token, base_url, cid, [path.replace('.', '/')] if path else None, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.option('--summary', '-s', is_flag=True, help='only return summary reflection info') @click.pass_obj def reflections(args, summary): """ return the reflection set """ base_url, token = get_base_url_token(args) x = _reflections(token, base_url, summary, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('reflectionid', nargs=1, required=True) @click.pass_obj def reflection(args, reflectionid): """ return a single reflection """ base_url, token = get_base_url_token(args) x = _reflection(token, base_url, reflectionid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.pass_obj def wlm_rules(args): """ return the list of wlm rules """ base_url, token = get_base_url_token(args) x = _wlm_rules(token, base_url, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.pass_obj def wlm_queues(args): """ return the list of wlm queues """ base_url, token = get_base_url_token(args) x = _wlm_queues(token, base_url, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.pass_obj def votes(args): """ return reflection votes """ base_url, token = get_base_url_token(args) x = _votes(token,
base_url, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.option('--uid', '-u', help='unique id for a user') @click.option('--name', '-n', help='human readable name of a user') @click.pass_obj def user(args, uid, name): """ return user info """ base_url, token = get_base_url_token(args) x = _user(token, base_url, uid, name, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.option('--gid', '-g', help='unique id for a group') @click.option('--name', '-n', help='human readable name of a group') @click.pass_obj def group(args, gid, name): """ return group info """ base_url, token = get_base_url_token(args) x = _group(token, base_url, gid, name, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('uid', nargs=1, required=True) @click.pass_obj def pat(args, uid): """ return personal access token info for a given user id """ base_url, token = get_base_url_token(args) x = _pat(token, base_url, uid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.option('--cid', '-c', help='unique id for a catalog entity') @click.option('--path', '-p', help='path of a catalog entity') @click.pass_obj def tags(args, cid, path): """ returns tags for a given catalog entity id or path only cid or path can be specified. path incurs a second lookup to get the id """ base_url, token = get_base_url_token(args) if path: res = _catalog_item(token, base_url, None, [path.replace('.', '/')], ssl_verify=args.get('ssl_verify', True)) cid = res['id'] try: x = _collaboration_tags(token, base_url, cid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) except DremioNotFoundException: click.echo("Tags not found or entity does not exist") @cli.command() @click.option('--cid', '-c', help='unique id for a catalog entity') @click.option('--path', '-p', help='path of a catalog entity') @click.option('--pretty-print', '-v', is_flag=True, help='format markdown for terminal') @click.pass_obj def wiki(args, cid, path, pretty_print): """ returns wiki for a given catalog entity id or path only cid or path can be specified.
path incurs a second lookup to get the id activating the pretty-print flag will attempt to convert the text field to plain-text for the console """ base_url, token = get_base_url_token(args) if path: res = _catalog_item(token, base_url, None, [path.replace('.', '/')], ssl_verify=args.get('ssl_verify', True)) cid = res['id'] try: x = _collaboration_wiki(token, base_url, cid, ssl_verify=args.get('ssl_verify', True)) if pretty_print: try: text = _to_text(x['text']) click.echo(text) except ImportError: click.echo("Can't convert text to console, please install markdown and BeautifulSoup") click.echo(json.dumps(x)) else: click.echo(json.dumps(x)) except DremioNotFoundException: click.echo("Wiki not found or entity does not exist") def _to_text(text): from markdown import Markdown from io import StringIO def unmark_element(element, stream=None): if stream is None: stream = StringIO() if element.text: stream.write(element.text) for sub in element: unmark_element(sub, stream) if element.tail: stream.write(element.tail) return stream.getvalue() # patching Markdown Markdown.output_formats["plain"] = unmark_element __md = Markdown(output_format="plain") __md.stripTopLevelTags = False return __md.convert(text) @cli.command() @click.argument('data', nargs=1, required=True) @click.option('-i', '--cid', help='catalog entity') @click.pass_obj def update_catalog(args, data, cid): """ update a catalog entity (cid) given a json data object """ base_url, token = get_base_url_token(args) x = _update_catalog(token, base_url, cid, data, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('cid', nargs=1, required=True) @click.option('-t', '--tag', help='current tag, for concurrency') @click.pass_obj def delete_catalog(args, cid, tag): """ delete a catalog entity given by cid and tag version """ base_url, token = get_base_url_token(args) x = _delete_catalog(token, base_url, cid, tag, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('data', nargs=1, required=True) @click.pass_obj def set_catalog(args, data): """ set a catalog entity with the given json string """ base_url, token = get_base_url_token(args) x = _set_catalog(token, base_url, data, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('pid', nargs=1, required=True) @click.pass_obj def refresh_pds(args, pid): """ refresh metadata/reflections for a given physical dataset """ base_url, token = get_base_url_token(args) x = _refresh_pds(token, base_url, pid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('uid', nargs=1, required=True) @click.option('-l', '--lifetime', help='lifetime of token in hours', default=24, type=int) @click.option('-n', '--name', help='name of token') @click.pass_obj def set_pat(args, uid, lifetime, name): """ create a personal access token """ base_url, token = get_base_url_token(args) x = _set_personal_access_token(token, base_url, uid, name, lifetime, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) @cli.command() @click.argument('uid', nargs=1, required=True) @click.pass_obj def delete_pat(args, uid): """ delete a personal access token """ base_url, token = get_base_url_token(args) x = _delete_personal_access_token(token, base_url, uid, ssl_verify=args.get('ssl_verify', True)) click.echo(json.dumps(x)) if __name__ == "__main__": sys.exit(cli()) # pragma: no cover
[]
[]
[ "DREMIO_CLIENTDIR" ]
[]
["DREMIO_CLIENTDIR"]
python
1
0
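Every subcommand above receives the group's override dictionary through @click.pass_obj and threads ssl_verify into the endpoint call. A stripped-down sketch of that wiring; mycli and ping are illustrative names, not part of dremio_client:

import click

@click.group()
@click.option("--hostname", default=None, help="hostname override")
@click.pass_context
def mycli(ctx, hostname):
    # collect only the overrides that were actually passed
    ctx.obj = {}
    if hostname:
        ctx.obj["hostname"] = hostname
    ctx.obj.setdefault("ssl_verify", True)

@mycli.command()
@click.pass_obj
def ping(args):
    click.echo("would contact %s (verify=%s)"
               % (args.get("hostname", "from-config"), args["ssl_verify"]))

if __name__ == "__main__":
    mycli()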
shipyard-controller/main.go
package main import ( "github.com/gin-gonic/gin" keptncommon "github.com/keptn/go-utils/pkg/lib/keptn" "github.com/keptn/go-utils/pkg/lib/v0_2_0" "github.com/keptn/keptn/shipyard-controller/common" "github.com/keptn/keptn/shipyard-controller/controller" "github.com/keptn/keptn/shipyard-controller/db" "github.com/keptn/keptn/shipyard-controller/docs" "github.com/keptn/keptn/shipyard-controller/handler" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" "log" "os" ) // @title Control Plane API // @version 1.0 // @description This is the API documentation of the Shipyard Controller. // @securityDefinitions.apiKey ApiKeyAuth // @in header // @name x-token // @contact.name Keptn Team // @contact.url http://www.keptn.sh // @license.name Apache 2.0 // @license.url http://www.apache.org/licenses/LICENSE-2.0.html // @BasePath /v1 func main() { if os.Getenv("GIN_MODE") == "release" { docs.SwaggerInfo.Version = os.Getenv("version") docs.SwaggerInfo.BasePath = "/api/shipyard-controller/v1" docs.SwaggerInfo.Schemes = []string{"https"} } csEndpoint, err := keptncommon.GetServiceEndpoint("CONFIGURATION_SERVICE") if err != nil { log.Fatalf("could not get configuration-service URL: %s", err.Error()) } kubeAPI, err := createKubeAPI() if err != nil { log.Fatalf("could not create kubernetes client: %s", err.Error()) } eventSender, err := v0_2_0.NewHTTPEventSender("") if err != nil { log.Fatal(err) } logger := keptncommon.NewLogger("", "", "shipyard-controller") projectManager := handler.NewProjectManager( common.NewGitConfigurationStore(csEndpoint.String()), createSecretStore(kubeAPI), createMaterializedView(logger), createTaskSequenceRepo(logger), createEventsRepo(logger)) serviceManager := handler.NewServiceManager( createMaterializedView(logger), common.NewGitConfigurationStore(csEndpoint.String()), logger) stageManager := handler.NewStageManager(createMaterializedView(logger), logger) engine := gin.Default() apiV1 := engine.Group("/v1") projectService := handler.NewProjectHandler(projectManager, eventSender, logger) projectController := controller.NewProjectController(projectService) projectController.Inject(apiV1) serviceHandler := handler.NewServiceHandler(serviceManager, eventSender, logger) serviceController := controller.NewServiceController(serviceHandler) serviceController.Inject(apiV1) eventHandler := handler.NewEventHandler() eventController := controller.NewEventController(eventHandler) eventController.Inject(apiV1) stageHandler := handler.NewStageHandler(stageManager) stageController := controller.NewStageController(stageHandler) stageController.Inject(apiV1) evaluationManager, err := handler.NewEvaluationManager(eventSender, createMaterializedView(logger), logger) if err != nil { log.Fatal(err) } evaluationHandler := handler.NewEvaluationHandler(evaluationManager) evaluationController := controller.NewEvaluationController(evaluationHandler) evaluationController.Inject(apiV1) engine.Static("/swagger-ui", "./swagger-ui") engine.Run() } func createMaterializedView(logger *keptncommon.Logger) *db.ProjectsMaterializedView { projectesMaterializedView := &db.ProjectsMaterializedView{ ProjectRepo: createProjectRepo(logger), EventsRetriever: createEventsRepo(logger), Logger: logger, } return projectesMaterializedView } func createProjectRepo(logger *keptncommon.Logger) *db.MongoDBProjectsRepo { return &db.MongoDBProjectsRepo{Logger: logger} } func createEventsRepo(logger *keptncommon.Logger) *db.MongoDBEventsRepo { return &db.MongoDBEventsRepo{Logger: logger} } func createTaskSequenceRepo(logger 
*keptncommon.Logger) *db.TaskSequenceMongoDBRepo { return &db.TaskSequenceMongoDBRepo{Logger: logger} } func createSecretStore(kubeAPI *kubernetes.Clientset) *common.K8sSecretStore { return common.NewK8sSecretStore(kubeAPI) } // GetKubeAPI godoc func createKubeAPI() (*kubernetes.Clientset, error) { var config *rest.Config config, err := rest.InClusterConfig() if err != nil { return nil, err } kubeAPI, err := kubernetes.NewForConfig(config) if err != nil { return nil, err } return kubeAPI, nil }
[ "\"GIN_MODE\"", "\"version\"" ]
[]
[ "version", "GIN_MODE" ]
[]
["version", "GIN_MODE"]
go
2
0
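main() above wires each handler into the shared /v1 gin group through a controller's Inject method. A minimal sketch of that registration pattern; WidgetController is an illustrative stand-in, not a shipyard-controller type:

package main

import "github.com/gin-gonic/gin"

type WidgetController struct{}

// Inject registers this controller's routes on the shared API group.
func (wc *WidgetController) Inject(api *gin.RouterGroup) {
	api.GET("/widget", func(c *gin.Context) { c.JSON(200, gin.H{"ok": true}) })
}

func main() {
	engine := gin.Default()
	apiV1 := engine.Group("/v1")
	(&WidgetController{}).Inject(apiV1)
	engine.Run() // listens on :8080 by default
}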
pytest_libiio/plugin.py
# -*- coding: utf-8 -*- import os import pathlib import signal import socket import subprocess import time from shutil import which import iio import pytest import yaml class iio_emu_manager: def __init__( self, xml_path: str, auto: bool = True, rx_dev: str = None, tx_dev: str = None, ): self.xml_path = xml_path self.rx_dev = rx_dev self.tx_dev = tx_dev self.current_device = None self.auto = auto self.data_devices = None iio_emu = which("iio-emu") is None if iio_emu: raise Exception("iio-emu not found on path") hostname = socket.gethostname() self.local_ip = socket.gethostbyname(hostname) self.uri = f"ip:{self.local_ip}" self.p = None if os.getenv("IIO_EMU_URI"): self.uri = os.getenv("IIO_EMU_URI") def __del__(self): if self.p: self.stop() def start(self): with open("data.bin", "w"): pass cmd = ["iio-emu", "generic", self.xml_path] if self.data_devices: for dev in self.data_devices: cmd.append(f"{dev}@data.bin") self.p = subprocess.Popen(cmd) time.sleep(3) # wait for server to boot if self.p.poll(): self.p.send_signal(signal.SIGINT) raise Exception("iio-emu failed to start... exiting") def stop(self): if self.p: self.p.send_signal(signal.SIGINT) self.p = None def get_hw_map(request): if request.config.getoption("--adi-hw-map"): path = pathlib.Path(__file__).parent.absolute() filename = os.path.join(path, "resources", "adi_hardware_map.yml") elif request.config.getoption("--custom-hw-map"): filename = request.config.getoption("--custom-hw-map") else: filename = None return import_hw_map(filename) if filename else None def get_filename(map, hw): hw = map[hw] fn = None dd = None for f in hw: if isinstance(f, dict) and "emulate" in f.keys(): emu = f["emulate"] for e in emu: if "filename" in e: fn = e["filename"] if "data_devices" in e: dd = e["data_devices"] return fn, dd def handle_iio_emu(ctx, request, _iio_emu): if "hw" in ctx and hasattr(_iio_emu, "auto") and _iio_emu.auto: if _iio_emu.current_device != ctx["hw"]: # restart with new hw if _iio_emu.p: print("Stopping iio-emu") _iio_emu.stop() elif _iio_emu.current_device: print("Using same hardware not restarting iio-emu") map = get_hw_map(request) fn, dd = get_filename(map, ctx["hw"]) if not fn: return ctx if request.config.getoption("--emu-xml-dir"): path = request.config.getoption("--emu-xml-dir") exml = os.path.join(path, fn) print("exml", exml) else: path = pathlib.Path(__file__).parent.absolute() exml = os.path.join(path, "resources", "devices", fn) if not os.path.exists(exml): pytest.skip(f"No XML file found for hardware {ctx['hw']}") _iio_emu.xml_path = exml _iio_emu.current_device = ctx["hw"] _iio_emu.data_devices = dd print("Starting iio-emu") _iio_emu.start() return ctx def pytest_addoption(parser): group = parser.getgroup("libiio") group.addoption( "--uri", action="store", dest="uri", default=None, help="Set libiio URI to utilize", ) group.addoption( "--scan-verbose", action="store_true", dest="scan_verbose", default=False, help="Print info of found contexts when scanning", ) group.addoption( "--adi-hw-map", action="store_true", dest="adi_hw_map", default=False, help="Use ADI hardware map to determine hardware names based on context drivers", ) group.addoption( "--custom-hw-map", action="store", dest="hw_map", default=None, help="Path to custom hardware map for drivers", ) group.addoption( "--hw", action="store", dest="hw_select", default=None, help="Define hardware name of provided URI. 
Will ignore scan information and requires input URI argument", ) group.addoption( "--skip-scan", action="store_true", dest="skip_scan", default=False, help="Skip avahi scan. This is usually used within CI.", ) group.addoption( "--emu", action="store_true", dest="emu", default=False, help="Enable context emulation with iio-emu.", ) group.addoption( "--emu-xml", action="store", dest="emu_xml", default=False, help="Path or name of built-in XML for back-end context", ) group.addoption( "--emu-xml-dir", action="store", dest="emu_xml_dir", default=False, help="Path to folder with XML files for back-end context", ) def pytest_configure(config): # register an additional marker config.addinivalue_line( "markers", "iio_hardware(hardware): Provide list of hardware applicable to test" ) @pytest.fixture(scope="function") def iio_uri(_iio_emu_func): """URI fixture which provides a string of the target uri of the found board filtered by iio_hardware marker. If no hardware matching the required hardware is found, the test is skipped. If no iio_hardware marker is applied, first context uri is returned. If a list of hardware markers is provided, the first matching is returned. """ if isinstance(_iio_emu_func, dict): return _iio_emu_func["uri"] else: return False @pytest.fixture(scope="function") def single_ctx_desc(request, _contexts): """Contexts description fixture which provides a single dictionary of found board filtered by iio_hardware marker. If no hardware matching the required hardware is found, the test is skipped. If no iio_hardware marker is applied, first context is returned. If a list of hardware markers is provided, the first matching is returned. """ marker = request.node.get_closest_marker("iio_hardware") if _contexts: if not marker or not marker.args: return _contexts[0] hardware = marker.args[0] hardware = hardware if isinstance(hardware, list) else [hardware] if not marker: return _contexts[0] else: for dec in _contexts: if dec["hw"] in marker.args[0]: return dec pytest.skip("No required hardware found") @pytest.fixture(scope="function") def context_desc(request, _contexts): """Contexts description fixture which provides a list of dictionaries of found board filtered by iio_hardware marker.
If no hardware matching the required hardware is found, the test is skipped """ marker = request.node.get_closest_marker("iio_hardware") if _contexts: if not marker or not marker.args: return _contexts hardware = marker.args[0] hardware = hardware if isinstance(hardware, list) else [hardware] if not marker: return _contexts desc = [dec for dec in _contexts if dec["hw"] in marker.args[0]] if desc: return desc pytest.skip("No required hardware found") @pytest.fixture(scope="function") def _iio_emu_func(request, _contexts, _iio_emu): marker = request.node.get_closest_marker("iio_hardware") if _contexts: if not marker or not marker.args: return _contexts[0] hardware = marker.args[0] eskip = marker.args[1] if len(marker.args) > 1 else False if eskip and request.config.getoption("--emu"): pytest.skip("Test not valid in emulation mode") return hardware = hardware if isinstance(hardware, list) else [hardware] if not marker: return _contexts[0] else: for dec in _contexts: if dec["hw"] in marker.args[0]: return handle_iio_emu(dec, request, _iio_emu) pytest.skip("No required hardware found") @pytest.fixture(scope="session", autouse=True) def _iio_emu(request): """Initialization emulation fixture""" if request.config.getoption("--emu"): exml = request.config.getoption("--emu-xml") if exml: if not os.path.exists(exml): raise Exception(f"{exml} not found") emu = iio_emu_manager(xml_path=exml, auto=False) emu.start() yield emu emu.stop() return # Get list of all devices available to emulate map = get_hw_map(request) if not map: raise Exception("No hardware map selected (ex: --adi-hw-map)") hw_w_emulation = {} for hw in map: for field in map[hw]: if isinstance(field, dict) and "emulate" in field: hw_w_emulation[hw] = field if hw in hw_w_emulation: devices = [] for field in map[hw]: if isinstance(field, str): devices.append(field) hw_w_emulation[hw]["devices"] = devices emu = iio_emu_manager(xml_path="auto", auto=True) emu.hw = hw_w_emulation yield emu emu.stop() else: yield None @pytest.fixture(scope="session") def _contexts(request, _iio_emu): """Contexts fixture which provides a list of dictionaries of found boards"""
ctx.attrs} map_tally = {} best = 0 bestDev = "Unknown" # Loop over devices for device in map: drivers_and_attrs = map[device] found = 0 for driver_or_attr in drivers_and_attrs: # Check attributes if isinstance(driver_or_attr, dict): for attr_type in driver_or_attr: # Compare context attrs if attr_type == "ctx_attr": for attr_dict in driver_or_attr["ctx_attr"]: for attr_name in attr_dict: # loop over found and compare to for hw_ctx_attr in ctx_attrs: if ( hw_ctx_attr == attr_name and attr_dict[attr_name] in ctx_attrs[hw_ctx_attr] ): found += 1 # Compare other attribute types ... if attr_type == "dev_attr": pass continue # Loop over drivers for h in hw: d = driver_or_attr.split(",") name = d[0] if h["name"] == name: found += 1 else: continue if len(d) > 1 and h["num_channels"] == int(d[1]): found += 1 map_tally[device] = found if found > best: best = found bestDev = device return bestDev def find_contexts(config, map, request): if request.config.getoption("--skip-scan"): ctxs = None else: ctxs = iio.scan_contexts() if not ctxs: print("\nNo libiio contexts found") return False ctxs_plus_hw = [] for uri in ctxs: info = ctxs[uri] type = uri.split(":")[0] devices = info.split("(")[1].split(")")[0] if config.getoption("--scan-verbose"): string = "\nContext: {}".format(uri) string += "\n\tType: {}".format(type) string += "\n\tInfo: {}".format(info) string += "\n\tDevices: {}".format(devices) print(string) ctx_plus_hw = { "uri": uri, "type": type, "devices": devices, "hw": lookup_hw_from_map(iio.Context(uri), map), } ctxs_plus_hw.append(ctx_plus_hw) else: if config.getoption("--scan-verbose"): print("\nNo libiio contexts found") return ctxs_plus_hw
[]
[]
[ "IIO_EMU_URI" ]
[]
["IIO_EMU_URI"]
python
1
0
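The fixtures above share one idiom: read the iio_hardware marker from the requesting test, normalize it to a list, and skip when no scanned board matches. A reduced sketch of that idiom, where found_hw stands in for the plugin's scanned contexts:

import pytest

found_hw = ["adrv9361", "pluto"]  # illustrative stand-in for a scan result

@pytest.fixture
def hw_uri(request):
    marker = request.node.get_closest_marker("iio_hardware")
    if not marker or not marker.args:
        return "ip:analog.local"  # no filter requested: first context wins
    wanted = marker.args[0]
    wanted = wanted if isinstance(wanted, list) else [wanted]
    if any(hw in wanted for hw in found_hw):
        return "ip:analog.local"
    pytest.skip("No required hardware found")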
cmd/lit-af/lit-af.go
package main import ( "fmt" "os" "path/filepath" "strconv" "strings" "github.com/chzyer/readline" "github.com/fatih/color" flags "github.com/jessevdk/go-flags" "github.com/Rjected/lit/crypto/koblitz" "github.com/Rjected/lit/litrpc" "github.com/Rjected/lit/lnutil" "github.com/Rjected/lit/logging" ) /* Lit-AF The Lit Advanced Functionality interface. This is a text mode interface to lit. It connects over jsonrpc to the a lit node and tells that lit node what to do. The lit node also responds so that lit-af can tell what's going on. lit-gtk does most of the same things with a gtk interface, but there will be some yet-undefined advanced functionality only available in lit-af. May end up using termbox-go */ const ( litHomeDirName = ".lit" historyFilename = "lit-af.history" defaultKeyFileName = "privkey.hex" ) type Command struct { Format string Description string ShortDescription string } type litAfClient struct { RPCClient *litrpc.LndcRpcClient } type litAfConfig struct { Con string `long:"con" description:"host to connect to in the form of [<lnadr>@][<host>][:<port>]"` LitHomeDir string `long:"litHomeDir" description:"directory to save settings"` Port string `long:"autoListenPort" description:"port that the lit is listening to."` Tracker string `long:"tracker" description:"service to use for looking up node addresses"` LogLevel []bool `short:"v" description:"Set verbosity level to verbose (-v), very verbose (-vv) or very very verbose (-vvv)"` } var ( defaultCon = "2448" defaultLitHomeDirName = filepath.Join(os.Getenv("HOME"), litHomeDirName) defaultTracker = "http://hubris.media.mit.edu:46580" ) // newConfigParser returns a new command line flags parser. func newConfigParser(conf *litAfConfig, options flags.Options) *flags.Parser { parser := flags.NewParser(conf, options) return parser } func (lc *litAfClient) litAfSetup(conf litAfConfig) { var err error preParser := newConfigParser(&conf, flags.HelpFlag) _, err = preParser.ParseArgs(os.Args) // parse the cli if err != nil { logging.Fatal(err) } logLevel := 0 if len(conf.LogLevel) == 1 { // -v logLevel = 1 } else if len(conf.LogLevel) == 2 { // -vv logLevel = 2 } else if len(conf.LogLevel) >= 3 { // -vvv logLevel = 3 } logging.SetLogLevel(logLevel) // defaults to zero // create home directory if it does not exist _, err = os.Stat(conf.LitHomeDir) if os.IsNotExist(err) { os.Mkdir(conf.LitHomeDir, 0700) } adr, host, port := lnutil.ParseAdrStringWithPort(conf.Con) if len(conf.Port) > 0 { custom_port, err := strconv.ParseUint(conf.Port, 10, 32) if err != nil { logging.Fatal(err.Error()) } port = uint32(custom_port) } logging.Infof("Adr: %s, Host: %s, Port: %d, LitHomeDir: %s", adr, host, port, conf.LitHomeDir) if litrpc.LndcRpcCanConnectLocallyWithHomeDir(conf.LitHomeDir) && adr == "" && (host == "localhost" || host == "127.0.0.1") { // con parameter was not passed. lc.RPCClient, err = litrpc.NewLocalLndcRpcClientWithHomeDirAndPort(conf.LitHomeDir, port) if err != nil { logging.Fatal(err.Error()) } } else { // con parameter passed. 
if !lnutil.LitAdrOK(adr) { logging.Fatal("lit address passed in -con parameter is not valid") } keyFilePath := filepath.Join(conf.LitHomeDir, "lit-af-key.hex") privKey, err := lnutil.ReadKeyFile(keyFilePath) if err != nil { logging.Fatal(err.Error()) } key, _ := koblitz.PrivKeyFromBytes(koblitz.S256(), privKey[:]) if adr != "" && strings.HasPrefix(adr, "ln1") && host == "" { ipv4, _, err := lnutil.Lookup(adr, conf.Tracker, "") if err != nil { logging.Fatalf("Error looking up address on the tracker: %s", err) } else { adr = fmt.Sprintf("%s@%s", adr, ipv4) } } else { adr = fmt.Sprintf("%s@%s:%d", adr, host, port) } lc.RPCClient, err = litrpc.NewLndcRpcClient(adr, key) if err != nil { logging.Fatal(err.Error()) } } } // for now just testing how to connect and get messages back and forth func main() { var err error lc := new(litAfClient) conf := litAfConfig{ Con: defaultCon, LitHomeDir: defaultLitHomeDirName, Tracker: defaultTracker, } lc.litAfSetup(conf) // setup lit-af to start rl, err := readline.NewEx(&readline.Config{ Prompt: lnutil.Prompt("lit-af") + lnutil.White("# "), HistoryFile: filepath.Join(conf.LitHomeDir, historyFilename), AutoComplete: lc.NewAutoCompleter(), }) if err != nil { logging.Fatal(err) } defer rl.Close() // main shell loop for { // setup reader with max 4K input chars msg, err := rl.Readline() if err != nil { break } msg = strings.TrimSpace(msg) if len(msg) == 0 { continue } rl.SaveHistory(msg) cmdslice := strings.Fields(msg) // chop input up on whitespace fmt.Fprintf(color.Output, "entered command: %s\n", msg) // immediate feedback err = lc.Shellparse(cmdslice) if err != nil { // only error should be user exit logging.Fatal(err) } } } func (lc *litAfClient) Call(serviceMethod string, args interface{}, reply interface{}) error { return lc.RPCClient.Call(serviceMethod, args, reply) }
[ "\"HOME\"" ]
[]
[ "HOME" ]
[]
["HOME"]
go
1
0
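The LogLevel []bool field above is the jessevdk/go-flags idiom for counted verbosity: each -v appends one element, so -vvv yields a slice of length 3. A self-contained sketch of just that flag:

package main

import (
	"fmt"
	"os"

	flags "github.com/jessevdk/go-flags"
)

type opts struct {
	Verbose []bool `short:"v" description:"verbosity (-v, -vv, -vvv)"`
}

func main() {
	var o opts
	if _, err := flags.NewParser(&o, flags.Default).ParseArgs(os.Args[1:]); err != nil {
		os.Exit(1)
	}
	fmt.Println("log level:", len(o.Verbose)) // e.g. -vv prints 2
}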
commands/cmdInfo.py
""" cmdInfo.py Author: Fletcher Haynes This command allows the user to see detailed information about a template, instance, room, or portal. """ import MudCommand import MudWorld import columnize import string import os class cmdInfo(MudCommand.MudCommand): def __init__(self): MudCommand.MudCommand.__init__(self) self.info['cmdName'] = "info" self.info['helpText'] = '''Displays detailed information about a template, instance, room or portal.''' self.info['useExample'] = "info ctemplate/cinstance/itemplate/iinstance ID" def process(self, player, args=''): argList = args.split(" ", 1) t = None if len(argList) != 2: player.writeWithPrompt("Proper format is: info ctemplate/cinstance/itemplate/iinstance ID") return if argList[0].lower() == 'cinstance': for eachChar in player.getRoomRef().getCharacters().values(): try: if eachChar.getId() == int(argList[1]): t = eachChar break except: player.writeWithPrompt("IDs must be numbers only!") return if t == None: player.writeWithPrompt("That character instance was not found.") return player.writePlain("<red>[Name]: <r> "+t.getName()+'\r\n') player.writePlain("<red>[ID ]: <r>"+str(t.getId())+'\r\n') player.writePlain("<red>[TID ]: <r>"+str(t.getTemplateId())+'\r\n') player.writePlain("\r\n<green>[STATISTICS]") statList = t.getStats().keys() newList = [] for eachItem in statList: eachItem = eachItem + ": "+str(t.getStat(eachItem)) newList.append(eachItem) columnize.columnize(player, newList, 4) cmdList = t.getCommands() player.writePlain("\r\n\r\n<green>[COMMANDS]<r>") columnize.columnize(player, cmdList, 6) player.writePlain('\r\n\r\n<green>[LOGICS]<r>') logicList = t.getLogics().keys() columnize.columnize(player, logicList, 6) player.writeWithPrompt("\r\nEND INFO") elif argList[0].lower() == 'ctemplate': t = MudWorld.world.templateDb.findTemplateById('character', int(argList[1])) if t == None: player.writeWithPrompt("That template was not found.") return player.writePlain("<red>[Name]: <r> "+t.getName()+'\r\n') player.writePlain("<red>[ID ]: <r>"+str(t.getId())+'\r\n') player.writePlain("\r\n<green>[STATISTICS]") statList = t.getStats().keys() newList = [] for eachItem in statList: newList.append(eachItem + ": "+str(t.getStat(eachItem))) columnize.columnize(player, newList, 4) cmdList = t.getCommands() player.writePlain("\r\n\r\n<green>[COMMANDS]<r>") columnize.columnize(player, cmdList, 6) player.writePlain('\r\n\r\n<green>[LOGICS]<r>') logicList = t.getLogics().keys() columnize.columnize(player, logicList, 6) player.writeWithPrompt("\r\nEND INFO") elif argList[0].lower() == 'itemplate': t = MudWorld.world.templateDb.findTemplateById('item', int(argList[1])) if t == None: player.writeWithPrompt("That template was not found.") return player.writePlain("<red>[Name]: <r> "+t.getName()+'\r\n') player.writePlain("<red>[ID ]: <r>"+str(t.getId())+'\r\n') player.writePlain("\r\n<green>[STATISTICS]") statList = t.getStats().keys() newList = [] for eachItem in statList: newList.append(eachItem + ": "+str(t.getStat(eachItem))) columnize.columnize(player, newList, 4) cmdList = t.getCommands() player.writePlain("\r\n\r\n<green>[COMMANDS]<r>") columnize.columnize(player, cmdList, 6) player.writePlain('\r\n\r\n<green>[LOGICS]<r>') logicList = t.getLogics().keys() columnize.columnize(player, logicList, 6) player.writeWithPrompt("\r\nEND INFO") elif argList[0].lower() == 'iinstance': for eachItem in player.getRoomRef().getItems().values(): try: if eachItem.getId() == int(argList[1]): t = eachItem break except: player.writeWithPrompt("IDs must be numbers only!") return if t == None:
player.writeWithPrompt("An item with that ID was not found in this room.") return player.writePlain("<red>[Name]: <r> "+t.getName()+'\r\n') player.writePlain("<red>[ID ]: <r>"+str(t.getId())+'\r\n') player.writePlain("<red>[TID ]: <r>"+str(t.getTemplateId())+'\r\n') player.writePlain("\r\n<green>[STATISTICS]") statList = t.getStats().keys() newList = [] for eachItem in statList: newList.append(eachItem + ": "+str(t.getStat(eachItem))) columnize.columnize(player, newList, 4) cmdList = t.getCommands() player.writePlain("\r\n\r\n<green>[COMMANDS]<r>") columnize.columnize(player, cmdList, 6) player.writePlain('\r\n\r\n<green>[LOGICS]<r>') logicList = t.getLogics().keys() columnize.columnize(player, logicList, 6) player.writeWithPrompt("\r\nEND INFO") else: player.writeWithPrompt("Invalid type. Please use iinstance, itemplate, cinstance, or ctemplate.") return
[]
[]
[]
[]
[]
python
null
null
null
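cmdInfo.process repeats the same split-and-validate steps in every branch. A toy sketch that hoists that shared parsing into one helper; the function name and return shape are illustrative, not part of the MUD codebase:

def parse_info_args(args):
    parts = args.split(" ", 1)
    if len(parts) != 2:
        return None, "Proper format is: info ctemplate/cinstance/itemplate/iinstance ID"
    kind = parts[0].lower()
    if kind not in ("ctemplate", "cinstance", "itemplate", "iinstance"):
        return None, "Invalid type. Please use iinstance, itemplate, cinstance, or ctemplate."
    try:
        return (kind, int(parts[1])), None
    except ValueError:
        return None, "IDs must be numbers only!"

print(parse_info_args("cinstance 42"))   # (('cinstance', 42), None)
print(parse_info_args("cinstance abc"))  # (None, 'IDs must be numbers only!')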
config/envoyconfig/routes.go
package envoyconfig import ( "encoding/json" "fmt" "net/url" "sort" envoy_config_core_v3 "github.com/envoyproxy/go-control-plane/envoy/config/core/v3" envoy_config_route_v3 "github.com/envoyproxy/go-control-plane/envoy/config/route/v3" envoy_type_matcher_v3 "github.com/envoyproxy/go-control-plane/envoy/type/matcher/v3" "github.com/golang/protobuf/ptypes/any" "github.com/golang/protobuf/ptypes/wrappers" "google.golang.org/protobuf/types/known/durationpb" "google.golang.org/protobuf/types/known/structpb" "google.golang.org/protobuf/types/known/wrapperspb" "github.com/pomerium/pomerium/config" "github.com/pomerium/pomerium/internal/httputil" "github.com/pomerium/pomerium/internal/urlutil" ) const ( httpCluster = "pomerium-control-plane-http" ) func (b *Builder) buildGRPCRoutes() ([]*envoy_config_route_v3.Route, error) { action := &envoy_config_route_v3.Route_Route{ Route: &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: "pomerium-control-plane-grpc", }, }, } return []*envoy_config_route_v3.Route{{ Name: "pomerium-grpc", Match: &envoy_config_route_v3.RouteMatch{ PathSpecifier: &envoy_config_route_v3.RouteMatch_Prefix{ Prefix: "/", }, Grpc: &envoy_config_route_v3.RouteMatch_GrpcRouteMatchOptions{}, }, Action: action, TypedPerFilterConfig: map[string]*any.Any{ "envoy.filters.http.ext_authz": disableExtAuthz, }, }}, nil } func (b *Builder) buildPomeriumHTTPRoutes(options *config.Options, domain string) ([]*envoy_config_route_v3.Route, error) { var routes []*envoy_config_route_v3.Route // if this is the pomerium proxy in front of the the authenticate service, don't add // these routes since they will be handled by authenticate isFrontingAuthenticate, err := isProxyFrontingAuthenticate(options, domain) if err != nil { return nil, err } if !isFrontingAuthenticate { // enable ext_authz r, err := b.buildControlPlanePathRoute("/.pomerium/jwt", true) if err != nil { return nil, err } routes = append(routes, r) // disable ext_authz and passthrough to proxy handlers r, err = b.buildControlPlanePathRoute("/ping", false) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePathRoute("/healthz", false) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePathRoute("/.pomerium", false) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePrefixRoute("/.pomerium/", false) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePathRoute("/.well-known/pomerium", false) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePrefixRoute("/.well-known/pomerium/", false) if err != nil { return nil, err } routes = append(routes, r) // per #837, only add robots.txt if there are no unauthenticated routes if !hasPublicPolicyMatchingURL(options, url.URL{Scheme: "https", Host: domain, Path: "/robots.txt"}) { r, err := b.buildControlPlanePathRoute("/robots.txt", false) if err != nil { return nil, err } routes = append(routes, r) } } // if we're handling authentication, add the oauth2 callback url authenticateURL, err := options.GetAuthenticateURL() if err != nil { return nil, err } if config.IsAuthenticate(options.Services) && hostMatchesDomain(authenticateURL, domain) { r, err := b.buildControlPlanePathRoute(options.AuthenticateCallbackPath, false) if err != nil { return nil, err } routes = append(routes, r) } // if we're the proxy and this is the forward-auth url forwardAuthURL, err := 
options.GetForwardAuthURL() if err != nil { return nil, err } if config.IsProxy(options.Services) && hostMatchesDomain(forwardAuthURL, domain) { // disable ext_authz and pass request to proxy handlers that enable authN flow r, err := b.buildControlPlanePathAndQueryRoute("/verify", []string{urlutil.QueryForwardAuthURI, urlutil.QuerySessionEncrypted, urlutil.QueryRedirectURI}) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePathAndQueryRoute("/", []string{urlutil.QueryForwardAuthURI, urlutil.QuerySessionEncrypted, urlutil.QueryRedirectURI}) if err != nil { return nil, err } routes = append(routes, r) r, err = b.buildControlPlanePathAndQueryRoute("/", []string{urlutil.QueryForwardAuthURI}) if err != nil { return nil, err } routes = append(routes, r) // otherwise, enforce ext_authz; pass all other requests through to an upstream // handler that will simply respond with http status 200 / OK indicating that // the fronting forward-auth proxy can continue. r, err = b.buildControlPlaneProtectedPrefixRoute("/") if err != nil { return nil, err } routes = append(routes, r) } return routes, nil } func (b *Builder) buildControlPlaneProtectedPrefixRoute(prefix string) (*envoy_config_route_v3.Route, error) { return &envoy_config_route_v3.Route{ Name: "pomerium-protected-prefix-" + prefix, Match: &envoy_config_route_v3.RouteMatch{ PathSpecifier: &envoy_config_route_v3.RouteMatch_Prefix{Prefix: prefix}, }, Action: &envoy_config_route_v3.Route_Route{ Route: &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: httpCluster, }, }, }, }, nil } func (b *Builder) buildControlPlanePathAndQueryRoute(path string, queryparams []string) (*envoy_config_route_v3.Route, error) { var queryParameterMatchers []*envoy_config_route_v3.QueryParameterMatcher for _, q := range queryparams { queryParameterMatchers = append(queryParameterMatchers, &envoy_config_route_v3.QueryParameterMatcher{ Name: q, QueryParameterMatchSpecifier: &envoy_config_route_v3.QueryParameterMatcher_PresentMatch{PresentMatch: true}, }) } return &envoy_config_route_v3.Route{ Name: "pomerium-path-and-query" + path, Match: &envoy_config_route_v3.RouteMatch{ PathSpecifier: &envoy_config_route_v3.RouteMatch_Path{Path: path}, QueryParameters: queryParameterMatchers, }, Action: &envoy_config_route_v3.Route_Route{ Route: &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: httpCluster, }, }, }, TypedPerFilterConfig: map[string]*any.Any{ "envoy.filters.http.ext_authz": disableExtAuthz, }, }, nil } func (b *Builder) buildControlPlanePathRoute(path string, protected bool) (*envoy_config_route_v3.Route, error) { r := &envoy_config_route_v3.Route{ Name: "pomerium-path-" + path, Match: &envoy_config_route_v3.RouteMatch{ PathSpecifier: &envoy_config_route_v3.RouteMatch_Path{Path: path}, }, Action: &envoy_config_route_v3.Route_Route{ Route: &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: httpCluster, }, }, }, } if !protected { r.TypedPerFilterConfig = map[string]*any.Any{ "envoy.filters.http.ext_authz": disableExtAuthz, } } return r, nil } func (b *Builder) buildControlPlanePrefixRoute(prefix string, protected bool) (*envoy_config_route_v3.Route, error) { r := &envoy_config_route_v3.Route{ Name: "pomerium-prefix-" + prefix, Match: &envoy_config_route_v3.RouteMatch{ PathSpecifier: &envoy_config_route_v3.RouteMatch_Prefix{Prefix: prefix}, }, Action: 
&envoy_config_route_v3.Route_Route{ Route: &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: httpCluster, }, }, }, } if !protected { r.TypedPerFilterConfig = map[string]*any.Any{ "envoy.filters.http.ext_authz": disableExtAuthz, } } return r, nil } // getClusterID returns a cluster ID var getClusterID = func(policy *config.Policy) string { prefix := getClusterStatsName(policy) if prefix == "" { prefix = "route" } id, _ := policy.RouteID() return fmt.Sprintf("%s-%x", prefix, id) } // getClusterStatsName returns human readable name that would be used by envoy to emit statistics, available as envoy_cluster_name label func getClusterStatsName(policy *config.Policy) string { if policy.EnvoyOpts != nil && policy.EnvoyOpts.Name != "" { return policy.EnvoyOpts.Name } return "" } func (b *Builder) buildPolicyRoutes(options *config.Options, domain string) ([]*envoy_config_route_v3.Route, error) { var routes []*envoy_config_route_v3.Route for i, p := range options.GetAllPolicies() { policy := p if !hostMatchesDomain(policy.Source.URL, domain) { continue } match := mkRouteMatch(&policy) envoyRoute := &envoy_config_route_v3.Route{ Name: fmt.Sprintf("policy-%d", i), Match: match, Metadata: &envoy_config_core_v3.Metadata{}, RequestHeadersToAdd: toEnvoyHeaders(policy.SetRequestHeaders), RequestHeadersToRemove: getRequestHeadersToRemove(options, &policy), ResponseHeadersToAdd: toEnvoyHeaders(policy.SetResponseHeaders), } if policy.Redirect != nil { action, err := b.buildPolicyRouteRedirectAction(policy.Redirect) if err != nil { return nil, err } envoyRoute.Action = &envoy_config_route_v3.Route_Redirect{Redirect: action} } else { action, err := b.buildPolicyRouteRouteAction(options, &policy) if err != nil { return nil, err } envoyRoute.Action = &envoy_config_route_v3.Route_Route{Route: action} } luaMetadata := map[string]*structpb.Value{ "rewrite_response_headers": getRewriteHeadersMetadata(policy.RewriteResponseHeaders), } // disable authentication entirely when the proxy is fronting authenticate isFrontingAuthenticate, err := isProxyFrontingAuthenticate(options, domain) if err != nil { return nil, err } if isFrontingAuthenticate { envoyRoute.TypedPerFilterConfig = map[string]*any.Any{ "envoy.filters.http.ext_authz": disableExtAuthz, } } else { luaMetadata["remove_pomerium_cookie"] = &structpb.Value{ Kind: &structpb.Value_StringValue{ StringValue: options.CookieName, }, } luaMetadata["remove_pomerium_authorization"] = &structpb.Value{ Kind: &structpb.Value_BoolValue{ BoolValue: true, }, } luaMetadata["remove_impersonate_headers"] = &structpb.Value{ Kind: &structpb.Value_BoolValue{ BoolValue: policy.IsForKubernetes(), }, } } if policy.IsForKubernetes() { policyID, _ := policy.RouteID() for _, hdr := range b.reproxy.GetPolicyIDHeaders(policyID) { envoyRoute.RequestHeadersToAdd = append(envoyRoute.RequestHeadersToAdd, &envoy_config_core_v3.HeaderValueOption{ Header: &envoy_config_core_v3.HeaderValue{ Key: hdr[0], Value: hdr[1], }, Append: wrapperspb.Bool(false), }) } } envoyRoute.Metadata.FilterMetadata = map[string]*structpb.Struct{ "envoy.filters.http.lua": {Fields: luaMetadata}, } routes = append(routes, envoyRoute) } return routes, nil } func (b *Builder) buildPolicyRouteRedirectAction(r *config.PolicyRedirect) (*envoy_config_route_v3.RedirectAction, error) { action := &envoy_config_route_v3.RedirectAction{} switch { case r.HTTPSRedirect != nil: action.SchemeRewriteSpecifier = &envoy_config_route_v3.RedirectAction_HttpsRedirect{ HttpsRedirect: 
*r.HTTPSRedirect, } case r.SchemeRedirect != nil: action.SchemeRewriteSpecifier = &envoy_config_route_v3.RedirectAction_SchemeRedirect{ SchemeRedirect: *r.SchemeRedirect, } } if r.HostRedirect != nil { action.HostRedirect = *r.HostRedirect } if r.PortRedirect != nil { action.PortRedirect = *r.PortRedirect } switch { case r.PathRedirect != nil: action.PathRewriteSpecifier = &envoy_config_route_v3.RedirectAction_PathRedirect{ PathRedirect: *r.PathRedirect, } case r.PrefixRewrite != nil: action.PathRewriteSpecifier = &envoy_config_route_v3.RedirectAction_PrefixRewrite{ PrefixRewrite: *r.PrefixRewrite, } } if r.ResponseCode != nil { action.ResponseCode = envoy_config_route_v3.RedirectAction_RedirectResponseCode(*r.ResponseCode) } if r.StripQuery != nil { action.StripQuery = *r.StripQuery } return action, nil } func (b *Builder) buildPolicyRouteRouteAction(options *config.Options, policy *config.Policy) (*envoy_config_route_v3.RouteAction, error) { clusterName := getClusterID(policy) // kubernetes requests are sent to the http control plane to be reproxied if policy.IsForKubernetes() { clusterName = httpCluster } routeTimeout := getRouteTimeout(options, policy) idleTimeout := getRouteIdleTimeout(policy) prefixRewrite, regexRewrite := getRewriteOptions(policy) upgradeConfigs := []*envoy_config_route_v3.RouteAction_UpgradeConfig{ { UpgradeType: "websocket", Enabled: &wrappers.BoolValue{Value: policy.AllowWebsockets}, }, { UpgradeType: "spdy/3.1", Enabled: &wrappers.BoolValue{Value: policy.AllowSPDY}, }, } if urlutil.IsTCP(policy.Source.URL) { upgradeConfigs = append(upgradeConfigs, &envoy_config_route_v3.RouteAction_UpgradeConfig{ UpgradeType: "CONNECT", Enabled: &wrappers.BoolValue{Value: true}, ConnectConfig: &envoy_config_route_v3.RouteAction_UpgradeConfig_ConnectConfig{}, }) } action := &envoy_config_route_v3.RouteAction{ ClusterSpecifier: &envoy_config_route_v3.RouteAction_Cluster{ Cluster: clusterName, }, UpgradeConfigs: upgradeConfigs, HostRewriteSpecifier: &envoy_config_route_v3.RouteAction_AutoHostRewrite{ AutoHostRewrite: &wrappers.BoolValue{Value: !policy.PreserveHostHeader}, }, Timeout: routeTimeout, IdleTimeout: idleTimeout, PrefixRewrite: prefixRewrite, RegexRewrite: regexRewrite, } setHostRewriteOptions(policy, action) return action, nil } func mkEnvoyHeader(k, v string) *envoy_config_core_v3.HeaderValueOption { return &envoy_config_core_v3.HeaderValueOption{ Header: &envoy_config_core_v3.HeaderValue{ Key: k, Value: v, }, Append: &wrappers.BoolValue{Value: false}, } } func toEnvoyHeaders(headers map[string]string) []*envoy_config_core_v3.HeaderValueOption { var ks []string for k := range headers { ks = append(ks, k) } sort.Strings(ks) envoyHeaders := make([]*envoy_config_core_v3.HeaderValueOption, 0, len(headers)) for _, k := range ks { envoyHeaders = append(envoyHeaders, mkEnvoyHeader(k, headers[k])) } return envoyHeaders } func mkRouteMatch(policy *config.Policy) *envoy_config_route_v3.RouteMatch { match := &envoy_config_route_v3.RouteMatch{} switch { case urlutil.IsTCP(policy.Source.URL): match.PathSpecifier = &envoy_config_route_v3.RouteMatch_ConnectMatcher_{ ConnectMatcher: &envoy_config_route_v3.RouteMatch_ConnectMatcher{}, } case policy.Regex != "": match.PathSpecifier = &envoy_config_route_v3.RouteMatch_SafeRegex{ SafeRegex: &envoy_type_matcher_v3.RegexMatcher{ EngineType: &envoy_type_matcher_v3.RegexMatcher_GoogleRe2{ GoogleRe2: &envoy_type_matcher_v3.RegexMatcher_GoogleRE2{}, }, Regex: policy.Regex, }, } case policy.Path != "": match.PathSpecifier = 
&envoy_config_route_v3.RouteMatch_Path{Path: policy.Path} case policy.Prefix != "": match.PathSpecifier = &envoy_config_route_v3.RouteMatch_Prefix{Prefix: policy.Prefix} default: match.PathSpecifier = &envoy_config_route_v3.RouteMatch_Prefix{Prefix: "/"} } return match } func getRequestHeadersToRemove(options *config.Options, policy *config.Policy) []string { requestHeadersToRemove := policy.RemoveRequestHeaders if !policy.PassIdentityHeaders { requestHeadersToRemove = append(requestHeadersToRemove, httputil.HeaderPomeriumJWTAssertion) for _, claim := range options.JWTClaimsHeaders { requestHeadersToRemove = append(requestHeadersToRemove, httputil.PomeriumJWTHeaderName(claim)) } } // remove these headers to prevent a user from re-proxying requests through the control plane requestHeadersToRemove = append(requestHeadersToRemove, httputil.HeaderPomeriumReproxyPolicy, httputil.HeaderPomeriumReproxyPolicyHMAC, ) return requestHeadersToRemove } func getRouteTimeout(options *config.Options, policy *config.Policy) *durationpb.Duration { var routeTimeout *durationpb.Duration if policy.UpstreamTimeout != 0 { routeTimeout = durationpb.New(policy.UpstreamTimeout) } else if shouldDisableTimeouts(policy) { routeTimeout = durationpb.New(0) } else { routeTimeout = durationpb.New(options.DefaultUpstreamTimeout) } return routeTimeout } func getRouteIdleTimeout(policy *config.Policy) *durationpb.Duration { var idleTimeout *durationpb.Duration if shouldDisableTimeouts(policy) { idleTimeout = durationpb.New(0) } return idleTimeout } func shouldDisableTimeouts(policy *config.Policy) bool { return policy.AllowWebsockets || urlutil.IsTCP(policy.Source.URL) || policy.IsForKubernetes() // disable for kubernetes so that tailing logs works (#2182) } func getRewriteOptions(policy *config.Policy) (prefixRewrite string, regexRewrite *envoy_type_matcher_v3.RegexMatchAndSubstitute) { if policy.PrefixRewrite != "" { prefixRewrite = policy.PrefixRewrite } else if policy.RegexRewritePattern != "" { regexRewrite = &envoy_type_matcher_v3.RegexMatchAndSubstitute{ Pattern: &envoy_type_matcher_v3.RegexMatcher{ EngineType: &envoy_type_matcher_v3.RegexMatcher_GoogleRe2{ GoogleRe2: &envoy_type_matcher_v3.RegexMatcher_GoogleRE2{}, }, Regex: policy.RegexRewritePattern, }, Substitution: policy.RegexRewriteSubstitution, } } else if len(policy.To) > 0 && policy.To[0].URL.Path != "" { prefixRewrite = policy.To[0].URL.Path } return prefixRewrite, regexRewrite } func setHostRewriteOptions(policy *config.Policy, action *envoy_config_route_v3.RouteAction) { switch { case policy.HostRewrite != "": action.HostRewriteSpecifier = &envoy_config_route_v3.RouteAction_HostRewriteLiteral{ HostRewriteLiteral: policy.HostRewrite, } case policy.HostRewriteHeader != "": action.HostRewriteSpecifier = &envoy_config_route_v3.RouteAction_HostRewriteHeader{ HostRewriteHeader: policy.HostRewriteHeader, } case policy.HostPathRegexRewritePattern != "": action.HostRewriteSpecifier = &envoy_config_route_v3.RouteAction_HostRewritePathRegex{ HostRewritePathRegex: &envoy_type_matcher_v3.RegexMatchAndSubstitute{ Pattern: &envoy_type_matcher_v3.RegexMatcher{ EngineType: &envoy_type_matcher_v3.RegexMatcher_GoogleRe2{ GoogleRe2: &envoy_type_matcher_v3.RegexMatcher_GoogleRE2{}, }, Regex: policy.HostPathRegexRewritePattern, }, Substitution: policy.HostPathRegexRewriteSubstitution, }, } case policy.PreserveHostHeader: action.HostRewriteSpecifier = &envoy_config_route_v3.RouteAction_AutoHostRewrite{ AutoHostRewrite: wrapperspb.Bool(false), } default: 
action.HostRewriteSpecifier = &envoy_config_route_v3.RouteAction_AutoHostRewrite{ AutoHostRewrite: wrapperspb.Bool(true), } } } func hasPublicPolicyMatchingURL(options *config.Options, requestURL url.URL) bool { for _, policy := range options.GetAllPolicies() { if policy.AllowPublicUnauthenticatedAccess && policy.Matches(requestURL) { return true } } return false } func isProxyFrontingAuthenticate(options *config.Options, domain string) (bool, error) { authenticateURL, err := options.GetAuthenticateURL() if err != nil { return false, err } if !config.IsAuthenticate(options.Services) && hostMatchesDomain(authenticateURL, domain) { return true, nil } return false, nil } func getRewriteHeadersMetadata(headers []config.RewriteHeader) *structpb.Value { if len(headers) == 0 { return &structpb.Value{ Kind: &structpb.Value_ListValue{ ListValue: new(structpb.ListValue), }, } } var obj interface{} bs, _ := json.Marshal(headers) _ = json.Unmarshal(bs, &obj) v, _ := structpb.NewValue(obj) return v }
[]
[]
[]
[]
[]
go
null
null
null
vendor/github.com/docker/docker/integration-cli/daemon/daemon.go
package daemon import ( "bytes" "encoding/json" "fmt" "io" "io/ioutil" "net/http" "os" "os/exec" "path/filepath" "strconv" "strings" "time" "github.com/docker/docker/api" "github.com/docker/docker/api/types" "github.com/docker/docker/api/types/events" "github.com/docker/docker/client" "github.com/docker/docker/integration-cli/checker" "github.com/docker/docker/integration-cli/request" "github.com/docker/docker/opts" "github.com/docker/docker/pkg/ioutils" "github.com/docker/docker/pkg/stringid" "github.com/docker/go-connections/sockets" "github.com/docker/go-connections/tlsconfig" "github.com/go-check/check" "github.com/gotestyourself/gotestyourself/icmd" "github.com/pkg/errors" "github.com/stretchr/testify/require" "golang.org/x/net/context" ) type testingT interface { require.TestingT logT Fatalf(string, ...interface{}) } type logT interface { Logf(string, ...interface{}) } // SockRoot holds the path of the default docker integration daemon socket var SockRoot = filepath.Join(os.TempDir(), "docker-integration") var errDaemonNotStarted = errors.New("daemon not started") // Daemon represents a Docker daemon for the testing framework. type Daemon struct { GlobalFlags []string Root string Folder string Wait chan error UseDefaultHost bool UseDefaultTLSHost bool id string logFile *os.File stdin io.WriteCloser stdout, stderr io.ReadCloser cmd *exec.Cmd storageDriver string userlandProxy bool execRoot string experimental bool dockerBinary string dockerdBinary string log logT } // Config holds docker daemon integration configuration type Config struct { Experimental bool } type clientConfig struct { transport *http.Transport scheme string addr string } // New returns a Daemon instance to be used for testing. // This will create a directory such as d123456789 in the folder specified by $DOCKER_INTEGRATION_DAEMON_DEST or $DEST. // The daemon will not automatically start. func New(t testingT, dockerBinary string, dockerdBinary string, config Config) *Daemon { dest := os.Getenv("DOCKER_INTEGRATION_DAEMON_DEST") if dest == "" { dest = os.Getenv("DEST") } if dest == "" { t.Fatalf("Please set the DOCKER_INTEGRATION_DAEMON_DEST or the DEST environment variable") } if err := os.MkdirAll(SockRoot, 0700); err != nil { t.Fatalf("could not create daemon socket root") } id := fmt.Sprintf("d%s", stringid.TruncateID(stringid.GenerateRandomID())) dir := filepath.Join(dest, id) daemonFolder, err := filepath.Abs(dir) if err != nil { t.Fatalf("Could not make %q an absolute path", dir) } daemonRoot := filepath.Join(daemonFolder, "root") if err := os.MkdirAll(daemonRoot, 0755); err != nil { t.Fatalf("Could not create daemon root %q", dir) } userlandProxy := true if env := os.Getenv("DOCKER_USERLANDPROXY"); env != "" { if val, err := strconv.ParseBool(env); err == nil { userlandProxy = val } } return &Daemon{ id: id, Folder: daemonFolder, Root: daemonRoot, storageDriver: os.Getenv("DOCKER_GRAPHDRIVER"), userlandProxy: userlandProxy, execRoot: filepath.Join(os.TempDir(), "docker-execroot", id), dockerBinary: dockerBinary, dockerdBinary: dockerdBinary, experimental: config.Experimental, log: t, } } // RootDir returns the root directory of the daemon. func (d *Daemon) RootDir() string { return d.Root } // ID returns the generated id of the daemon func (d *Daemon) ID() string { return d.id } // StorageDriver returns the configured storage driver of the daemon func (d *Daemon) StorageDriver() string { return d.storageDriver } // CleanupExecRoot cleans the daemon exec root (network namespaces, ...)
func (d *Daemon) CleanupExecRoot(c *check.C) { cleanupExecRoot(c, d.execRoot) } func (d *Daemon) getClientConfig() (*clientConfig, error) { var ( transport *http.Transport scheme string addr string proto string ) if d.UseDefaultTLSHost { option := &tlsconfig.Options{ CAFile: "fixtures/https/ca.pem", CertFile: "fixtures/https/client-cert.pem", KeyFile: "fixtures/https/client-key.pem", } tlsConfig, err := tlsconfig.Client(*option) if err != nil { return nil, err } transport = &http.Transport{ TLSClientConfig: tlsConfig, } addr = fmt.Sprintf("%s:%d", opts.DefaultHTTPHost, opts.DefaultTLSHTTPPort) scheme = "https" proto = "tcp" } else if d.UseDefaultHost { addr = opts.DefaultUnixSocket proto = "unix" scheme = "http" transport = &http.Transport{} } else { addr = d.sockPath() proto = "unix" scheme = "http" transport = &http.Transport{} } if err := sockets.ConfigureTransport(transport, proto, addr); err != nil { return nil, err } transport.DisableKeepAlives = true return &clientConfig{ transport: transport, scheme: scheme, addr: addr, }, nil } // Start starts the daemon and returns once it is ready to receive requests. func (d *Daemon) Start(t testingT, args ...string) { if err := d.StartWithError(args...); err != nil { t.Fatalf("Error starting daemon with arguments: %v", args) } } // StartWithError starts the daemon and returns once it is ready to receive requests. // It returns an error in case it couldn't start. func (d *Daemon) StartWithError(args ...string) error { logFile, err := os.OpenFile(filepath.Join(d.Folder, "docker.log"), os.O_RDWR|os.O_CREATE|os.O_APPEND, 0600) if err != nil { return errors.Wrapf(err, "[%s] Could not create %s/docker.log", d.id, d.Folder) } return d.StartWithLogFile(logFile, args...) } // StartWithLogFile will start the daemon and attach its streams to a given file. func (d *Daemon) StartWithLogFile(out *os.File, providedArgs ...string) error { dockerdBinary, err := exec.LookPath(d.dockerdBinary) if err != nil { return errors.Wrapf(err, "[%s] could not find dockerd binary in $PATH", d.id) } args := append(d.GlobalFlags, "--containerd", "/var/run/docker/containerd/docker-containerd.sock", "--data-root", d.Root, "--exec-root", d.execRoot, "--pidfile", fmt.Sprintf("%s/docker.pid", d.Folder), fmt.Sprintf("--userland-proxy=%t", d.userlandProxy), ) if d.experimental { args = append(args, "--experimental", "--init") } if !(d.UseDefaultHost || d.UseDefaultTLSHost) { args = append(args, []string{"--host", d.Sock()}...) } if root := os.Getenv("DOCKER_REMAP_ROOT"); root != "" { args = append(args, []string{"--userns-remap", root}...) } // If we don't explicitly set the log-level or debug flag (-D) then // turn on debug mode foundLog := false foundSd := false for _, a := range providedArgs { if strings.Contains(a, "--log-level") || strings.Contains(a, "-D") || strings.Contains(a, "--debug") { foundLog = true } if strings.Contains(a, "--storage-driver") { foundSd = true } } if !foundLog { args = append(args, "--debug") } if d.storageDriver != "" && !foundSd { args = append(args, "--storage-driver", d.storageDriver) } args = append(args, providedArgs...) d.cmd = exec.Command(dockerdBinary, args...)
d.cmd.Env = append(os.Environ(), "DOCKER_SERVICE_PREFER_OFFLINE_IMAGE=1") d.cmd.Stdout = out d.cmd.Stderr = out d.logFile = out if err := d.cmd.Start(); err != nil { return errors.Errorf("[%s] could not start daemon: %v", d.id, err) } wait := make(chan error) go func() { wait <- d.cmd.Wait() d.log.Logf("[%s] exiting daemon", d.id) close(wait) }() d.Wait = wait tick := time.Tick(500 * time.Millisecond) // make sure daemon is ready to receive requests startTime := time.Now().Unix() for { d.log.Logf("[%s] waiting for daemon to start", d.id) if time.Now().Unix()-startTime > 5 { // After 5 seconds, give up return errors.Errorf("[%s] daemon did not start within 5 seconds", d.id) } select { case <-time.After(2 * time.Second): return errors.Errorf("[%s] timeout: daemon does not respond", d.id) case <-tick: clientConfig, err := d.getClientConfig() if err != nil { return err } client := &http.Client{ Transport: clientConfig.transport, } req, err := http.NewRequest("GET", "/_ping", nil) if err != nil { return errors.Wrapf(err, "[%s] could not create new request", d.id) } req.URL.Host = clientConfig.addr req.URL.Scheme = clientConfig.scheme resp, err := client.Do(req) if err != nil { continue } resp.Body.Close() if resp.StatusCode != http.StatusOK { d.log.Logf("[%s] received status != 200 OK: %s\n", d.id, resp.Status) } d.log.Logf("[%s] daemon started\n", d.id) d.Root, err = d.queryRootDir() if err != nil { return errors.Errorf("[%s] error querying daemon for root directory: %v", d.id, err) } return nil case <-d.Wait: return errors.Errorf("[%s] Daemon exited during startup", d.id) } } } // StartWithBusybox will first start the daemon with Daemon.Start() // then save the busybox image from the main daemon and load it into this Daemon instance. func (d *Daemon) StartWithBusybox(t testingT, arg ...string) { d.Start(t, arg...) d.LoadBusybox(t) } // Kill will send a SIGKILL to the daemon func (d *Daemon) Kill() error { if d.cmd == nil || d.Wait == nil { return errDaemonNotStarted } defer func() { d.logFile.Close() d.cmd = nil }() if err := d.cmd.Process.Kill(); err != nil { return err } return os.Remove(fmt.Sprintf("%s/docker.pid", d.Folder)) } // Pid returns the pid of the daemon func (d *Daemon) Pid() int { return d.cmd.Process.Pid } // Interrupt stops the daemon by sending it an Interrupt signal func (d *Daemon) Interrupt() error { return d.Signal(os.Interrupt) } // Signal sends the specified signal to the daemon if running func (d *Daemon) Signal(signal os.Signal) error { if d.cmd == nil || d.Wait == nil { return errDaemonNotStarted } return d.cmd.Process.Signal(signal) } // DumpStackAndQuit sends SIGQUIT to the daemon, which triggers it to dump its // stack to its log file and exit // This is used primarily for gathering debug information on test timeout func (d *Daemon) DumpStackAndQuit() { if d.cmd == nil || d.cmd.Process == nil { return } SignalDaemonDump(d.cmd.Process.Pid) } // Stop will send a SIGINT every second and wait for the daemon to stop. // If it times out, a SIGKILL is sent. // Stop will not delete the daemon directory. If a purged daemon is needed, // instantiate a new one with NewDaemon. // If an error occurs while stopping the daemon, the test will fail. func (d *Daemon) Stop(t testingT) { err := d.StopWithError() if err != nil { if err != errDaemonNotStarted { t.Fatalf("Error while stopping the daemon %s: %v", d.id, err) } else { t.Logf("Daemon %s is not started", d.id) } } } // StopWithError will send a SIGINT every second and wait for the daemon to stop.
// If it times out, a SIGKILL is sent. // Stop will not delete the daemon directory. If a purged daemon is needed, // instantiate a new one with NewDaemon. func (d *Daemon) StopWithError() error { if d.cmd == nil || d.Wait == nil { return errDaemonNotStarted } defer func() { d.logFile.Close() d.cmd = nil }() i := 1 tick := time.Tick(time.Second) if err := d.cmd.Process.Signal(os.Interrupt); err != nil { if strings.Contains(err.Error(), "os: process already finished") { return errDaemonNotStarted } return errors.Errorf("could not send signal: %v", err) } out1: for { select { case err := <-d.Wait: return err case <-time.After(20 * time.Second): // time for stopping jobs and running onShutdown hooks d.log.Logf("[%s] daemon stop timed out after 20 seconds", d.id) break out1 } } out2: for { select { case err := <-d.Wait: return err case <-tick: i++ if i > 5 { d.log.Logf("tried to interrupt daemon %d times, now trying to kill it", i) break out2 } d.log.Logf("Attempt #%d: daemon is still running with pid %d", i, d.cmd.Process.Pid) if err := d.cmd.Process.Signal(os.Interrupt); err != nil { return errors.Errorf("could not send signal: %v", err) } } } if err := d.cmd.Process.Kill(); err != nil { d.log.Logf("Could not kill daemon: %v", err) return err } d.cmd.Wait() return os.Remove(fmt.Sprintf("%s/docker.pid", d.Folder)) } // Restart will restart the daemon by first stopping it and then starting it. // If an error occurs while starting the daemon, the test will fail. func (d *Daemon) Restart(t testingT, args ...string) { d.Stop(t) d.handleUserns() d.Start(t, args...) } // RestartWithError will restart the daemon by first stopping it and then starting it. func (d *Daemon) RestartWithError(arg ...string) error { if err := d.StopWithError(); err != nil { return err } d.handleUserns() return d.StartWithError(arg...)
} func (d *Daemon) handleUserns() { // in the case of tests running a user namespace-enabled daemon, we have resolved // d.Root to be the actual final path of the graph dir after the "uid.gid" of // remapped root is added--we need to subtract it from the path before calling // start or else we will continue making subdirectories rather than truly restarting // with the same location/root: if root := os.Getenv("DOCKER_REMAP_ROOT"); root != "" { d.Root = filepath.Dir(d.Root) } } // LoadBusybox loads the busybox image into the daemon func (d *Daemon) LoadBusybox(t testingT) { clientHost, err := client.NewEnvClient() require.NoError(t, err, "failed to create client") defer clientHost.Close() ctx := context.Background() reader, err := clientHost.ImageSave(ctx, []string{"busybox:latest"}) require.NoError(t, err, "failed to save busybox") defer reader.Close() client, err := d.NewClient() require.NoError(t, err, "failed to create client") defer client.Close() resp, err := client.ImageLoad(ctx, reader, true) require.NoError(t, err, "failed to load busybox") defer resp.Body.Close() } func (d *Daemon) queryRootDir() (string, error) { // update daemon root by asking /info endpoint (to support user // namespaced daemon with root remapped uid.gid directory) clientConfig, err := d.getClientConfig() if err != nil { return "", err } client := &http.Client{ Transport: clientConfig.transport, } req, err := http.NewRequest("GET", "/info", nil) if err != nil { return "", err } req.Header.Set("Content-Type", "application/json") req.URL.Host = clientConfig.addr req.URL.Scheme = clientConfig.scheme resp, err := client.Do(req) if err != nil { return "", err } body := ioutils.NewReadCloserWrapper(resp.Body, func() error { return resp.Body.Close() }) type Info struct { DockerRootDir string } var b []byte var i Info b, err = request.ReadBody(body) if err == nil && resp.StatusCode == http.StatusOK { // read the docker root dir if err = json.Unmarshal(b, &i); err == nil { return i.DockerRootDir, nil } } return "", err } // Sock returns the socket path of the daemon func (d *Daemon) Sock() string { return "unix://" + d.sockPath() } func (d *Daemon) sockPath() string { return filepath.Join(SockRoot, d.id+".sock") } // WaitRun waits up to 10s for a container to be running func (d *Daemon) WaitRun(contID string) error { args := []string{"--host", d.Sock()} return WaitInspectWithArgs(d.dockerBinary, contID, "{{.State.Running}}", "true", 10*time.Second, args...) } // Info returns the info struct for this daemon func (d *Daemon) Info(t require.TestingT) types.Info { apiclient, err := request.NewClientForHost(d.Sock()) require.NoError(t, err) info, err := apiclient.Info(context.Background()) require.NoError(t, err) return info } // Cmd executes a docker CLI command against this daemon. // Example: d.Cmd("version") will run docker -H unix://path/to/unix.sock version func (d *Daemon) Cmd(args ...string) (string, error) { result := icmd.RunCmd(d.Command(args...)) return result.Combined(), result.Error } // Command creates a docker CLI command against this daemon, to be executed later. // Example: d.Command("version") creates a command to run "docker -H unix://path/to/unix.sock version" func (d *Daemon) Command(args ...string) icmd.Cmd { return icmd.Command(d.dockerBinary, d.PrependHostArg(args)...)
} // PrependHostArg prepends the daemon host flags to the specified arguments func (d *Daemon) PrependHostArg(args []string) []string { for _, arg := range args { if arg == "--host" || arg == "-H" { return args } } return append([]string{"--host", d.Sock()}, args...) } // SockRequest executes a socket request on a daemon and returns the status code and output. func (d *Daemon) SockRequest(method, endpoint string, data interface{}) (int, []byte, error) { jsonData := bytes.NewBuffer(nil) if err := json.NewEncoder(jsonData).Encode(data); err != nil { return -1, nil, err } res, body, err := d.SockRequestRaw(method, endpoint, jsonData, "application/json") if err != nil { return -1, nil, err } b, err := request.ReadBody(body) return res.StatusCode, b, err } // SockRequestRaw executes a socket request on a daemon and returns an http // response and a reader for the output data. // Deprecated: use request package instead func (d *Daemon) SockRequestRaw(method, endpoint string, data io.Reader, ct string) (*http.Response, io.ReadCloser, error) { return request.SockRequestRaw(method, endpoint, data, ct, d.Sock()) } // LogFileName returns the path of the daemon's log file func (d *Daemon) LogFileName() string { return d.logFile.Name() } // GetIDByName returns the ID of an object (container, volume, …) given its name func (d *Daemon) GetIDByName(name string) (string, error) { return d.inspectFieldWithError(name, "Id") } // ActiveContainers returns the list of ids of the currently running containers func (d *Daemon) ActiveContainers() (ids []string) { // FIXME(vdemeester) shouldn't ignore the error out, _ := d.Cmd("ps", "-q") for _, id := range strings.Split(out, "\n") { if id = strings.TrimSpace(id); id != "" { ids = append(ids, id) } } return } // ReadLogFile returns the content of the daemon log file func (d *Daemon) ReadLogFile() ([]byte, error) { return ioutil.ReadFile(d.logFile.Name()) } // InspectField returns the field filtered by 'filter' func (d *Daemon) InspectField(name, filter string) (string, error) { return d.inspectFilter(name, filter) } func (d *Daemon) inspectFilter(name, filter string) (string, error) { format := fmt.Sprintf("{{%s}}", filter) out, err := d.Cmd("inspect", "-f", format, name) if err != nil { return "", errors.Errorf("failed to inspect %s: %s", name, out) } return strings.TrimSpace(out), nil } func (d *Daemon) inspectFieldWithError(name, field string) (string, error) { return d.inspectFilter(name, fmt.Sprintf(".%s", field)) } // FindContainerIP returns the IP of the specified container func (d *Daemon) FindContainerIP(id string) (string, error) { out, err := d.Cmd("inspect", "--format='{{ .NetworkSettings.Networks.bridge.IPAddress }}'", id) if err != nil { return "", err } return strings.Trim(out, " \r\n'"), nil } // BuildImageWithOut builds an image with the specified dockerfile and options and returns the output func (d *Daemon) BuildImageWithOut(name, dockerfile string, useCache bool, buildFlags ...string) (string, int, error) { buildCmd := BuildImageCmdWithHost(d.dockerBinary, name, dockerfile, d.Sock(), useCache, buildFlags...)
result := icmd.RunCmd(icmd.Cmd{ Command: buildCmd.Args, Env: buildCmd.Env, Dir: buildCmd.Dir, Stdin: buildCmd.Stdin, Stdout: buildCmd.Stdout, }) return result.Combined(), result.ExitCode, result.Error } // CheckActiveContainerCount returns the number of active containers // FIXME(vdemeester) should re-use ActiveContainers in some way func (d *Daemon) CheckActiveContainerCount(c *check.C) (interface{}, check.CommentInterface) { out, err := d.Cmd("ps", "-q") c.Assert(err, checker.IsNil) if len(strings.TrimSpace(out)) == 0 { return 0, nil } return len(strings.Split(strings.TrimSpace(out), "\n")), check.Commentf("output: %q", string(out)) } // ReloadConfig asks the daemon to reload its configuration func (d *Daemon) ReloadConfig() error { if d.cmd == nil || d.cmd.Process == nil { return errors.New("daemon is not running") } errCh := make(chan error) started := make(chan struct{}) go func() { _, body, err := request.DoOnHost(d.Sock(), "/events", request.Method(http.MethodGet)) close(started) if err != nil { errCh <- err } defer body.Close() dec := json.NewDecoder(body) for { var e events.Message if err := dec.Decode(&e); err != nil { errCh <- err return } if e.Type != events.DaemonEventType { continue } if e.Action != "reload" { continue } close(errCh) // notify that we are done return } }() <-started if err := signalDaemonReload(d.cmd.Process.Pid); err != nil { return errors.Errorf("error signaling daemon reload: %v", err) } select { case err := <-errCh: if err != nil { return errors.Errorf("error waiting for daemon reload event: %v", err) } case <-time.After(30 * time.Second): return errors.New("timeout waiting for daemon reload event") } return nil } // NewClient creates a new client based on the daemon's socket path func (d *Daemon) NewClient() (*client.Client, error) { httpClient, err := request.NewHTTPClient(d.Sock()) if err != nil { return nil, err } return client.NewClient(d.Sock(), api.DefaultVersion, httpClient, nil) } // WaitInspectWithArgs waits for the specified expression to be equal to the specified expected string in the given time. // Deprecated: use cli.WaitCmd instead func WaitInspectWithArgs(dockerBinary, name, expr, expected string, timeout time.Duration, arg ...string) error { after := time.After(timeout) args := append(arg, "inspect", "-f", expr, name) for { result := icmd.RunCommand(dockerBinary, args...) if result.Error != nil { if !strings.Contains(strings.ToLower(result.Stderr()), "no such") { return errors.Errorf("error executing docker inspect: %v\n%s", result.Stderr(), result.Stdout()) } select { case <-after: return result.Error default: time.Sleep(10 * time.Millisecond) continue } } out := strings.TrimSpace(result.Stdout()) if out == expected { break } select { case <-after: return errors.Errorf("condition \"%q == %q\" not true in time (%v)", out, expected, timeout) default: } time.Sleep(100 * time.Millisecond) } return nil } // BuildImageCmdWithHost creates a build command with the specified arguments. // Deprecated // FIXME(vdemeester) move this away func BuildImageCmdWithHost(dockerBinary, name, dockerfile, host string, useCache bool, buildFlags ...string) *exec.Cmd { args := []string{} if host != "" { args = append(args, "--host", host) } args = append(args, "build", "-t", name) if !useCache { args = append(args, "--no-cache") } args = append(args, buildFlags...) args = append(args, "-") buildCmd := exec.Command(dockerBinary, args...) buildCmd.Stdin = strings.NewReader(dockerfile) return buildCmd }
[ "\"DOCKER_INTEGRATION_DAEMON_DEST\"", "\"DEST\"", "\"DOCKER_USERLANDPROXY\"", "\"DOCKER_GRAPHDRIVER\"", "\"DOCKER_REMAP_ROOT\"", "\"DOCKER_REMAP_ROOT\"" ]
[]
[ "DOCKER_REMAP_ROOT", "DOCKER_USERLANDPROXY", "DOCKER_INTEGRATION_DAEMON_DEST", "DOCKER_GRAPHDRIVER", "DEST" ]
[]
["DOCKER_REMAP_ROOT", "DOCKER_USERLANDPROXY", "DOCKER_INTEGRATION_DAEMON_DEST", "DOCKER_GRAPHDRIVER", "DEST"]
go
5
0
src/picker/paths.py
from pathlib import Path import os if os.environ.get('XDG_CONFIG_HOME'): config_home = Path(os.environ.get('XDG_CONFIG_HOME')) else: config_home = Path.home() / '.config' if os.environ.get('XDG_CONFIG_DIRS'): config_global = [Path(dir) for dir in os.environ.get('XDG_CONFIG_DIRS').split(':')] else: config_global = [Path('/etc/xdg')] config_file_locations = [str(directory / 'rofimoji.rc') for directory in [config_home] + config_global] if os.environ.get('XDG_DATA_HOME'): data_home = Path(os.environ.get('XDG_DATA_HOME')) else: data_home = Path.home() / '.local' / 'share' recents_file_location = data_home / 'rofimoji' / 'recent' favorites_file_location = data_home / 'rofimoji' / 'favorites' if os.environ.get('XDG_CACHE_HOME'): cache_home = Path(os.environ.get('XDG_CACHE_HOME')) else: cache_home = Path.home() / '.cache' cache_file_location = cache_home / 'rofimoji'
[]
[]
[ "XDG_CONFIG_DIRS", "XDG_CACHE_HOME", "XDG_CONFIG_HOME", "XDG_DATA_HOME" ]
[]
["XDG_CONFIG_DIRS", "XDG_CACHE_HOME", "XDG_CONFIG_HOME", "XDG_DATA_HOME"]
python
4
0
internal/controlplane/xdsmgr/xdsmgr.go
// Package xdsmgr implements a resource discovery manager for envoy. package xdsmgr import ( "context" "encoding/json" "errors" "os" "sync" envoy_service_discovery_v3 "github.com/envoyproxy/go-control-plane/envoy/service/discovery/v3" "github.com/google/uuid" lru "github.com/hashicorp/golang-lru" "golang.org/x/sync/errgroup" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" "google.golang.org/protobuf/types/known/timestamppb" "github.com/pomerium/pomerium/internal/contextkeys" "github.com/pomerium/pomerium/internal/log" "github.com/pomerium/pomerium/internal/signal" "github.com/pomerium/pomerium/pkg/grpc/events" ) const ( maxNonceCacheSize = 1 << 12 ) type streamState struct { typeURL string clientResourceVersions map[string]string unsubscribedResources map[string]struct{} } var onHandleDeltaRequest = func(state *streamState) {} // A Manager manages xDS resources. type Manager struct { signal *signal.Signal eventHandler func(*events.EnvoyConfigurationEvent) mu sync.Mutex nonce string resources map[string][]*envoy_service_discovery_v3.Resource nonceToConfig *lru.Cache hostname string } // NewManager creates a new Manager. func NewManager(resources map[string][]*envoy_service_discovery_v3.Resource, eventHandler func(*events.EnvoyConfigurationEvent)) *Manager { nonceToConfig, _ := lru.New(maxNonceCacheSize) // the only error it returns is when size is negative, which never happens return &Manager{ signal: signal.New(), eventHandler: eventHandler, nonceToConfig: nonceToConfig, nonce: uuid.NewString(), resources: resources, hostname: getHostname(), } } // DeltaAggregatedResources implements the incremental xDS server. func (mgr *Manager) DeltaAggregatedResources( stream envoy_service_discovery_v3.AggregatedDiscoveryService_DeltaAggregatedResourcesServer, ) error { ch := mgr.signal.Bind() defer mgr.signal.Unbind(ch) stateByTypeURL := map[string]*streamState{} getDeltaResponse := func(ctx context.Context, typeURL string) *envoy_service_discovery_v3.DeltaDiscoveryResponse { mgr.mu.Lock() defer mgr.mu.Unlock() state, ok := stateByTypeURL[typeURL] if !ok { return nil } res := &envoy_service_discovery_v3.DeltaDiscoveryResponse{ TypeUrl: typeURL, Nonce: mgr.nonce, } seen := map[string]struct{}{} for _, resource := range mgr.resources[typeURL] { seen[resource.Name] = struct{}{} if resource.Version != state.clientResourceVersions[resource.Name] { res.Resources = append(res.Resources, resource) } } for name := range state.clientResourceVersions { _, ok := seen[name] if !ok { res.RemovedResources = append(res.RemovedResources, name) } } if len(res.Resources) == 0 && len(res.RemovedResources) == 0 { return nil } return res } handleDeltaRequest := func(ctx context.Context, req *envoy_service_discovery_v3.DeltaDiscoveryRequest) { mgr.mu.Lock() defer mgr.mu.Unlock() state, ok := stateByTypeURL[req.GetTypeUrl()] if !ok { // first time we've seen a message for this type URL.
state = &streamState{ typeURL: req.GetTypeUrl(), clientResourceVersions: req.GetInitialResourceVersions(), unsubscribedResources: make(map[string]struct{}), } if state.clientResourceVersions == nil { state.clientResourceVersions = make(map[string]string) } stateByTypeURL[req.GetTypeUrl()] = state } switch { case req.GetResponseNonce() == "": // neither an ACK nor a NACK case req.GetErrorDetail() != nil: // a NACK // - set the client resource versions to the current resource versions state.clientResourceVersions = make(map[string]string) for _, resource := range mgr.resources[req.GetTypeUrl()] { state.clientResourceVersions[resource.Name] = resource.Version } mgr.nackEvent(ctx, req) case req.GetResponseNonce() == mgr.nonce: // an ACK for the last response // - set the client resource versions to the current resource versions state.clientResourceVersions = make(map[string]string) for _, resource := range mgr.resources[req.GetTypeUrl()] { state.clientResourceVersions[resource.Name] = resource.Version } mgr.ackEvent(ctx, req) default: // an ACK for a response that's not the last response mgr.ackEvent(ctx, req) } // update subscriptions for _, name := range req.GetResourceNamesSubscribe() { delete(state.unsubscribedResources, name) } for _, name := range req.GetResourceNamesUnsubscribe() { state.unsubscribedResources[name] = struct{}{} // from the docs: // NOTE: the server must respond with all resources listed in // resource_names_subscribe, even if it believes the client has // the most recent version of them. The reason: the client may // have dropped them, but then regained interest before it had // a chance to send the unsubscribe message. // so we reset the version to treat it like a new version delete(state.clientResourceVersions, name) } onHandleDeltaRequest(state) } incoming := make(chan *envoy_service_discovery_v3.DeltaDiscoveryRequest) outgoing := make(chan *envoy_service_discovery_v3.DeltaDiscoveryResponse) eg, ctx := errgroup.WithContext(stream.Context()) // 1. receive all incoming messages eg.Go(func() error { for { req, err := stream.Recv() if err != nil { return err } select { case <-ctx.Done(): return ctx.Err() case incoming <- req: } } }) // 2. handle incoming requests or resource changes eg.Go(func() error { changeCtx := ctx for { var typeURLs []string select { case <-ctx.Done(): return ctx.Err() case req := <-incoming: handleDeltaRequest(changeCtx, req) typeURLs = []string{req.GetTypeUrl()} case changeCtx = <-ch: mgr.mu.Lock() for typeURL := range mgr.resources { typeURLs = append(typeURLs, typeURL) } mgr.mu.Unlock() } for _, typeURL := range typeURLs { res := getDeltaResponse(changeCtx, typeURL) if res == nil { continue } select { case <-ctx.Done(): return ctx.Err() case outgoing <- res: mgr.changeEvent(ctx, res) } } } }) // 3. send all outgoing messages eg.Go(func() error { for { select { case <-ctx.Done(): return ctx.Err() case res := <-outgoing: err := stream.Send(res) if err != nil { return err } } } }) return eg.Wait() } // StreamAggregatedResources is not implemented. func (mgr *Manager) StreamAggregatedResources( stream envoy_service_discovery_v3.AggregatedDiscoveryService_StreamAggregatedResourcesServer, ) error { return status.Errorf(codes.Unimplemented, "method StreamAggregatedResources not implemented") } // Update updates the state of resources. If any changes are made, they will be pushed to any listening // streams. For each TypeURL the list of resources should be the complete list of resources.
func (mgr *Manager) Update(ctx context.Context, resources map[string][]*envoy_service_discovery_v3.Resource) { nonce := uuid.New().String() mgr.mu.Lock() mgr.nonce = nonce mgr.resources = resources mgr.nonceToConfig.Add(nonce, ctx.Value(contextkeys.DatabrokerConfigVersion)) mgr.mu.Unlock() mgr.signal.Broadcast(ctx) } func (mgr *Manager) nonceToConfigVersion(nonce string) (ver uint64) { val, ok := mgr.nonceToConfig.Get(nonce) if !ok { return 0 } ver, _ = val.(uint64) return ver } func (mgr *Manager) nackEvent(ctx context.Context, req *envoy_service_discovery_v3.DeltaDiscoveryRequest) { mgr.eventHandler(&events.EnvoyConfigurationEvent{ Instance: mgr.hostname, Kind: events.EnvoyConfigurationEvent_EVENT_DISCOVERY_REQUEST, Time: timestamppb.Now(), Message: req.ErrorDetail.Message, Code: req.ErrorDetail.Code, Details: req.ErrorDetail.Details, ResourceSubscribed: req.ResourceNamesSubscribe, ResourceUnsubscribed: req.ResourceNamesUnsubscribe, ConfigVersion: mgr.nonceToConfigVersion(req.ResponseNonce), TypeUrl: req.TypeUrl, }) bs, _ := json.Marshal(req.ErrorDetail.Details) log.Error(ctx). Err(errors.New(req.ErrorDetail.Message)). Str("resource_type", req.TypeUrl). Strs("resources_unsubscribe", req.ResourceNamesUnsubscribe). Strs("resources_subscribe", req.ResourceNamesSubscribe). Uint64("nonce_version", mgr.nonceToConfigVersion(req.ResponseNonce)). Int32("code", req.ErrorDetail.Code). RawJSON("details", bs).Msg("error applying configuration") } func (mgr *Manager) ackEvent(ctx context.Context, req *envoy_service_discovery_v3.DeltaDiscoveryRequest) { mgr.eventHandler(&events.EnvoyConfigurationEvent{ Instance: mgr.hostname, Kind: events.EnvoyConfigurationEvent_EVENT_DISCOVERY_REQUEST, Time: timestamppb.Now(), ConfigVersion: mgr.nonceToConfigVersion(req.ResponseNonce), ResourceSubscribed: req.ResourceNamesSubscribe, ResourceUnsubscribed: req.ResourceNamesUnsubscribe, TypeUrl: req.TypeUrl, Message: "ok", }) log.Debug(ctx). Str("resource_type", req.TypeUrl). Strs("resources_unsubscribe", req.ResourceNamesUnsubscribe). Strs("resources_subscribe", req.ResourceNamesSubscribe). Uint64("nonce_version", mgr.nonceToConfigVersion(req.ResponseNonce)). Msg("ACK") } func (mgr *Manager) changeEvent(ctx context.Context, res *envoy_service_discovery_v3.DeltaDiscoveryResponse) { mgr.eventHandler(&events.EnvoyConfigurationEvent{ Instance: mgr.hostname, Kind: events.EnvoyConfigurationEvent_EVENT_DISCOVERY_RESPONSE, Time: timestamppb.Now(), ConfigVersion: mgr.nonceToConfigVersion(res.Nonce), ResourceSubscribed: resourceNames(res.Resources), ResourceUnsubscribed: res.RemovedResources, TypeUrl: res.TypeUrl, Message: "change", }) log.Debug(ctx). Uint64("ctx_config_version", mgr.nonceToConfigVersion(res.Nonce)). Str("nonce", res.Nonce). Str("type", res.TypeUrl). Strs("subscribe", resourceNames(res.Resources)). Strs("removed", res.RemovedResources). Msg("sent update") } func resourceNames(res []*envoy_service_discovery_v3.Resource) []string { txt := make([]string, 0, len(res)) for _, r := range res { txt = append(txt, r.Name) } return txt } func getHostname() string { hostname, err := os.Hostname() if err != nil { hostname = os.Getenv("HOSTNAME") } if hostname == "" { hostname = "__unknown__" } return hostname }
[ "\"HOSTNAME\"" ]
[]
[ "HOSTNAME" ]
[]
["HOSTNAME"]
go
1
0
server.py
# coding: utf-8 import sys import os import json from flask import Flask from flask import jsonify sys.path.append(os.path.join(os.getcwd(), '..')) import watson_developer_cloud import watson_developer_cloud.natural_language_understanding.features.v1 as \ features app = Flask("NLU App") nlu = watson_developer_cloud.NaturalLanguageUnderstandingV1( version='2017-02-27', username=os.getenv('NATURAL_LANGUAGE_UNDERSTANDING_USERNAME'), password=os.getenv('NATURAL_LANGUAGE_UNDERSTANDING_PASSWORD')) @app.route("/") def default_welcome(): return 'Welcome to the NLU App!' @app.route("/entities") def eval_entities(): response = nlu.analyze( text='Bruce Banner is the Hulk and Bruce Wayne is BATMAN! ' 'Superman fears not Banner, but Wayne.', features=[features.Entities()]) return jsonify(response) @app.route("/keywords") def eval_keywords(): response = nlu.analyze( text='Bruce Banner is the Hulk and Bruce Wayne is BATMAN! ' 'Superman fears not Banner, but Wayne.', features=[features.Keywords()]) return jsonify(response) @app.route("/categories") def eval_categories(): response = nlu.analyze( url='www.cnn.com', features=[features.Categories()]) return jsonify(response) @app.route("/concepts") def eval_concepts(): response = nlu.analyze( text='Natural Language Understanding uses natural language processing to analyze text.', features=[features.Concepts()]) return jsonify(response) @app.route("/emotion") def eval_emotion(): response = nlu.analyze( text='I love apples, but I hate oranges.', features=[features.Emotion(targets=['apples', 'oranges'])]) return jsonify(response) @app.route("/metadata") def eval_metadata(): response = nlu.analyze( url='https://www.ibm.com/blogs/think/2017/01/cognitive-grid/', features=[features.MetaData()]) return jsonify(response) @app.route("/relations") def eval_relations(): response = nlu.analyze( text='The Nobel Prize in Physics 1921 was awarded to Albert Einstein.', features=[features.Relations()]) return jsonify(response) @app.route("/semantic_roles") def eval_semantic_roles(): response = nlu.analyze( text='In 2011, Watson competed on Jeopardy!', features=[features.SemanticRoles()]) return jsonify(response) @app.route("/sentiment") def eval_sentiment(): response = nlu.analyze( text='Thank you and have a nice day!', features=[features.Sentiment()]) return jsonify(response) if __name__ == "__main__": app.run(host='0.0.0.0', debug=True, port=int(os.getenv('PORT', 8080)))
[]
[]
[ "NATURAL_LANGUAGE_UNDERSTANDING_USERNAME", "PORT", "NATURAL_LANGUAGE_UNDERSTANDING_PASSWORD" ]
[]
["NATURAL_LANGUAGE_UNDERSTANDING_USERNAME", "PORT", "NATURAL_LANGUAGE_UNDERSTANDING_PASSWORD"]
python
3
0
proxy_switcher.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # Author: gexiao # Created on 2017-01-13 18:27 import time import shlex import logging import subprocess import os from random import randint from config.config import ss_config_list, http_proxy_list from data.data import DataManager ## subprocess.call(('killall', 'ss-local')) current_ip = '' local_ip = 'local_ip' data_manager = DataManager() ss_process_list = [] ss_proxy_list = [] random_proxy_mute_dict = {} def create_ss_proxies(): global ss_process_list, ss_proxy_list if ss_process_list: return proxies_env = os.getenv('HTTP_PROXY_LIST') if proxies_env: proxies = proxies_env.split(',') for p in proxies: ss_proxy_list.append(p) for http_proxy in http_proxy_list: ss_proxy_list.append(http_proxy) logging.info('ss_proxy_list ' + str(ss_proxy_list)) for index, config in enumerate(ss_config_list): local_port = 1090 + index args = shlex.split( 'ss-local -s {0} -p {1} -l {local_port} ' '-k {3} -m {2} -t 10 -b 0.0.0.0'.format(local_port=local_port, *config)) p = subprocess.Popen(args) ss_process_list.append(p) ss_proxy_list.append('127.0.0.1:' + str(local_port)) if index == len(ss_config_list) - 1: time.sleep(2) def close_all_ss_local(): for ss_process in ss_process_list: ss_process.terminate() ss_process.wait() def auto_choose_ip(): limited_ip_list = data_manager.find_limited_ips() if current_ip and current_ip not in limited_ip_list: return current_ip # if local_ip not in limited_ip_list: # return local_ip for ss_proxy in ss_proxy_list: if ss_proxy not in limited_ip_list: return ss_proxy return '' def get_proxy(): valid_ip = auto_choose_ip() global current_ip if current_ip != valid_ip: logging.info('Change IP to ' + valid_ip) current_ip = valid_ip if not current_ip: logging.info('No valid ip, go to sleep') time.sleep(300) return get_proxy() if current_ip == local_ip: return {} # TODO if 'https://' in current_ip: return {'https': current_ip} if '127.0.0.1:' in current_ip: return {'https': 'socks5://{ss_proxy}'.format(ss_proxy=current_ip)} if '10.0.' in current_ip: return {'https': 'http://{ss_proxy}'.format(ss_proxy=current_ip)} return {} def random_proxy(): def new_random_proxy(): random_index = randint(0, len(ss_proxy_list)-1) random_ip = local_ip if random_index < len(ss_proxy_list): random_ip = ss_proxy_list[random_index] if random_ip == local_ip: return {} if '127.0.0.1:' in random_ip: return {'https': 'socks5://{ss_proxy}'.format(ss_proxy=random_ip)} if '10.0.' in random_ip: return {'https': 'http://{ss_proxy}'.format(ss_proxy=random_ip)} return {'https': '{ss_proxy}'.format(ss_proxy=random_ip)} def test_all_proxies(): if len(random_proxy_mute_dict) < len(ss_proxy_list)+1: return for proxy, mute_time in random_proxy_mute_dict.items(): if mute_time <= time.time(): return logging.info('All proxies are muted') time.sleep(200) proxy = new_random_proxy() if str(proxy) in random_proxy_mute_dict: mute_time = random_proxy_mute_dict[str(proxy)] if mute_time > time.time(): test_all_proxies() return random_proxy() else: del random_proxy_mute_dict[str(proxy)] return proxy def mute_random_proxy(proxy, second=3600): if proxy is not None: random_proxy_mute_dict[str(proxy)] = time.time() + second logging.info('mute proxy: ' + str(proxy)) def tag_current_ip_limited(reset_time): global current_ip data_manager.upsert_ip({'ip': current_ip, 'reset_time': reset_time}) current_ip = ''
[]
[]
[ "HTTP_PROXY_LIST" ]
[]
["HTTP_PROXY_LIST"]
python
1
0
auth/sts.go
/* Copyright 2019 Nike Inc. Licensed under the Apache License, Version 2.0 (the License); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package auth import ( "bytes" "encoding/json" "fmt" "github.com/Nike-Inc/cerberus-go-client/api" "github.com/Nike-Inc/cerberus-go-client/utils" "github.com/aws/aws-sdk-go/aws/credentials" "github.com/aws/aws-sdk-go/aws/defaults" "github.com/aws/aws-sdk-go/aws/endpoints" "github.com/aws/aws-sdk-go/aws/signer/v4" "net/http" "net/url" "os" "time" ) // STSAuth uses AWS V4 signing to authenticate to Cerberus. type STSAuth struct { token string region string expiry time.Time baseURL *url.URL headers http.Header } // NewSTSAuth returns an STSAuth given a valid URL and region. If the CERBERUS_URL // environment variable is set, it will be used over what is passed to this function. // Valid AWS credentials configured either by environment or through a credentials // config file are also required. func NewSTSAuth(cerberusURL, region string) (*STSAuth, error) { // Check for the environment variable if the user has set it if os.Getenv("CERBERUS_URL") != "" { cerberusURL = os.Getenv("CERBERUS_URL") } if len(region) == 0 { return nil, fmt.Errorf("Region cannot be empty") } if len(cerberusURL) == 0 { return nil, fmt.Errorf("Cerberus URL cannot be empty") } parsedURL, err := utils.ValidateURL(cerberusURL) if err != nil { return nil, err } return &STSAuth{ region: region, baseURL: parsedURL, headers: http.Header{ "X-Cerberus-Client": []string{api.ClientHeader}, "Content-Type": []string{"application/json"}, }, }, nil } // GetToken returns a token if it already exists and is not expired. Otherwise, // it authenticates using the provided URL and region and then returns the token. func (a *STSAuth) GetToken(*os.File) (string, error) { if a.IsAuthenticated() { return a.token, nil } err := a.authenticate() return a.token, err } // GetExpiry returns the expiry time of the token if it already exists. Otherwise, // it returns a zero-valued time.Time struct and an error.
func (a *STSAuth) GetExpiry() (time.Time, error) { if len(a.token) > 0 { return a.expiry, nil } return time.Time{}, fmt.Errorf("Expiry time not set.") } func (a *STSAuth) authenticate() error { builtURL := *a.baseURL builtURL.Path = "v2/auth/sts-identity" body := bytes.NewReader([]byte("Action=GetCallerIdentity&Version=2011-06-15")) request, err := http.NewRequest("POST", builtURL.String(), body) if err != nil { return fmt.Errorf("Problem while creating request to Cerberus: %v", err) } headers, err := a.sign() if err != nil { return fmt.Errorf("Problem signing request to Cerberus: %v", err) } for k, v := range headers { request.Header.Set(k, v[0]) } client := http.Client{Timeout: 10 * time.Second} response, err := client.Do(request) if err != nil { return fmt.Errorf("Problem while performing request to Cerberus: %v", err) } defer response.Body.Close() if response.StatusCode == http.StatusUnauthorized || response.StatusCode == http.StatusForbidden { return api.ErrorUnauthorized } if response.StatusCode != http.StatusOK { return fmt.Errorf("Error while trying to authenticate. Got HTTP response code %d", response.StatusCode) } decoder := json.NewDecoder(response.Body) authResponse := &api.IAMAuthResponse{} dErr := decoder.Decode(authResponse) if dErr != nil { return fmt.Errorf("Error while trying to parse response from Cerberus: %v", dErr) } a.token = authResponse.Token a.headers.Set("X-Cerberus-Token", authResponse.Token) a.expiry = time.Now().Add((time.Duration(authResponse.Duration) * time.Second) - expiryDelta) return nil } // IsAuthenticated returns whether or not the current token is set and is not expired. func (a *STSAuth) IsAuthenticated() bool { return len(a.token) > 0 && time.Now().Before(a.expiry) } // Refresh refreshes the current token by reauthenticating against the API. func (a *STSAuth) Refresh() error { if !a.IsAuthenticated() { return api.ErrorUnauthenticated } // A note on why we are just reauthenticating: You can refresh an AWS token, // but there is a limit (24) to the number of refreshes and the API requests // that you refresh your token on every SDB creation. When doing this in an // automation context, you could surpass this limit. You could choose not to refresh // the token, but that can get you into a state where you can't perform some // operations. This is less than ideal but better than having an arbitrary // bound on the number of refreshes and having to track how many have been // done. return a.authenticate() } // Logout deauthorizes the current valid token. This will return an error if the token // is expired or non-existent. func (a *STSAuth) Logout() error { if !a.IsAuthenticated() { return api.ErrorUnauthenticated } // Use a copy of the base URL if err := Logout(*a.baseURL, a.headers); err != nil { return err } // Reset the token and header a.token = "" a.headers.Del("X-Cerberus-Token") return nil } // GetHeaders returns the headers needed to authenticate against Cerberus. This will // return an error if the token is expired or non-existent. func (a *STSAuth) GetHeaders() (http.Header, error) { if !a.IsAuthenticated() { return nil, api.ErrorUnauthenticated } return a.headers, nil } // GetURL returns the configured Cerberus URL. func (a *STSAuth) GetURL() *url.URL { return a.baseURL } // creds obtains default AWS credentials. func creds() *credentials.Credentials { creds := defaults.Get().Config.Credentials return creds } // signer returns a V4 signer for signing a request. 
func signer() (*v4.Signer, error) { creds := creds() _, err := creds.Get() if err != nil { return nil, fmt.Errorf("Credentials are required and cannot be found: %v", err) } signer := v4.NewSigner(creds) return signer, nil } // request creates an STS Auth request. func (a *STSAuth) request() (*http.Request, error) { _, err := endpoints.DefaultResolver().EndpointFor("sts", a.region, endpoints.StrictMatchingOption) if err != nil { return nil, fmt.Errorf("Endpoint could not be created. "+ "Confirm that region, %v, is a valid AWS region: %v", a.region, err) } method := "POST" url := "https://sts." + a.region + ".amazonaws.com" request, err := http.NewRequest(method, url, nil) if err != nil { return nil, err } return request, nil } // sign signs an AWS V4 request and returns the signed headers. func (a *STSAuth) sign() (http.Header, error) { signer, signErr := signer() if signErr != nil { return nil, signErr } request, reqErr := a.request() if reqErr != nil { return nil, reqErr } service := "sts" body := bytes.NewReader([]byte("Action=GetCallerIdentity&Version=2011-06-15")) _, signerErr := signer.Sign(request, body, service, a.region, time.Now()) if signerErr != nil { return nil, signerErr } return request.Header, nil }
[ "\"CERBERUS_URL\"", "\"CERBERUS_URL\"" ]
[]
[ "CERBERUS_URL" ]
[]
["CERBERUS_URL"]
go
1
0
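The Go client above follows a cache-then-reauthenticate pattern: GetToken returns the cached token while IsAuthenticated holds, and fetches a fresh one otherwise, backing the expiry off by a safety delta. A rough Python sketch of that pattern; fetch_token and the 60-second delta are assumptions, not part of the real client:

import time

EXPIRY_DELTA = 60  # safety margin in seconds; an assumption mirroring expiryDelta

class TokenCache:
    def __init__(self, fetch_token):
        self._fetch = fetch_token  # callable returning (token, lifetime_seconds)
        self._token = ''
        self._expiry = 0.0

    def is_authenticated(self):
        return bool(self._token) and time.time() < self._expiry

    def get_token(self):
        if not self.is_authenticated():
            token, lifetime = self._fetch()  # re-authenticate on miss or expiry
            self._token = token
            self._expiry = time.time() + lifetime - EXPIRY_DELTA
        return self._token

cache = TokenCache(lambda: ('dummy-token', 3600))  # hypothetical fetcher
assert cache.get_token() == 'dummy-token'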
platform/osx/detect.py
import os import sys import subprocess from methods import detect_darwin_sdk_path def is_active(): return True def get_name(): return "OSX" def can_build(): if sys.platform == "darwin" or ("OSXCROSS_ROOT" in os.environ): return True return False def get_opts(): from SCons.Variables import BoolVariable, EnumVariable return [ ("osxcross_sdk", "OSXCross SDK version", "darwin14"), ("MACOS_SDK_PATH", "Path to the macOS SDK", ""), BoolVariable( "use_static_mvk", "Link MoltenVK statically as Level-0 driver (better portability) or use Vulkan ICD loader (enables validation layers)", False, ), EnumVariable("debug_symbols", "Add debugging symbols to release builds", "yes", ("yes", "no", "full")), BoolVariable("separate_debug_symbols", "Create a separate file containing debugging symbols", False), BoolVariable("use_ubsan", "Use LLVM/GCC compiler undefined behavior sanitizer (UBSAN)", False), BoolVariable("use_asan", "Use LLVM/GCC compiler address sanitizer (ASAN))", False), BoolVariable("use_tsan", "Use LLVM/GCC compiler thread sanitizer (TSAN))", False), ] def get_flags(): return [] def configure(env): ## Build type if env["target"] == "release": if env["optimize"] == "speed": # optimize for speed (default) env.Prepend(CCFLAGS=["-O3", "-fomit-frame-pointer", "-ftree-vectorize", "-msse2"]) else: # optimize for size env.Prepend(CCFLAGS=["-Os", "-ftree-vectorize", "-msse2"]) if env["debug_symbols"] == "yes": env.Prepend(CCFLAGS=["-g1"]) if env["debug_symbols"] == "full": env.Prepend(CCFLAGS=["-g2"]) elif env["target"] == "release_debug": if env["optimize"] == "speed": # optimize for speed (default) env.Prepend(CCFLAGS=["-O2"]) else: # optimize for size env.Prepend(CCFLAGS=["-Os"]) env.Prepend(CPPDEFINES=["DEBUG_ENABLED"]) if env["debug_symbols"] == "yes": env.Prepend(CCFLAGS=["-g1"]) if env["debug_symbols"] == "full": env.Prepend(CCFLAGS=["-g2"]) elif env["target"] == "debug": env.Prepend(CCFLAGS=["-g3"]) env.Prepend(CPPDEFINES=["DEBUG_ENABLED", "DEBUG_MEMORY_ENABLED"]) ## Architecture # Mac OS X no longer runs on 32-bit since 10.7 which is unsupported since 2014 # As such, we only support 64-bit env["bits"] = "64" ## Compiler configuration # Save this in environment for use by other modules if "OSXCROSS_ROOT" in os.environ: env["osxcross"] = True if not "osxcross" in env: # regular native build if env["arch"] == "arm64": print("Building for macOS 10.15+, platform arm64.") env.Append(CCFLAGS=["-arch", "arm64", "-mmacosx-version-min=10.15", "-target", "arm64-apple-macos10.15"]) env.Append(LINKFLAGS=["-arch", "arm64", "-mmacosx-version-min=10.15", "-target", "arm64-apple-macos10.15"]) else: print("Building for macOS 10.12+, platform x86-64.") env.Append(CCFLAGS=["-arch", "x86_64", "-mmacosx-version-min=10.12"]) env.Append(LINKFLAGS=["-arch", "x86_64", "-mmacosx-version-min=10.12"]) if env["macports_clang"] != "no": mpprefix = os.environ.get("MACPORTS_PREFIX", "/opt/local") mpclangver = env["macports_clang"] env["CC"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang" env["LINK"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++" env["CXX"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/clang++" env["AR"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ar" env["RANLIB"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-ranlib" env["AS"] = mpprefix + "/libexec/llvm-" + mpclangver + "/bin/llvm-as" env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define else: env["CC"] = "clang" env["CXX"] = "clang++" detect_darwin_sdk_path("osx", env) 
env.Append(CCFLAGS=["-isysroot", "$MACOS_SDK_PATH"]) env.Append(LINKFLAGS=["-isysroot", "$MACOS_SDK_PATH"]) else: # osxcross build root = os.environ.get("OSXCROSS_ROOT", 0) basecmd = root + "/target/bin/x86_64-apple-" + env["osxcross_sdk"] + "-" ccache_path = os.environ.get("CCACHE") if ccache_path is None: env["CC"] = basecmd + "cc" env["CXX"] = basecmd + "c++" else: # there aren't any ccache wrappers available for OS X cross-compile, # to enable caching we need to prepend the path to the ccache binary env["CC"] = ccache_path + " " + basecmd + "cc" env["CXX"] = ccache_path + " " + basecmd + "c++" env["AR"] = basecmd + "ar" env["RANLIB"] = basecmd + "ranlib" env["AS"] = basecmd + "as" env.Append(CPPDEFINES=["__MACPORTS__"]) # hack to fix libvpx MM256_BROADCASTSI128_SI256 define if env["CXX"] == "clang++": env.Append(CPPDEFINES=["TYPED_METHOD_BIND"]) env["CC"] = "clang" env["LINK"] = "clang++" if env["use_ubsan"] or env["use_asan"] or env["use_tsan"]: env.extra_suffix += "s" if env["use_ubsan"]: env.Append(CCFLAGS=["-fsanitize=undefined"]) env.Append(LINKFLAGS=["-fsanitize=undefined"]) if env["use_asan"]: env.Append(CCFLAGS=["-fsanitize=address"]) env.Append(LINKFLAGS=["-fsanitize=address"]) if env["use_tsan"]: env.Append(CCFLAGS=["-fsanitize=thread"]) env.Append(LINKFLAGS=["-fsanitize=thread"]) ## Dependencies if env["builtin_libtheora"]: if env["arch"] != "arm64": env["x86_libtheora_opt_gcc"] = True ## Flags env.Prepend(CPPPATH=["#platform/osx"]) env.Append(CPPDEFINES=["OSX_ENABLED", "UNIX_ENABLED", "APPLE_STYLE_KEYS", "COREAUDIO_ENABLED", "COREMIDI_ENABLED"]) env.Append( LINKFLAGS=[ "-framework", "Cocoa", "-framework", "Carbon", "-framework", "AudioUnit", "-framework", "CoreAudio", "-framework", "CoreMIDI", "-framework", "IOKit", "-framework", "ForceFeedback", "-framework", "CoreVideo", "-framework", "AVFoundation", "-framework", "CoreMedia", ] ) env.Append(LIBS=["pthread", "z"]) env.Append(CPPDEFINES=["VULKAN_ENABLED"]) env.Append(LINKFLAGS=["-framework", "Metal", "-framework", "QuartzCore", "-framework", "IOSurface"]) if env["use_static_mvk"]: env.Append(LINKFLAGS=["-framework", "MoltenVK"]) env["builtin_vulkan"] = False elif not env["builtin_vulkan"]: env.Append(LIBS=["vulkan"]) # env.Append(CPPDEFINES=['GLES_ENABLED', 'OPENGL_ENABLED'])
[]
[]
[ "CCACHE", "MACPORTS_PREFIX", "OSXCROSS_ROOT" ]
[]
["CCACHE", "MACPORTS_PREFIX", "OSXCROSS_ROOT"]
python
3
0
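The osxcross branch above assembles cross-compiler commands from OSXCROSS_ROOT, the SDK suffix, and an optional CCACHE wrapper. The same derivation as a standalone sketch outside SCons; the install path in the usage line is hypothetical:

import os

def osxcross_tools(sdk='darwin14'):
    # Assemble cc/c++ command strings the way the osxcross branch does (sketch).
    root = os.environ.get('OSXCROSS_ROOT', '')
    base = root + '/target/bin/x86_64-apple-' + sdk + '-'
    ccache = os.environ.get('CCACHE')
    prefix = (ccache + ' ') if ccache else ''  # prepend ccache when available
    return {'CC': prefix + base + 'cc', 'CXX': prefix + base + 'c++'}

os.environ['OSXCROSS_ROOT'] = '/opt/osxcross'  # hypothetical install location
print(osxcross_tools())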
setup.py
# SPDX-License-Identifier: Apache-2.0 from __future__ import (absolute_import, division, print_function, unicode_literals) import glob import multiprocessing import os import platform import shlex import subprocess import sys from collections import namedtuple from contextlib import contextmanager from datetime import date from distutils import log, sysconfig from distutils.spawn import find_executable from textwrap import dedent import setuptools import setuptools.command.build_ext import setuptools.command.build_py import setuptools.command.develop TOP_DIR = os.path.realpath(os.path.dirname(__file__)) SRC_DIR = os.path.join(TOP_DIR, 'onnx') TP_DIR = os.path.join(TOP_DIR, 'third_party') CMAKE_BUILD_DIR = os.path.join(TOP_DIR, '.setuptools-cmake-build') PACKAGE_NAME = 'onnx' WINDOWS = (os.name == 'nt') CMAKE = find_executable('cmake3') or find_executable('cmake') MAKE = find_executable('make') install_requires = [] setup_requires = [] tests_require = [] extras_require = {} ################################################################################ # Global variables for controlling the build variant ################################################################################ # Default value is set to TRUE/1 to keep the settings the same as the current ones. # However, going forward the recommended way is to set this to False/0 ONNX_ML = not bool(os.getenv('ONNX_ML') == '0') ONNX_VERIFY_PROTO3 = bool(os.getenv('ONNX_VERIFY_PROTO3') == '1') ONNX_NAMESPACE = os.getenv('ONNX_NAMESPACE', 'onnx') ONNX_BUILD_TESTS = bool(os.getenv('ONNX_BUILD_TESTS') == '1') ONNX_DISABLE_EXCEPTIONS = bool(os.getenv('ONNX_DISABLE_EXCEPTIONS') == '1') USE_MSVC_STATIC_RUNTIME = bool(os.getenv('USE_MSVC_STATIC_RUNTIME', '0') == '1') DEBUG = bool(os.getenv('DEBUG', '0') == '1') COVERAGE = bool(os.getenv('COVERAGE', '0') == '1') ################################################################################ # Version ################################################################################ try: git_version = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=TOP_DIR).decode('ascii').strip() except (OSError, subprocess.CalledProcessError): git_version = None with open(os.path.join(TOP_DIR, 'VERSION_NUMBER')) as version_file: VERSION_NUMBER = version_file.read().strip() if '--weekly_build' in sys.argv: today_number = date.today().strftime("%Y%m%d") VERSION_NUMBER += '.dev' + today_number PACKAGE_NAME = 'onnx-weekly' sys.argv.remove('--weekly_build') VersionInfo = namedtuple('VersionInfo', ['version', 'git_version'])( version=VERSION_NUMBER, git_version=git_version ) ################################################################################ # Pre Check ################################################################################ assert CMAKE, 'Could not find "cmake" executable!' 
################################################################################ # Utilities ################################################################################ @contextmanager def cd(path): if not os.path.isabs(path): raise RuntimeError('Can only cd to absolute path, got: {}'.format(path)) orig_path = os.getcwd() os.chdir(path) try: yield finally: os.chdir(orig_path) ################################################################################ # Customized commands ################################################################################ class ONNXCommand(setuptools.Command): user_options = [] def initialize_options(self): pass def finalize_options(self): pass class create_version(ONNXCommand): def run(self): with open(os.path.join(SRC_DIR, 'version.py'), 'w') as f: f.write(dedent('''\ # This file is generated by setup.py. DO NOT EDIT! from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals version = '{version}' git_version = '{git_version}' '''.format(**dict(VersionInfo._asdict())))) class cmake_build(setuptools.Command): """ Compiles everything when `python setup.py build` is run using cmake. Custom args can be passed to cmake by specifying the `CMAKE_ARGS` environment variable. The number of CPUs used by `make` can be specified by passing `-j<ncpus>` to `setup.py build`. By default all CPUs are used. """ user_options = [ (str('jobs='), str('j'), str('Specifies the number of jobs to use with make')) ] built = False def initialize_options(self): self.jobs = None def finalize_options(self): if sys.version_info[0] >= 3: self.set_undefined_options('build', ('parallel', 'jobs')) if self.jobs is None and os.getenv("MAX_JOBS") is not None: self.jobs = os.getenv("MAX_JOBS") self.jobs = multiprocessing.cpu_count() if self.jobs is None else int(self.jobs) def run(self): if cmake_build.built: return cmake_build.built = True if not os.path.exists(CMAKE_BUILD_DIR): os.makedirs(CMAKE_BUILD_DIR) with cd(CMAKE_BUILD_DIR): build_type = 'Release' # configure cmake_args = [ CMAKE, '-DPYTHON_INCLUDE_DIR={}'.format(sysconfig.get_python_inc()), '-DPYTHON_EXECUTABLE={}'.format(sys.executable), '-DBUILD_ONNX_PYTHON=ON', '-DCMAKE_EXPORT_COMPILE_COMMANDS=ON', '-DONNX_NAMESPACE={}'.format(ONNX_NAMESPACE), '-DPY_EXT_SUFFIX={}'.format(sysconfig.get_config_var('EXT_SUFFIX') or ''), ] if COVERAGE: cmake_args.append('-DONNX_COVERAGE=ON') if COVERAGE or DEBUG: # in order to get accurate coverage information, the # build needs to turn off optimizations build_type = 'Debug' cmake_args.append('-DCMAKE_BUILD_TYPE=%s' % build_type) if WINDOWS: cmake_args.extend([ # we need to link with libpython on Windows, so # pass the python version so that cmake can # find python '-DPY_VERSION={}'.format('{0}.{1}'.format(*sys.version_info[:2])), ]) if USE_MSVC_STATIC_RUNTIME: cmake_args.append('-DONNX_USE_MSVC_STATIC_RUNTIME=ON') if platform.architecture()[0] == '64bit': cmake_args.extend(['-A', 'x64', '-T', 'host=x64']) else: cmake_args.extend(['-A', 'Win32', '-T', 'host=x86']) if ONNX_ML: cmake_args.append('-DONNX_ML=1') if ONNX_VERIFY_PROTO3: cmake_args.append('-DONNX_VERIFY_PROTO3=1') if ONNX_BUILD_TESTS: cmake_args.append('-DONNX_BUILD_TESTS=ON') if ONNX_DISABLE_EXCEPTIONS: cmake_args.append('-DONNX_DISABLE_EXCEPTIONS=ON') if 'CMAKE_ARGS' in os.environ: extra_cmake_args = shlex.split(os.environ['CMAKE_ARGS']) # prevent crossfire with downstream scripts del os.environ['CMAKE_ARGS'] log.info('Extra cmake 
args: {}'.format(extra_cmake_args)) cmake_args.extend(extra_cmake_args) cmake_args.append(TOP_DIR) log.info('Using cmake args: {}'.format(cmake_args)) if '-DONNX_DISABLE_EXCEPTIONS=ON' in cmake_args: raise RuntimeError("-DONNX_DISABLE_EXCEPTIONS=ON option is only available for c++ builds. Python binding require exceptions to be enabled.") subprocess.check_call(cmake_args) build_args = [CMAKE, '--build', os.curdir] if WINDOWS: build_args.extend(['--config', build_type]) build_args.extend(['--', '/maxcpucount:{}'.format(self.jobs)]) else: build_args.extend(['--', '-j', str(self.jobs)]) subprocess.check_call(build_args) class build_py(setuptools.command.build_py.build_py): def run(self): self.run_command('create_version') self.run_command('cmake_build') generated_python_files = \ glob.glob(os.path.join(CMAKE_BUILD_DIR, 'onnx', '*.py')) + \ glob.glob(os.path.join(CMAKE_BUILD_DIR, 'onnx', '*.pyi')) for src in generated_python_files: dst = os.path.join( TOP_DIR, os.path.relpath(src, CMAKE_BUILD_DIR)) self.copy_file(src, dst) return setuptools.command.build_py.build_py.run(self) class develop(setuptools.command.develop.develop): def run(self): self.run_command('build_py') setuptools.command.develop.develop.run(self) class build_ext(setuptools.command.build_ext.build_ext): def run(self): self.run_command('cmake_build') setuptools.command.build_ext.build_ext.run(self) def build_extensions(self): for ext in self.extensions: fullname = self.get_ext_fullname(ext.name) filename = os.path.basename(self.get_ext_filename(fullname)) lib_path = CMAKE_BUILD_DIR if os.name == 'nt': debug_lib_dir = os.path.join(lib_path, "Debug") release_lib_dir = os.path.join(lib_path, "Release") if os.path.exists(debug_lib_dir): lib_path = debug_lib_dir elif os.path.exists(release_lib_dir): lib_path = release_lib_dir src = os.path.join(lib_path, filename) dst = os.path.join(os.path.realpath(self.build_lib), "onnx", filename) self.copy_file(src, dst) class mypy_type_check(ONNXCommand): description = 'Run MyPy type checker' def run(self): """Run command.""" onnx_script = os.path.realpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), "tools/mypy-onnx.py")) returncode = subprocess.call([sys.executable, onnx_script]) sys.exit(returncode) cmdclass = { 'create_version': create_version, 'cmake_build': cmake_build, 'build_py': build_py, 'develop': develop, 'build_ext': build_ext, 'typecheck': mypy_type_check, } ################################################################################ # Extensions ################################################################################ ext_modules = [ setuptools.Extension( name=str('onnx.onnx_cpp2py_export'), sources=[]) ] ################################################################################ # Packages ################################################################################ # no need to do fancy stuff so far packages = setuptools.find_packages() requirements_file = "requirements.txt" requirements_path = os.path.join(os.getcwd(), requirements_file) if not os.path.exists(requirements_path): this = os.path.dirname(__file__) requirements_path = os.path.join(this, requirements_file) if not os.path.exists(requirements_path): raise FileNotFoundError("Unable to find " + requirements_file) with open(requirements_path) as f: install_requires = f.read().splitlines() ################################################################################ # Test ################################################################################ 
setup_requires.append('pytest-runner') tests_require.append('pytest') tests_require.append('nbval') tests_require.append('tabulate') extras_require["mypy"] = ["mypy==0.910"] ################################################################################ # Final ################################################################################ setuptools.setup( name=PACKAGE_NAME, version=VersionInfo.version, description="Open Neural Network Exchange", long_description=open("README.md").read(), long_description_content_type="text/markdown", ext_modules=ext_modules, cmdclass=cmdclass, packages=packages, license='Apache License v2.0', include_package_data=True, install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, extras_require=extras_require, author='ONNX', author_email='[email protected]', url='https://github.com/onnx/onnx', entry_points={ 'console_scripts': [ 'check-model = onnx.bin.checker:check_model', 'check-node = onnx.bin.checker:check_node', 'backend-test-tools = onnx.backend.test.cmd_tools:main', ] }, )
[]
[]
[ "ONNX_ML", "ONNX_BUILD_TESTS", "CMAKE_ARGS", "ONNX_NAMESPACE", "COVERAGE", "MAX_JOBS", "ONNX_VERIFY_PROTO3", "DEBUG", "USE_MSVC_STATIC_RUNTIME", "ONNX_DISABLE_EXCEPTIONS" ]
[]
["ONNX_ML", "ONNX_BUILD_TESTS", "CMAKE_ARGS", "ONNX_NAMESPACE", "COVERAGE", "MAX_JOBS", "ONNX_VERIFY_PROTO3", "DEBUG", "USE_MSVC_STATIC_RUNTIME", "ONNX_DISABLE_EXCEPTIONS"]
python
10
0
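Most build variants above follow one convention: an environment variable compared against the string '1' (or '0'). A tiny helper showing that convention, with hypothetical variable names; note that ONNX_ML inverts it and is on unless explicitly set to '0':

import os

def env_flag(name, default='0'):
    # True when the variable is the string '1' -- the convention used above.
    return os.getenv(name, default) == '1'

os.environ['MY_BUILD_TESTS'] = '1'  # hypothetical variable
assert env_flag('MY_BUILD_TESTS')
assert not env_flag('MY_COVERAGE')  # unset, falls back to '0'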
applications/DEMApplication/tests/test_kinematic_constraints.py
import os import KratosMultiphysics as Kratos from KratosMultiphysics import Logger Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING) import KratosMultiphysics.KratosUnittest as KratosUnittest import KratosMultiphysics.DEMApplication.DEM_analysis_stage import KratosMultiphysics.kratos_utilities as kratos_utils this_working_dir_backup = os.getcwd() def GetFilePath(fileName): return os.path.join(os.path.dirname(os.path.realpath(__file__)), fileName) def CreateAndRunStageInOneOpenMPThread(my_obj, model, parameters_file_name): omp_utils = Kratos.OpenMPUtils() if "OMP_NUM_THREADS" in os.environ: initial_number_of_threads = os.environ['OMP_NUM_THREADS'] omp_utils.SetNumThreads(1) with open(parameters_file_name,'r') as parameter_file: project_parameters = Kratos.Parameters(parameter_file.read()) my_obj(model, project_parameters).Run() if "OMP_NUM_THREADS" in os.environ: omp_utils.SetNumThreads(int(initial_number_of_threads)) class KinematicConstraintsTestSolution(KratosMultiphysics.DEMApplication.DEM_analysis_stage.DEMAnalysisStage): @classmethod def GetMainPath(self): return os.path.join(os.path.dirname(os.path.realpath(__file__)), "kinematic_constraints_tests_files") def GetProblemNameWithPath(self): return os.path.join(self.main_path, self.DEM_parameters["problem_name"].GetString()) def FinalizeTimeStep(self, time): tolerance = 1e-3 for node in self.spheres_model_part.Nodes: velocity = node.GetSolutionStepValue(Kratos.VELOCITY) angular_velocity = node.GetSolutionStepValue(Kratos.ANGULAR_VELOCITY) if node.Id == 1: if time > 0.18 and time < 0.2: expected_value = 0.0 self.CheckValueOfVelocity(velocity, 0, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfVelocity(velocity, 1, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfVelocity(velocity, 2, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfAngularVelocity(angular_velocity, 0, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfAngularVelocity(angular_velocity, 1, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfAngularVelocity(angular_velocity, 2, expected_value, tolerance) elif time > 0.31999 and time < 0.32: expected_value = -1.179 self.CheckValueOfVelocity(velocity, 1, expected_value, tolerance) if node.Id == 2: if time > 0.25 and time < 0.3: expected_value = -10.0 * time self.CheckValueOfVelocity(velocity, 0, expected_value, tolerance) if time > 0.59999 and time < 0.6: expected_value = -1.962 self.CheckValueOfVelocity(velocity, 1, expected_value, tolerance) if node.Id == 3: if time < 0.1: expected_value = -5.0 self.CheckValueOfVelocity(velocity, 0, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfVelocity(velocity, 1, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfVelocity(velocity, 2, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfAngularVelocity(angular_velocity, 0, expected_value, tolerance) expected_value = 0.0 self.CheckValueOfAngularVelocity(angular_velocity, 1, expected_value, tolerance) expected_value = -10.0 self.CheckValueOfAngularVelocity(angular_velocity, 2, expected_value, tolerance) if node.Id == 4: if time > 0.22 and time < 0.25: expected_value = 0.2192 self.CheckValueOfAngularVelocity(angular_velocity, 2, expected_value, tolerance) @classmethod def CheckValueOfVelocity(self, velocity, component, expected_value, tolerance): if velocity[component] > expected_value + tolerance or velocity[component] < expected_value - tolerance: raise ValueError('Incorrect value for VELOCITY ' + 
str(component) + ': expected value was '+ str(expected_value) + ' but received ' + str(velocity)) @classmethod def CheckValueOfAngularVelocity(self, angular_velocity, component, expected_value, tolerance): if angular_velocity[component] > expected_value + tolerance or angular_velocity[component] < expected_value - tolerance: raise ValueError('Incorrect value for ANGULAR_VELOCITY ' + str(component) + ': expected value was '+ str(expected_value) + ' but received ' + str(angular_velocity)) def Finalize(self): super(KinematicConstraintsTestSolution, self).Finalize() class TestKinematicConstraints(KratosUnittest.TestCase): def setUp(self): pass @classmethod def test_KinematicConstraints_1(self): path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "kinematic_constraints_tests_files") parameters_file_name = os.path.join(path, "ProjectParametersDEM.json") model = Kratos.Model() CreateAndRunStageInOneOpenMPThread(KinematicConstraintsTestSolution, model, parameters_file_name) def tearDown(self): file_to_remove = os.path.join("kinematic_constraints_tests_files", "TimesPartialRelease") kratos_utils.DeleteFileIfExisting(GetFilePath(file_to_remove)) os.chdir(this_working_dir_backup) if __name__ == "__main__": Kratos.Logger.GetDefaultOutput().SetSeverity(Logger.Severity.WARNING) KratosUnittest.main()
[]
[]
[ "OMP_NUM_THREADS" ]
[]
["OMP_NUM_THREADS"]
python
1
0
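CreateAndRunStageInOneOpenMPThread above pins OMP_NUM_THREADS to one and restores the old value afterwards. The same save/restore idea as a generic context manager; this sketch touches only the environment and omits the Kratos OpenMPUtils call:

import os
from contextlib import contextmanager

@contextmanager
def omp_threads(n):
    # Temporarily override OMP_NUM_THREADS, restoring the previous state on exit.
    previous = os.environ.get('OMP_NUM_THREADS')
    os.environ['OMP_NUM_THREADS'] = str(n)
    try:
        yield
    finally:
        if previous is None:
            del os.environ['OMP_NUM_THREADS']
        else:
            os.environ['OMP_NUM_THREADS'] = previous

with omp_threads(1):
    pass  # run the single-threaded stage here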
test_edge.py
import argparse import os import random import numpy as np import torch import shapenet_edge as shapenet_dataset import model_edge as models import util_edge as util import ops parser = argparse.ArgumentParser() parser.add_argument('--remove_point_num', type=int, default=512) parser.add_argument('--cal_edge', action='store_true') parser.add_argument('--test_unseen', action='store_true') parser.add_argument('--train_seen', action='store_true') # action='store_true' parser.add_argument('--loss_type', type=str,default='topnet') parser.add_argument('--train_pcn', action='store_true') parser.add_argument('--n_in_points', type=int, default=2048) parser.add_argument('--n_gt_points', type=int, default=2048) parser.add_argument('--n_out_points', type=int, default=2048) parser.add_argument('--eval_path', default='data/shapenetcore_partanno_segmentation_benchmark_v0_test2_edge_200_5.000000.h5') # data/topnet_dataset2019/val_edge.h5 # data/shapenetcore_partanno_segmentation_benchmark_v0_test2_edge_200_5.000000.h5 parser.add_argument('--gpu', type=str,default='1') parser.add_argument('--run_name', default='test') parser.add_argument('--batch_size', type=int, default=32) parser.add_argument('--num_gpus', type=int, default=1) parser.add_argument('--normalize_ratio', type=float,default=0.5) parser.add_argument('--pre_trained_model', default='model_best.pth.tar') parser.add_argument('--grid_size', type=int,default=32) parser.add_argument('--augment', action='store_true') #### set to true if use scale etc... parser.add_argument('--pc_augm_scale', default=1.0, type=float) parser.add_argument('--pc_augm_rot', default=0, type=int,help='Training augmentation: Bool, random rotation around z-axis') parser.add_argument('--pc_augm_mirror_prob', default=0.0, type=float,help='Training augmentation: Probability of mirroring about x or y axes') parser.add_argument('--pc_augm_jitter', default=0, type=int) parser.add_argument('--random_seed', type=int, default=42) args = parser.parse_args() if args.num_gpus==1: os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu chamfer_index=ops.chamferdist_index.chamferdist.ChamferDistance() torch.manual_seed(args.random_seed) np.random.seed(args.random_seed) random.seed(args.random_seed) def apply_net(net, dataloader, args): eval_loss=0 for train_id,batch_data in enumerate(dataloader): inp, target, _, _,_ = batch_data inp = inp.cuda().transpose(2, 1).contiguous() target = target.cuda().transpose(2, 1).contiguous() pred, dens, dens_cls, reg, voxels, pred_edge,reg_edge,dens_cls_edge,dens_edge = net(inp, n_points=args.n_out_points) dist1_fine, dist2_fine, _, _ = chamfer_index(pred.transpose(2, 1).contiguous(), target.transpose(2, 1).contiguous()) if args.loss_type == 'pcn': out_cd = (torch.mean(torch.sqrt(dist2_fine), dim=1) + torch.mean(torch.sqrt(dist1_fine), dim=1)) / 2 else: out_cd = torch.mean(dist2_fine, dim=1) + torch.mean(dist1_fine, dim=1) chamfer_loss = torch.sum(out_cd)#*pred.shape[0] eval_loss+=chamfer_loss.item() return eval_loss / len(dataloader.dataset), pred.detach().cpu() def eval_net(net, dataloader, args): net.eval() with torch.no_grad(): return apply_net(net, dataloader, args) def test(args): if args.train_pcn: val_data = shapenet_dataset.PartDatasetPCN(args, path=args.eval_path, training=False) else: val_data = shapenet_dataset.PartDataset(args, path=args.eval_path, training=False) val_dataloader = torch.utils.data.DataLoader(val_data, num_workers=0, batch_size=args.batch_size, shuffle=False, drop_last=False) net = models.GridAutoEncoderAdaIN(args,rnd_dim=2, adain_layer=3, 
ops=ops) if args.num_gpus > 1: net = torch.nn.DataParallel(net) # ,device_ids=[0,1] net=net.cuda() net.apply(util.init_weights) if os.path.isfile(os.path.join('runs/{}/{}'.format(args.run_name,args.pre_trained_model))): checkpoint = torch.load(os.path.join('runs/{}/{}'.format(args.run_name,args.pre_trained_model))) net.load_state_dict(checkpoint['state_dict']) print("=> loaded checkpoint '{}'".format(args.pre_trained_model)) args.training = False chamfer_avg, _ = eval_net(net, val_dataloader, args) print(chamfer_avg) if __name__ == '__main__': test(args)
[]
[]
[ "CUDA_VISIBLE_DEVICES" ]
[]
["CUDA_VISIBLE_DEVICES"]
python
1
0
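The script above selects the visible GPU and seeds every RNG before building the model. A stripped-down sketch of that reproducibility preamble; the seed and GPU id are arbitrary, and the torch import is guarded in case PyTorch is absent:

import os
import random

import numpy as np

os.environ['CUDA_VISIBLE_DEVICES'] = '1'  # must be set before CUDA initializes
SEED = 42
random.seed(SEED)
np.random.seed(SEED)
try:
    import torch
    torch.manual_seed(SEED)  # seeds the CPU generator (and, via PyTorch, CUDA)
except ImportError:
    pass  # numpy/random seeding still applies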
demo4test/scripts/utp_map_job_excuting.py
import os import zipfile import shutil def utp_print(s): print "utp>" + s def command(cmd, visible=True): if visible: utp_print(cmd) ret = os.system(cmd) return ret # return as string list def command2(cmd, visible=True): if visible: utp_print(cmd) r = os.popen(cmd) lines = r.readlines() r.close() return lines def createExtractToFolder(extract_to): extract_to_path = os.path.join(os.getcwd(), extract_to) if not os.path.isdir(extract_to_path): os.mkdir(extract_to_path) # result/ def move_out(src, src_temp, dst): src_folder = src if not os.path.isdir(src): src_folder = src_temp for f in os.listdir(src_folder): src_file = os.path.join(src_folder, f) dst_file = os.path.join(dst, f) if os.path.isfile(src_file): shutil.copy(src_file, dst_file) # dst folder must be created. utp_print("copy %s to %s complete." %(src_file, dst)) def main(): apk_path = os.environ.get('debug_apk')#"demo4test-debug.apk" #os.environ.get('debug_apk') apk_pkg = "com.tencent.mig.demo4test.SOSOMap" #os.environ.get('debug_pkg') test_apk_path = os.environ.get('test_apk')#"demo4test-debug-androidTest-unaligned.apk"#os.environ.get('test_apk') test_apk_pkg = "com.tencent.mig.demo4test.SOSOMap.test" #os.environ.get('test_pkg') test_class = os.environ.get('test_class')#"com.tencent.mig.demo4test.testcases.SdkBVTTestSuite" android_device_serial = os.environ.get('ANDROID_SERIAL')#"192.168.206.102:5555"#os.environ.get('ANDROID_SERIAL') target_name = "tencentMapSDKTest" zip_on_phone = "/sdcard/" + target_name + "/" move_to = "output" temp = "temp" target_in_temp_folder = os.path.join(temp, target_name) print "TestCases:\n" + test_class print "Current Apk Path:" + apk_path if android_device_serial == "": print "Android Device Serial is Empty! Something wrong & Test Abort!" else: command('adb -s %s shell ls /sdcard/' % (android_device_serial)) command('adb -s %s shell rm -rf %s' % (android_device_serial,zip_on_phone)) command('adb -s %s uninstall %s' % (android_device_serial,apk_pkg)) command('adb -s %s uninstall %s' % (android_device_serial,test_apk_pkg)) s_sdk = command2('adb -s %s shell getprop ro.build.version.sdk' % (android_device_serial)) sdk = int(s_sdk[0]) if sdk >= 23: command('adb -s %s install -g %s' % (android_device_serial,apk_path)) command('adb -s %s install -g %s' % (android_device_serial,test_apk_path)) utp_print('sdk>=23, sdk=%s' % (sdk)) else: command('adb -s %s install %s' % (android_device_serial,apk_path)) command('adb -s %s install %s' % (android_device_serial,test_apk_path)) utp_print('sdk<23, sdk=%s' % (sdk)) command('adb -s %s shell am instrument -w -r \ -e debug false -e class %s \ %s/android.support.test.runner.AndroidJUnitRunner' % (android_device_serial,test_class,test_apk_pkg)) # must create folder extract_to first, otherwise 'adb: error: cannot create file/directory' on Linux createExtractToFolder(temp) createExtractToFolder(move_to) command('adb -s %s pull %s %s' % (android_device_serial,zip_on_phone,temp)) command('adb -s %s shell ls /sdcard/' % (android_device_serial)) move_out(target_in_temp_folder, temp, move_to) if __name__ == '__main__': main()
[]
[]
[ "test_apk", "test_pkg", "ANDROID_SERIAL", "test_class", "debug_apk", "debug_pkg" ]
[]
["test_apk", "test_pkg", "ANDROID_SERIAL", "test_class", "debug_apk", "debug_pkg"]
python
6
0
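The installer above branches on the device API level: from SDK 23 on it passes install -g so runtime permissions are granted up front. A compact sketch of that check using subprocess instead of os.system; the serial and APK name are hypothetical:

import subprocess

def install_apk(serial, apk):
    # Query the device API level, then grant-install on Marshmallow and later.
    out = subprocess.check_output(
        ['adb', '-s', serial, 'shell', 'getprop', 'ro.build.version.sdk'])
    sdk = int(out.decode().strip())
    cmd = ['adb', '-s', serial, 'install']
    if sdk >= 23:
        cmd.append('-g')  # grant all runtime permissions at install time
    cmd.append(apk)
    subprocess.check_call(cmd)

# install_apk('192.168.0.2:5555', 'demo4test-debug.apk')  # hypothetical device/APK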
vendor/github.com/elastic/go-txfile/internal/mint/qc.go
// Licensed to Elasticsearch B.V. under one or more contributor // license agreements. See the NOTICE file distributed with // this work for additional information regarding copyright // ownership. Elasticsearch B.V. licenses this file to you under // the Apache License, Version 2.0 (the "License"); you may // not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package mint import ( "fmt" "math/rand" "os" "strconv" "testing/quick" "time" "github.com/urso/qcgen" ) // SetDefaultGenerators sets default generators for use with QuickCheck. func (t *T) SetDefaultGenerators(fns ...interface{}) { t.defaultGenerators = fns } // QuickCheck runs a quick/check test. The last function passed specifies the // test to be run. The test function can accept any parameters, but must return // a bool value. The arguments are generated by random. Custom typed generator // function of type `func(*rand.Rand) T` can be passed before the function // under test. A generator with return value T will be used for every argument // of type T. // By default the random number generators seed is based on the current // timestamp. Use the TEST_SEED environment value to configure a static seed value to be used by every test. // The random number generator is not shared between tests. func (t *T) QuickCheck(fns ...interface{}) { L := len(fns) check, generators := fns[L-1], fns[:L-1] if len(t.defaultGenerators) > 0 { if len(generators) > 0 { tmp := make([]interface{}, len(t.defaultGenerators)+len(generators)) n := copy(tmp, t.defaultGenerators) copy(tmp[n:], generators) generators = tmp } else { generators = t.defaultGenerators } } seed := qcSeed() rng := NewRng(seed) t.Log("quick check rng seed: ", seed) t.NoError(quick.Check(check, &quick.Config{ Rand: rng, Values: qcgen.NewGenerator(check, generators...).Gen, })) } func NewRng(seed int64) *rand.Rand { if seed <= 0 { seed = qcSeed() } return rand.New(rand.NewSource(seed)) } func RngSeed() int64 { return qcSeed() } func qcSeed() int64 { v := os.Getenv("TEST_SEED") if v == "" { return time.Now().UnixNano() } i, err := strconv.ParseInt(v, 10, 64) if err != nil { panic(fmt.Errorf("invalid seed '%v': %v", v, err)) } return i }
[ "\"TEST_SEED\"" ]
[]
[ "TEST_SEED" ]
[]
["TEST_SEED"]
go
1
0
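The TEST_SEED convention above, a static seed from the environment with a current-time fallback, ports directly to Python. A small sketch:

import os
import random
import time

def qc_seed():
    # Static seed from TEST_SEED when set, otherwise a per-run time-based seed.
    value = os.getenv('TEST_SEED')
    if not value:
        return time.time_ns()  # analogous to time.Now().UnixNano()
    return int(value)  # raises ValueError on a malformed seed, like the Go panic

rng = random.Random(qc_seed())
print(rng.randint(0, 99))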
rllab/misc/instrument.py
import os import re import subprocess import base64 import os.path as osp import pickle as pickle import inspect import hashlib import sys from contextlib import contextmanager import errno from rllab.core.serializable import Serializable from rllab import config from rllab.misc.console import mkdir_p from rllab.misc import ext from io import StringIO import datetime import dateutil.tz import json import time import numpy as np from rllab.misc.ext import AttrDict from rllab.viskit.core import flatten import collections class StubBase(object): def __getitem__(self, item): return StubMethodCall(self, "__getitem__", args=[item], kwargs=dict()) def __getattr__(self, item): try: return super(self.__class__, self).__getattribute__(item) except AttributeError: if item.startswith("__") and item.endswith("__"): raise return StubAttr(self, item) def __pow__(self, power, modulo=None): return StubMethodCall(self, "__pow__", [power, modulo], dict()) def __call__(self, *args, **kwargs): return StubMethodCall(self.obj, self.attr_name, args, kwargs) def __add__(self, other): return StubMethodCall(self, "__add__", [other], dict()) def __rmul__(self, other): return StubMethodCall(self, "__rmul__", [other], dict()) def __div__(self, other): return StubMethodCall(self, "__div__", [other], dict()) def __rdiv__(self, other): return StubMethodCall(BinaryOp(), "rdiv", [self, other], dict()) # self, "__rdiv__", [other], dict()) def __rpow__(self, power, modulo=None): return StubMethodCall(self, "__rpow__", [power, modulo], dict()) class BinaryOp(Serializable): def __init__(self): Serializable.quick_init(self, locals()) def rdiv(self, a, b): return b / a # def __init__(self, opname, a, b): # self.opname = opname # self.a = a # self.b = b class StubAttr(StubBase): def __init__(self, obj, attr_name): self.__dict__["_obj"] = obj self.__dict__["_attr_name"] = attr_name @property def obj(self): return self.__dict__["_obj"] @property def attr_name(self): return self.__dict__["_attr_name"] def __str__(self): return "StubAttr(%s, %s)" % (str(self.obj), str(self.attr_name)) class StubMethodCall(StubBase, Serializable): def __init__(self, obj, method_name, args, kwargs): self._serializable_initialized = False Serializable.quick_init(self, locals()) self.obj = obj self.method_name = method_name self.args = args self.kwargs = kwargs def __str__(self): return "StubMethodCall(%s, %s, %s, %s)" % ( str(self.obj), str(self.method_name), str(self.args), str(self.kwargs)) class StubClass(StubBase): def __init__(self, proxy_class): self.proxy_class = proxy_class def __call__(self, *args, **kwargs): if len(args) > 0: # Convert the positional arguments to keyword arguments spec = inspect.getargspec(self.proxy_class.__init__) kwargs = dict(list(zip(spec.args[1:], args)), **kwargs) args = tuple() return StubObject(self.proxy_class, *args, **kwargs) def __getstate__(self): return dict(proxy_class=self.proxy_class) def __setstate__(self, dict): self.proxy_class = dict["proxy_class"] def __getattr__(self, item): if hasattr(self.proxy_class, item): return StubAttr(self, item) raise AttributeError def __str__(self): return "StubClass(%s)" % self.proxy_class class StubObject(StubBase): def __init__(self, __proxy_class, *args, **kwargs): if len(args) > 0: spec = inspect.getargspec(__proxy_class.__init__) kwargs = dict(list(zip(spec.args[1:], args)), **kwargs) args = tuple() self.proxy_class = __proxy_class self.args = args self.kwargs = kwargs def __getstate__(self): return dict(args=self.args, kwargs=self.kwargs, proxy_class=self.proxy_class) 
def __setstate__(self, dict): self.args = dict["args"] self.kwargs = dict["kwargs"] self.proxy_class = dict["proxy_class"] def __getattr__(self, item): # why doesnt the commented code work? # return StubAttr(self, item) # checks bypassed to allow for accesing instance fileds if hasattr(self.proxy_class, item): return StubAttr(self, item) raise AttributeError('Cannot get attribute %s from %s' % (item, self.proxy_class)) def __str__(self): return "StubObject(%s, *%s, **%s)" % (str(self.proxy_class), str(self.args), str(self.kwargs)) class VariantDict(AttrDict): def __init__(self, d, hidden_keys): super(VariantDict, self).__init__(d) self._hidden_keys = hidden_keys def dump(self): return {k: v for k, v in self.items() if k not in self._hidden_keys} class VariantGenerator(object): """ Usage: vg = VariantGenerator() vg.add("param1", [1, 2, 3]) vg.add("param2", ['x', 'y']) vg.variants() => # all combinations of [1,2,3] x ['x','y'] Supports noncyclic dependency among parameters: vg = VariantGenerator() vg.add("param1", [1, 2, 3]) vg.add("param2", lambda param1: [param1+1, param1+2]) vg.variants() => # .. """ def __init__(self): self._variants = [] self._populate_variants() self._hidden_keys = [] for k, vs, cfg in self._variants: if cfg.get("hide", False): self._hidden_keys.append(k) def add(self, key, vals, **kwargs): self._variants.append((key, vals, kwargs)) def _populate_variants(self): methods = inspect.getmembers( self.__class__, predicate=lambda x: inspect.isfunction(x) or inspect.ismethod(x)) methods = [x[1].__get__(self, self.__class__) for x in methods if getattr(x[1], '__is_variant', False)] for m in methods: self.add(m.__name__, m, **getattr(m, "__variant_config", dict())) def variants(self, randomized=False): ret = list(self.ivariants()) if randomized: np.random.shuffle(ret) return list(map(self.variant_dict, ret)) def variant_dict(self, variant): return VariantDict(variant, self._hidden_keys) def to_name_suffix(self, variant): suffix = [] for k, vs, cfg in self._variants: if not cfg.get("hide", False): suffix.append(k + "_" + str(variant[k])) return "_".join(suffix) def ivariants(self): dependencies = list() for key, vals, _ in self._variants: if hasattr(vals, "__call__"): args = inspect.getargspec(vals).args if hasattr(vals, 'im_self') or hasattr(vals, "__self__"): # remove the first 'self' parameter args = args[1:] dependencies.append((key, set(args))) else: dependencies.append((key, set())) sorted_keys = [] # topo sort all nodes while len(sorted_keys) < len(self._variants): # get all nodes with zero in-degree free_nodes = [k for k, v in dependencies if len(v) == 0] if len(free_nodes) == 0: error_msg = "Invalid parameter dependency: \n" for k, v in dependencies: if len(v) > 0: error_msg += k + " depends on " + " & ".join(v) + "\n" raise ValueError(error_msg) dependencies = [(k, v) for k, v in dependencies if k not in free_nodes] # remove the free nodes from the remaining dependencies for _, v in dependencies: v.difference_update(free_nodes) sorted_keys += free_nodes return self._ivariants_sorted(sorted_keys) def _ivariants_sorted(self, sorted_keys): if len(sorted_keys) == 0: yield dict() else: first_keys = sorted_keys[:-1] first_variants = self._ivariants_sorted(first_keys) last_key = sorted_keys[-1] last_vals = [v for k, v, _ in self._variants if k == last_key][0] if hasattr(last_vals, "__call__"): last_val_keys = inspect.getargspec(last_vals).args if hasattr(last_vals, 'im_self') or hasattr(last_vals, '__self__'): last_val_keys = last_val_keys[1:] else: last_val_keys = None for 
variant in first_variants: if hasattr(last_vals, "__call__"): last_variants = last_vals( **{k: variant[k] for k in last_val_keys}) for last_choice in last_variants: yield AttrDict(variant, **{last_key: last_choice}) else: for last_choice in last_vals: yield AttrDict(variant, **{last_key: last_choice}) def variant(*args, **kwargs): def _variant(fn): fn.__is_variant = True fn.__variant_config = kwargs return fn if len(args) == 1 and isinstance(args[0], collections.Callable): return _variant(args[0]) return _variant def stub(glbs): # replace the __init__ method in all classes # hacky!!! for k, v in list(glbs.items()): # look at all variables that are instances of a class (not yet Stub) if isinstance(v, type) and v != StubClass: glbs[k] = StubClass(v) # and replaces them by a the same but Stub def query_yes_no(question, default="yes"): """Ask a yes/no question via raw_input() and return their answer. "question" is a string that is presented to the user. "default" is the presumed answer if the user just hits <Enter>. It must be "yes" (the default), "no" or None (meaning an answer is required of the user). The "answer" return value is True for "yes" or False for "no". """ valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False} if default is None: prompt = " [y/n] " elif default == "yes": prompt = " [Y/n] " elif default == "no": prompt = " [y/N] " else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(question + prompt) choice = input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: return valid[choice] else: sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n") exp_count = 0 now = datetime.datetime.now(dateutil.tz.tzlocal()) timestamp = now.strftime('%Y_%m_%d_%H_%M_%S') remote_confirmed = False def run_experiment_lite( stub_method_call=None, batch_tasks=None, exp_prefix="experiment", exp_name=None, log_dir=None, script="scripts/run_experiment_lite.py", python_command="python", mode="local", dry=False, docker_image=None, aws_config=None, env=None, variant=None, use_gpu=False, sync_s3_pkl=False, sync_s3_png=False, sync_s3_log=False, sync_log_on_termination=True, confirm_remote=True, terminate_machine=True, periodic_sync=True, periodic_sync_interval=15, sync_all_data_node_to_s3=True, use_cloudpickle=None, pre_commands=None, added_project_directories=[], **kwargs): """ Serialize the stubbed method call and run the experiment using the specified mode. :param stub_method_call: A stubbed method call. :param script: The name of the entrance point python script :param mode: Where & how to run the experiment. Should be one of "local", "local_docker", "ec2", and "lab_kube". :param dry: Whether to do a dry-run, which only prints the commands without executing them. :param exp_prefix: Name prefix for the experiments :param docker_image: name of the docker image. Ignored if using local mode. :param aws_config: configuration for AWS. Only used under EC2 mode :param env: extra environment variables :param kwargs: All other parameters will be passed directly to the entrance python script. :param variant: If provided, should be a dictionary of parameters :param use_gpu: Whether the launched task is running on GPU. 
This triggers a few configuration changes including certain environment flags :param sync_s3_pkl: Whether to sync pkl files during execution of the experiment (they will always be synced at the end of the experiment) :param sync_s3_png: Whether to sync png files during execution of the experiment (they will always be synced at the end of the experiment) :param sync_s3_log: Whether to sync log files during execution of the experiment (they will always be synced at the end of the experiment) :param confirm_remote: Whether to confirm before launching experiments remotely :param terminate_machine: Whether to terminate machine after experiment finishes. Only used when using mode="ec2". This is useful when one wants to debug after an experiment finishes abnormally. :param periodic_sync: Whether to synchronize certain experiment files periodically during execution. :param periodic_sync_interval: Time interval between each periodic sync, in seconds. """ assert stub_method_call is not None or batch_tasks is not None, "Must provide at least either stub_method_call or batch_tasks" if use_cloudpickle is None: for maybe_stub in (batch_tasks or [stub_method_call]): # decide mode if isinstance(maybe_stub, StubBase): use_cloudpickle = False else: assert hasattr(maybe_stub, '__call__') use_cloudpickle = True # ensure variant exists if variant is None: variant = dict() if batch_tasks is None: batch_tasks = [ dict( kwargs, pre_commands=pre_commands, stub_method_call=stub_method_call, exp_name=exp_name, log_dir=log_dir, env=env, variant=variant, use_cloudpickle=use_cloudpickle ) ] global exp_count global remote_confirmed config.USE_GPU = use_gpu # params_list = [] for task in batch_tasks: call = task.pop("stub_method_call") if use_cloudpickle: import cloudpickle data = base64.b64encode(cloudpickle.dumps(call)).decode("utf-8") else: data = base64.b64encode(pickle.dumps(call)).decode("utf-8") task["args_data"] = data exp_count += 1 params = dict(kwargs) if task.get("exp_name", None) is None: task["exp_name"] = "%s_%s_%04d" % ( exp_prefix, timestamp, exp_count) if task.get("log_dir", None) is None: task["log_dir"] = config.LOG_DIR + "/local/" + \ exp_prefix.replace("_", "-") + "/" + task["exp_name"] if task.get("variant", None) is not None: variant = task.pop("variant") if "exp_name" not in variant: variant["exp_name"] = task["exp_name"] task["variant_data"] = base64.b64encode(pickle.dumps(variant)).decode("utf-8") elif "variant" in task: del task["variant"] task["remote_log_dir"] = osp.join( config.AWS_S3_PATH, exp_prefix.replace("_", "-"), task["exp_name"]) task["env"] = task.get("env", dict()) or dict() task["env"]["RLLAB_USE_GPU"] = str(use_gpu) if mode not in ["local", "local_docker"] and not remote_confirmed and not dry and confirm_remote: remote_confirmed = query_yes_no( "Running in (non-dry) mode %s. Confirm?" 
% mode) if not remote_confirmed: sys.exit(1) if hasattr(mode, "__call__"): if docker_image is None: docker_image = config.DOCKER_IMAGE mode( task, docker_image=docker_image, use_gpu=use_gpu, exp_prefix=exp_prefix, script=script, python_command=python_command, sync_s3_pkl=sync_s3_pkl, sync_log_on_termination=sync_log_on_termination, periodic_sync=periodic_sync, periodic_sync_interval=periodic_sync_interval, sync_all_data_node_to_s3=sync_all_data_node_to_s3, ) elif mode == "local": for task in batch_tasks: del task["remote_log_dir"] env = task.pop("env", None) command = to_local_command( task, python_command=python_command, script=osp.join(config.PROJECT_PATH, script), use_gpu=use_gpu ) print(command) if dry: return try: if env is None: env = dict() subprocess.call( command, shell=True, env=dict(os.environ, **env)) except Exception as e: print(e) if isinstance(e, KeyboardInterrupt): raise elif mode == "local_docker": if docker_image is None: docker_image = config.DOCKER_IMAGE for task in batch_tasks: del task["remote_log_dir"] env = task.pop("env", None) command = to_docker_command( task, # these are the params. Pre and Post command can be here docker_image=docker_image, script=script, env=env, use_gpu=use_gpu, use_tty=True, python_command=python_command, ) print(command) if dry: return p = subprocess.Popen(command, shell=True) try: p.wait() except KeyboardInterrupt: try: print("terminating") p.terminate() except OSError: print("os error!") pass p.wait() elif mode == "ec2": if docker_image is None: docker_image = config.DOCKER_IMAGE s3_code_path = s3_sync_code(config, dry=dry, added_project_directories=added_project_directories) launch_ec2(batch_tasks, exp_prefix=exp_prefix, docker_image=docker_image, python_command=python_command, script=script, aws_config=aws_config, dry=dry, terminate_machine=terminate_machine, use_gpu=use_gpu, code_full_path=s3_code_path, sync_s3_pkl=sync_s3_pkl, sync_s3_png=sync_s3_png, sync_s3_log=sync_s3_log, sync_log_on_termination=sync_log_on_termination, periodic_sync=periodic_sync, periodic_sync_interval=periodic_sync_interval) elif mode == "lab_kube": # assert env is None # first send code folder to s3 s3_code_path = s3_sync_code(config, dry=dry) if docker_image is None: docker_image = config.DOCKER_IMAGE for task in batch_tasks: # if 'env' in task: # assert task.pop('env') is None # TODO: dangerous when there are multiple tasks? 
task["resources"] = params.pop( "resources", config.KUBE_DEFAULT_RESOURCES) task["node_selector"] = params.pop( "node_selector", config.KUBE_DEFAULT_NODE_SELECTOR) task["exp_prefix"] = exp_prefix pod_dict = to_lab_kube_pod( task, code_full_path=s3_code_path, docker_image=docker_image, script=script, is_gpu=use_gpu, python_command=python_command, sync_s3_pkl=sync_s3_pkl, periodic_sync=periodic_sync, periodic_sync_interval=periodic_sync_interval, sync_all_data_node_to_s3=sync_all_data_node_to_s3, terminate_machine=terminate_machine, ) pod_str = json.dumps(pod_dict, indent=1) if dry: print(pod_str) dir = "{pod_dir}/{exp_prefix}".format( pod_dir=config.POD_DIR, exp_prefix=exp_prefix) ensure_dir(dir) fname = "{dir}/{exp_name}.json".format( dir=dir, exp_name=task["exp_name"] ) with open(fname, "w") as fh: fh.write(pod_str) kubecmd = "kubectl create -f %s" % fname print(kubecmd) if dry: return retry_count = 0 wait_interval = 1 while retry_count <= 5: try: return_code = subprocess.call(kubecmd, shell=True) if return_code == 0: break retry_count += 1 print("trying again...") time.sleep(wait_interval) except Exception as e: if isinstance(e, KeyboardInterrupt): raise print(e) else: raise NotImplementedError _find_unsafe = re.compile(r'[a-zA-Z0-9_^@%+=:,./-]').search def ensure_dir(dirname): """ Ensure that a named directory exists; if it does not, attempt to create it. """ try: os.makedirs(dirname) except OSError as e: if e.errno != errno.EEXIST: raise def _shellquote(s): """Return a shell-escaped version of the string *s*.""" if not s: return "''" if _find_unsafe(s) is None: return s # use single quotes, and put single quotes into double quotes # the string $'b is then quoted as '$'"'"'b' return "'" + s.replace("'", "'\"'\"'") + "'" def _to_param_val(v): if v is None: return "" elif isinstance(v, list): return " ".join(map(_shellquote, list(map(str, v)))) else: return _shellquote(str(v)) def to_local_command(params, python_command="python", script=osp.join(config.PROJECT_PATH, 'scripts/run_experiment.py'), use_gpu=False): command = python_command + " " + script if use_gpu and not config.USE_TF: command = "THEANO_FLAGS='device=gpu,dnn.enabled=auto,floatX=float32' " + command for k, v in config.ENV.items(): command = ("%s=%s " % (k, v)) + command pre_commands = params.pop("pre_commands", None) post_commands = params.pop("post_commands", None) if pre_commands is not None or post_commands is not None: print("Not executing the pre_commands: ", pre_commands, ", nor post_commands: ", post_commands) for k, v in params.items(): if isinstance(v, dict): for nk, nv in v.items(): if str(nk) == "_name": command += " --%s %s" % (k, _to_param_val(nv)) else: command += \ " --%s_%s %s" % (k, nk, _to_param_val(nv)) else: command += " --%s %s" % (k, _to_param_val(v)) return command def to_docker_command(params, docker_image, python_command="python", script='scripts/run_experiment_lite.py', pre_commands=None, use_tty=False, mujoco_path=None, post_commands=None, dry=False, use_gpu=False, env=None, local_code_dir=None): """ :param params: The parameters for the experiment. 
If logging directory parameters are provided, we will create a docker volume mapping to make sure that the logging files are created at the correct locations :param docker_image: docker image to run the command on :param script: script command for running experiment :return: """ log_dir = params.get("log_dir") docker_args = params.pop("docker_args", "") if pre_commands is None: pre_commands = params.pop("pre_commands", None) if post_commands is None: post_commands = params.pop("post_commands", None) if mujoco_path is None: mujoco_path = config.MUJOCO_KEY_PATH # script = 'rllab/' + script # if not dry: # create volume for logging directory if use_gpu: command_prefix = "nvidia-docker run" else: command_prefix = "docker run" docker_log_dir = config.DOCKER_LOG_DIR if env is None: env = dict() env = dict( env, AWS_ACCESS_KEY_ID=config.AWS_ACCESS_KEY, AWS_SECRET_ACCESS_KEY=config.AWS_ACCESS_SECRET, ) if env is not None: for k, v in env.items(): command_prefix += " -e \"{k}={v}\"".format(k=k, v=v) command_prefix += " -v {local_mujoco_key_dir}:{docker_mujoco_key_dir}".format( local_mujoco_key_dir=mujoco_path, docker_mujoco_key_dir='/root/.mujoco') command_prefix += " -v {local_log_dir}:{docker_log_dir}".format( local_log_dir=log_dir, docker_log_dir=docker_log_dir ) command_prefix += docker_args if local_code_dir is None: local_code_dir = config.PROJECT_PATH command_prefix += " -v {local_code_dir}:{docker_code_dir}".format( local_code_dir=local_code_dir, docker_code_dir=config.DOCKER_CODE_DIR ) params = dict(params, log_dir=docker_log_dir) if use_tty: command_prefix += " -ti " + docker_image + " /bin/bash -c " else: command_prefix += " -i " + docker_image + " /bin/bash -c " command_list = list() if pre_commands is not None: command_list.extend(pre_commands) command_list.append("echo \"Running in docker\"") command_list.append(to_local_command( params, python_command=python_command, script=osp.join(config.DOCKER_CODE_DIR, script), use_gpu=use_gpu)) # We sleep for 2 min after termination to allow for last syncs. 
if post_commands is None: post_commands = ['sleep 120'] command_list.extend(post_commands) return command_prefix + "'" + "; ".join(command_list) + "'" def dedent(s): lines = [l.strip() for l in s.split('\n')] return '\n'.join(lines) def launch_ec2(params_list, exp_prefix, docker_image, code_full_path, python_command="python", script='scripts/run_experiment.py', aws_config=None, dry=False, terminate_machine=True, use_gpu=False, sync_s3_pkl=False, sync_s3_png=False, sync_s3_log=False, sync_log_on_termination=True, periodic_sync=True, periodic_sync_interval=15): if len(params_list) == 0: return default_config = dict( image_id=config.AWS_IMAGE_ID, instance_type=config.AWS_INSTANCE_TYPE, key_name=config.AWS_KEY_NAME, spot=config.AWS_SPOT, spot_price=config.AWS_SPOT_PRICE, iam_instance_profile_name=config.AWS_IAM_INSTANCE_PROFILE_NAME, security_groups=config.AWS_SECURITY_GROUPS, security_group_ids=config.AWS_SECURITY_GROUP_IDS, network_interfaces=config.AWS_NETWORK_INTERFACES, ) if aws_config is None: aws_config = dict() aws_config = dict(default_config, **aws_config) sio = StringIO() sio.write("#!/bin/bash\n") sio.write("{\n") sio.write(""" die() { status=$1; shift; echo "FATAL: $*"; exit $status; } """) sio.write(""" EC2_INSTANCE_ID="`wget -q -O - http://169.254.169.254/latest/meta-data/instance-id`" """) sio.write(""" aws ec2 create-tags --resources $EC2_INSTANCE_ID --tags Key=Name,Value={exp_name} --region {aws_region} """.format(exp_name=params_list[0].get("exp_name"), aws_region=config.AWS_REGION_NAME)) if config.LABEL: sio.write(""" aws ec2 create-tags --resources $EC2_INSTANCE_ID --tags Key=owner,Value={label} --region {aws_region} """.format(label=config.LABEL, aws_region=config.AWS_REGION_NAME)) sio.write(""" aws ec2 create-tags --resources $EC2_INSTANCE_ID --tags Key=exp_prefix,Value={exp_prefix} --region {aws_region} """.format(exp_prefix=exp_prefix, aws_region=config.AWS_REGION_NAME)) sio.write(""" service docker start """) sio.write(""" docker --config /home/ubuntu/.docker pull {docker_image} """.format(docker_image=docker_image)) sio.write(""" export AWS_DEFAULT_REGION={aws_region} """.format(aws_region=config.AWS_REGION_NAME)) if config.FAST_CODE_SYNC: # sio.write(""" # aws s3 cp {code_full_path} /tmp/rllab_code.tar.gz --region {aws_region} # """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR, # aws_region=config.AWS_REGION_NAME)) sio.write(""" aws s3 cp {code_full_path} /tmp/rllab_code.tar.gz """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR)) sio.write(""" mkdir -p {local_code_path} """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR, aws_region=config.AWS_REGION_NAME)) sio.write(""" tar -zxvf /tmp/rllab_code.tar.gz -C {local_code_path} """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR, aws_region=config.AWS_REGION_NAME)) else: # sio.write(""" # aws s3 cp --recursive {code_full_path} {local_code_path} --region {aws_region} # """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR, # aws_region=config.AWS_REGION_NAME)) sio.write(""" aws s3 cp --recursive {code_full_path} {local_code_path} """.format(code_full_path=code_full_path, local_code_path=config.DOCKER_CODE_DIR)) s3_mujoco_key_path = config.AWS_CODE_SYNC_S3_PATH + '/.mujoco/' # sio.write(""" # aws s3 cp --recursive {} {} --region {} # """.format(s3_mujoco_key_path, config.MUJOCO_KEY_PATH, config.AWS_REGION_NAME)) sio.write(""" aws s3 cp --recursive {} {} 
""".format(s3_mujoco_key_path, config.MUJOCO_KEY_PATH)) sio.write(""" cd {local_code_path} """.format(local_code_path=config.DOCKER_CODE_DIR)) for params in params_list: log_dir = params.get("log_dir") remote_log_dir = params.pop("remote_log_dir") env = params.pop("env", None) sio.write(""" aws ec2 create-tags --resources $EC2_INSTANCE_ID --tags Key=Name,Value={exp_name} --region {aws_region} """.format(exp_name=params.get("exp_name"), aws_region=config.AWS_REGION_NAME)) sio.write(""" mkdir -p {log_dir} """.format(log_dir=log_dir)) if periodic_sync: include_png = " --include '*.png' " if sync_s3_png else " " include_pkl = " --include '*.pkl' " if sync_s3_pkl else " " include_log = " --include '*.log' " if sync_s3_log else " " # sio.write(""" # while /bin/true; do # aws s3 sync --exclude '*' {include_png} {include_pkl} {include_log}--include '*.csv' --include '*.json' {log_dir} {remote_log_dir} --region {aws_region} # sleep {periodic_sync_interval} # done & echo sync initiated""".format(include_png=include_png, include_pkl=include_pkl, include_log=include_log, # log_dir=log_dir, remote_log_dir=remote_log_dir, # aws_region=config.AWS_REGION_NAME, # periodic_sync_interval=periodic_sync_interval)) sio.write(""" while /bin/true; do aws s3 sync --exclude '*' {include_png} {include_pkl} {include_log}--include '*.csv' --include '*.json' {log_dir} {remote_log_dir} sleep {periodic_sync_interval} done & echo sync initiated""".format(include_png=include_png, include_pkl=include_pkl, include_log=include_log, log_dir=log_dir, remote_log_dir=remote_log_dir, periodic_sync_interval=periodic_sync_interval)) if sync_log_on_termination: # sio.write(""" # while /bin/true; do # if [ -z $(curl -Is http://169.254.169.254/latest/meta-data/spot/termination-time | head -1 | grep 404 | cut -d \ -f 2) ] # then # logger "Running shutdown hook." # aws s3 cp /home/ubuntu/user_data.log {remote_log_dir}/stdout.log --region {aws_region} # aws s3 cp --recursive {log_dir} {remote_log_dir} --region {aws_region} # break # else # # Spot instance not yet marked for termination. # sleep 5 # fi # done & echo log sync initiated # """.format(log_dir=log_dir, remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME)) sio.write(""" while /bin/true; do if [ -z $(curl -Is http://169.254.169.254/latest/meta-data/spot/termination-time | head -1 | grep 404 | cut -d \ -f 2) ] then logger "Running shutdown hook." aws s3 cp /home/ubuntu/user_data.log {remote_log_dir}/stdout.log aws s3 cp --recursive {log_dir} {remote_log_dir} break else # Spot instance not yet marked for termination. 
sleep 5 fi done & echo log sync initiated """.format(log_dir=log_dir, remote_log_dir=remote_log_dir)) if use_gpu: sio.write(""" for i in {1..800}; do su -c "nvidia-modprobe -u -c=0" ubuntu && break || sleep 3; done systemctl start nvidia-docker """) sio.write(""" {command} """.format(command=to_docker_command(params, docker_image, python_command=python_command, script=script, use_gpu=use_gpu, env=env, local_code_dir=config.DOCKER_CODE_DIR))) # sio.write(""" # aws s3 cp --recursive {log_dir} {remote_log_dir} --region {aws_region} # """.format(log_dir=log_dir, remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME)) sio.write(""" aws s3 cp --recursive {log_dir} {remote_log_dir} """.format(log_dir=log_dir, remote_log_dir=remote_log_dir)) # sio.write(""" # aws s3 cp /home/ubuntu/user_data.log {remote_log_dir}/stdout.log --region {aws_region} # """.format(remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME)) sio.write(""" aws s3 cp /home/ubuntu/user_data.log {remote_log_dir}/stdout.log """.format(remote_log_dir=remote_log_dir)) if terminate_machine: sio.write(""" EC2_INSTANCE_ID="`wget -q -O - http://169.254.169.254/latest/meta-data/instance-id || die \"wget instance-id has failed: $?\"`" aws ec2 terminate-instances --instance-ids $EC2_INSTANCE_ID --region {aws_region} """.format(aws_region=config.AWS_REGION_NAME)) sio.write("} >> /home/ubuntu/user_data.log 2>&1\n") full_script = dedent(sio.getvalue()) import boto3 import botocore if aws_config["spot"]: ec2 = boto3.client( "ec2", region_name=config.AWS_REGION_NAME, aws_access_key_id=config.AWS_ACCESS_KEY, aws_secret_access_key=config.AWS_ACCESS_SECRET, ) else: ec2 = boto3.resource( "ec2", region_name=config.AWS_REGION_NAME, aws_access_key_id=config.AWS_ACCESS_KEY, aws_secret_access_key=config.AWS_ACCESS_SECRET, ) if len(full_script) > 10000 or len(base64.b64encode(full_script.encode()).decode("utf-8")) > 10000: # Script too long; need to upload script to s3 first. # We're being conservative here since the actual limit is 16384 bytes s3_path = upload_file_to_s3(full_script) sio = StringIO() sio.write("#!/bin/bash\n") sio.write(""" aws s3 cp {s3_path} /home/ubuntu/remote_script.sh --region {aws_region} && \\ chmod +x /home/ubuntu/remote_script.sh && \\ bash /home/ubuntu/remote_script.sh """.format(s3_path=s3_path, aws_region=config.AWS_REGION_NAME)) user_data = dedent(sio.getvalue()) else: user_data = full_script print(full_script) with open("/tmp/full_script", "w") as f: f.write(full_script) instance_args = dict( ImageId=aws_config["image_id"], KeyName=aws_config["key_name"], UserData=user_data, InstanceType=aws_config["instance_type"], EbsOptimized=config.EBS_OPTIMIZED, SecurityGroups=aws_config["security_groups"], SecurityGroupIds=aws_config["security_group_ids"], NetworkInterfaces=aws_config["network_interfaces"], IamInstanceProfile=dict( Name=aws_config["iam_instance_profile_name"], ), **config.AWS_EXTRA_CONFIGS, ) if len(instance_args["NetworkInterfaces"]) > 0: # disable_security_group = query_yes_no( # "Cannot provide both network interfaces and security groups info. 
Do you want to disable security group settings?", # default="yes", # ) disable_security_group = True if disable_security_group: instance_args.pop("SecurityGroups") instance_args.pop("SecurityGroupIds") if aws_config.get("placement", None) is not None: instance_args["Placement"] = aws_config["placement"] if not aws_config["spot"]: instance_args["MinCount"] = 1 instance_args["MaxCount"] = 1 print("************************************************************") print(instance_args["UserData"]) print("************************************************************") if aws_config["spot"]: instance_args["UserData"] = base64.b64encode(instance_args["UserData"].encode()).decode("utf-8") spot_args = dict( DryRun=dry, InstanceCount=1, LaunchSpecification=instance_args, SpotPrice=aws_config["spot_price"], # ClientToken=params_list[0]["exp_name"], ) import pprint pprint.pprint(spot_args) if not dry: response = ec2.request_spot_instances(**spot_args) print(response) spot_request_id = response['SpotInstanceRequests'][ 0]['SpotInstanceRequestId'] for _ in range(10): try: ec2.create_tags( Resources=[spot_request_id], Tags=[ {'Key': 'Name', 'Value': params_list[0]["exp_name"]} ], ) break except botocore.exceptions.ClientError: continue else: import pprint pprint.pprint(instance_args) ec2.create_instances( DryRun=dry, **instance_args ) S3_CODE_PATH = None def s3_sync_code(config, dry=False, added_project_directories=[]): global S3_CODE_PATH if S3_CODE_PATH is not None: return S3_CODE_PATH base = config.AWS_CODE_SYNC_S3_PATH has_git = True if config.FAST_CODE_SYNC: try: current_commit = subprocess.check_output( ["git", "rev-parse", "HEAD"]).strip().decode("utf-8") except subprocess.CalledProcessError as _: print("Warning: failed to execute git commands") current_commit = None file_name = str(timestamp) + "_" + hashlib.sha224( subprocess.check_output(["pwd"]) + str(current_commit).encode() + str(timestamp).encode() ).hexdigest() + ".tar.gz" file_path = "/tmp/" + file_name tar_cmd = ["tar", "-zcvf", file_path, "-C", config.PROJECT_PATH] for pattern in config.FAST_CODE_SYNC_IGNORES: tar_cmd += ["--exclude", pattern] tar_cmd += ["-h", "."] for path in added_project_directories: tar_cmd.append("-C") tar_cmd.append(path) tar_cmd += ["."] remote_path = "%s/%s" % (base, file_name) upload_cmd = ["aws", "s3", "cp", file_path, remote_path] mujoco_key_cmd = [ "aws", "s3", "sync", config.MUJOCO_KEY_PATH, "{}/.mujoco/".format(base)] print(" ".join(tar_cmd)) print(" ".join(upload_cmd)) print(" ".join(mujoco_key_cmd)) if not dry: subprocess.check_call(tar_cmd) subprocess.check_call(upload_cmd) try: subprocess.check_call(mujoco_key_cmd) except Exception as e: print(e) S3_CODE_PATH = remote_path return remote_path else: try: current_commit = subprocess.check_output( ["git", "rev-parse", "HEAD"]).strip().decode("utf-8") clean_state = len( subprocess.check_output(["git", "status", "--porcelain"])) == 0 except subprocess.CalledProcessError as _: print("Warning: failed to execute git commands") has_git = False dir_hash = base64.b64encode(subprocess.check_output(["pwd"])).decode("utf-8") code_path = "%s_%s" % ( dir_hash, (current_commit if clean_state else "%s_dirty_%s" % (current_commit, timestamp)) if has_git else timestamp ) full_path = "%s/%s" % (base, code_path) cache_path = "%s/%s" % (base, dir_hash) cache_cmds = ["aws", "s3", "cp", "--recursive"] + \ flatten(["--exclude", "%s" % pattern] for pattern in config.CODE_SYNC_IGNORES) + \ [cache_path, full_path] cmds = ["aws", "s3", "cp", "--recursive"] + \ flatten(["--exclude", "%s" 
% pattern] for pattern in config.CODE_SYNC_IGNORES) + \ [".", full_path] caching_cmds = ["aws", "s3", "cp", "--recursive"] + \ flatten(["--exclude", "%s" % pattern] for pattern in config.CODE_SYNC_IGNORES) + \ [full_path, cache_path] mujoco_key_cmd = [ "aws", "s3", "sync", config.MUJOCO_KEY_PATH, "{}/.mujoco/".format(base)] print(cache_cmds, cmds, caching_cmds, mujoco_key_cmd) if not dry: subprocess.check_call(cache_cmds) subprocess.check_call(cmds) subprocess.check_call(caching_cmds) try: subprocess.check_call(mujoco_key_cmd) except Exception: print('Unable to sync mujoco keys!') S3_CODE_PATH = full_path return full_path def upload_file_to_s3(script_content): import tempfile import uuid f = tempfile.NamedTemporaryFile(delete=False) f.write(script_content.encode()) f.close() remote_path = os.path.join( config.AWS_CODE_SYNC_S3_PATH, "oversize_bash_scripts", str(uuid.uuid4())) subprocess.check_call(["aws", "s3", "cp", f.name, remote_path]) os.unlink(f.name) return remote_path def to_lab_kube_pod( params, docker_image, code_full_path, python_command="python", script='scripts/run_experiment.py', is_gpu=False, sync_s3_pkl=False, periodic_sync=True, periodic_sync_interval=15, sync_all_data_node_to_s3=False, terminate_machine=True ): """ :param params: The parameters for the experiment. If logging directory parameters are provided, we will create docker volume mapping to make sure that the logging files are created at the correct locations :param docker_image: docker image to run the command on :param script: script command for running experiment :return: """ log_dir = params.get("log_dir") remote_log_dir = params.pop("remote_log_dir") resources = params.pop("resources") node_selector = params.pop("node_selector") exp_prefix = params.pop("exp_prefix") kube_env = [ {"name": k, "value": v} for k, v in (params.pop("env", None) or dict()).items() ] mkdir_p(log_dir) pre_commands = list() pre_commands.append('mkdir -p ~/.aws') pre_commands.append('mkdir ~/.mujoco') # fetch credentials from the kubernetes secret file pre_commands.append('echo "[default]" >> ~/.aws/credentials') pre_commands.append( "echo \"aws_access_key_id = %s\" >> ~/.aws/credentials" % config.AWS_ACCESS_KEY) pre_commands.append( "echo \"aws_secret_access_key = %s\" >> ~/.aws/credentials" % config.AWS_ACCESS_SECRET) s3_mujoco_key_path = config.AWS_CODE_SYNC_S3_PATH + '/.mujoco/' pre_commands.append( 'aws s3 cp --recursive {} {}'.format(s3_mujoco_key_path, '~/.mujoco')) if config.FAST_CODE_SYNC: pre_commands.append('aws s3 cp %s /tmp/rllab_code.tar.gz' % code_full_path) pre_commands.append('mkdir -p %s' % config.DOCKER_CODE_DIR) pre_commands.append('tar -zxvf /tmp/rllab_code.tar.gz -C %s' % config.DOCKER_CODE_DIR) else: pre_commands.append('aws s3 cp --recursive %s %s' % (code_full_path, config.DOCKER_CODE_DIR)) pre_commands.append('cd %s' % config.DOCKER_CODE_DIR) pre_commands.append('mkdir -p %s' % (log_dir)) if sync_all_data_node_to_s3: print('Syncing all data from node to s3.') if periodic_sync: if sync_s3_pkl: pre_commands.append(""" while /bin/true; do aws s3 sync {log_dir} {remote_log_dir} --region {aws_region} --quiet sleep {periodic_sync_interval} done & echo sync initiated""".format(log_dir=log_dir, remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME, periodic_sync_interval=periodic_sync_interval)) else: pre_commands.append(""" while /bin/true; do aws s3 sync {log_dir} {remote_log_dir} --region {aws_region} --quiet sleep {periodic_sync_interval} done & echo sync initiated""".format(log_dir=log_dir, 
remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME, periodic_sync_interval=periodic_sync_interval)) else: if periodic_sync: if sync_s3_pkl: pre_commands.append(""" while /bin/true; do aws s3 sync --exclude '*' --include '*.csv' --include '*.json' --include '*.pkl' {log_dir} {remote_log_dir} --region {aws_region} --quiet sleep {periodic_sync_interval} done & echo sync initiated""".format(log_dir=log_dir, remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME, periodic_sync_interval=periodic_sync_interval)) else: pre_commands.append(""" while /bin/true; do aws s3 sync --exclude '*' --include '*.csv' --include '*.json' {log_dir} {remote_log_dir} --region {aws_region} --quiet sleep {periodic_sync_interval} done & echo sync initiated""".format(log_dir=log_dir, remote_log_dir=remote_log_dir, aws_region=config.AWS_REGION_NAME, periodic_sync_interval=periodic_sync_interval)) # copy the file to s3 after execution post_commands = list() post_commands.append('aws s3 cp --recursive %s %s' % (log_dir, remote_log_dir)) if not terminate_machine: post_commands.append('sleep infinity') command_list = list() if pre_commands is not None: command_list.extend(pre_commands) command_list.append("echo \"Running in docker\"") command_list.append( "%s 2>&1 | tee -a %s" % ( to_local_command(params, python_command=python_command, script=script), "%s/stdouterr.log" % log_dir ) ) if post_commands is not None: command_list.extend(post_commands) command = "; ".join(command_list) pod_name = config.KUBE_PREFIX + params["exp_name"] # underscore is not allowed in pod names pod_name = pod_name.replace("_", "-") print("Is gpu: ", is_gpu) if not is_gpu: return { "apiVersion": "v1", "kind": "Pod", "metadata": { "name": pod_name, "labels": { "owner": config.LABEL, "expt": pod_name, "exp_time": timestamp, "exp_prefix": exp_prefix, }, }, "spec": { "containers": [ { "name": "foo", "image": docker_image, "command": [ "/bin/bash", "-c", "-li", # to load conda env file command, ], "resources": resources, "imagePullPolicy": "Always", } ], "restartPolicy": "Never", "nodeSelector": node_selector, "dnsPolicy": "Default", } } return { "apiVersion": "v1", "kind": "Pod", "metadata": { "name": pod_name, "labels": { "owner": config.LABEL, "expt": pod_name, "exp_time": timestamp, "exp_prefix": exp_prefix, }, }, "spec": { "containers": [ { "name": "foo", "image": docker_image, "env": kube_env, "command": [ "/bin/bash", "-c", "-li", # to load conda env file command, ], "resources": resources, "imagePullPolicy": "Always", # gpu specific "volumeMounts": [ { "name": "nvidia", "mountPath": "/usr/local/nvidia", "readOnly": True, } ], "securityContext": { "privileged": True, } } ], "volumes": [ { "name": "nvidia", "hostPath": { "path": "/var/lib/docker/volumes/nvidia_driver_352.63/_data", } } ], "restartPolicy": "Never", "nodeSelector": node_selector, "dnsPolicy": "Default", } } def concretize(maybe_stub): if isinstance(maybe_stub, StubMethodCall): obj = concretize(maybe_stub.obj) method = getattr(obj, maybe_stub.method_name) args = concretize(maybe_stub.args) kwargs = concretize(maybe_stub.kwargs) return method(*args, **kwargs) elif isinstance(maybe_stub, StubClass): return maybe_stub.proxy_class elif isinstance(maybe_stub, StubAttr): obj = concretize(maybe_stub.obj) attr_name = maybe_stub.attr_name attr_val = getattr(obj, attr_name) return concretize(attr_val) elif isinstance(maybe_stub, StubObject): if not hasattr(maybe_stub, "__stub_cache"): args = concretize(maybe_stub.args) kwargs = concretize(maybe_stub.kwargs) try: 
                maybe_stub.__stub_cache = maybe_stub.proxy_class(
                    *args, **kwargs)
            except Exception:
                print("Error while instantiating %s" % maybe_stub.proxy_class)
                import traceback
                traceback.print_exc()
                # re-raise: silently falling through would crash just below
                # with a confusing AttributeError on __stub_cache
                raise
        ret = maybe_stub.__stub_cache
        return ret
    elif isinstance(maybe_stub, dict):
        # make sure that there's no hidden caveat
        ret = dict()
        for k, v in maybe_stub.items():
            ret[concretize(k)] = concretize(v)
        return ret
    elif isinstance(maybe_stub, (list, tuple)):
        return maybe_stub.__class__(list(map(concretize, maybe_stub)))
    else:
        return maybe_stub
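
# Illustrative sketch (added, not part of the original file): a minimal,
# self-contained model of the stub/concretize pattern implemented above. The
# _MiniStub* names are hypothetical stand-ins -- rllab's real StubObject /
# StubMethodCall classes live elsewhere in the codebase -- but the recursion
# mirrors concretize() above.

class _MiniStubObject(object):
    """Record a class and its constructor arguments instead of instantiating."""

    def __init__(self, proxy_class, *args, **kwargs):
        self.proxy_class = proxy_class
        self.args = args
        self.kwargs = kwargs


def _mini_concretize(maybe_stub):
    # Recursively replace stub records with live instances, handling dicts,
    # lists, and tuples the same way concretize() above does.
    if isinstance(maybe_stub, _MiniStubObject):
        args = _mini_concretize(maybe_stub.args)
        kwargs = _mini_concretize(maybe_stub.kwargs)
        return maybe_stub.proxy_class(*args, **kwargs)
    elif isinstance(maybe_stub, dict):
        return dict((_mini_concretize(k), _mini_concretize(v))
                    for k, v in maybe_stub.items())
    elif isinstance(maybe_stub, (list, tuple)):
        return maybe_stub.__class__(list(map(_mini_concretize, maybe_stub)))
    else:
        return maybe_stub


# Usage: a pickle-friendly "recipe" becomes live objects only at launch time.
_recipe = {"model": _MiniStubObject(dict, a=1), "seeds": [1, 2, 3]}
assert _mini_concretize(_recipe) == {"model": {"a": 1}, "seeds": [1, 2, 3]}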
[]
[]
[]
[]
[]
python
0
0
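# Illustrative sketch (added, not part of the dataset row above): the
# "--key value" flattening that to_local_command in the launcher file above
# applies to experiment parameters. shlex.quote stands in here for the file's
# hand-rolled _shellquote; for well-formed values the two should agree.
import shlex


def flatten_params_to_flags(params):
    # Nested dicts become prefixed flags; a "_name" key names the group itself.
    parts = []
    for k, v in params.items():
        if isinstance(v, dict):
            for nk, nv in v.items():
                if str(nk) == "_name":
                    parts.append("--%s %s" % (k, shlex.quote(str(nv))))
                else:
                    parts.append("--%s_%s %s" % (k, nk, shlex.quote(str(nv))))
        else:
            parts.append("--%s %s" % (k, shlex.quote(str(v))))
    return " ".join(parts)


print(flatten_params_to_flags({"seed": 1, "algo": {"_name": "trpo", "lr": 0.001}}))
# -> --seed 1 --algo trpo --algo_lr 0.001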
tests/test_other.py
import httplib2 import mock import os import pickle import pytest import socket import sys import tests import time from six.moves import urllib @pytest.mark.skipif( sys.version_info <= (3,), reason=( "TODO: httplib2._convert_byte_str was defined only in python3 code " "version" ), ) def test_convert_byte_str(): with tests.assert_raises(TypeError): httplib2._convert_byte_str(4) assert httplib2._convert_byte_str(b"Hello") == "Hello" assert httplib2._convert_byte_str("World") == "World" def test_reflect(): http = httplib2.Http() with tests.server_reflect() as uri: response, content = http.request(uri + "?query", "METHOD") assert response.status == 200 host = urllib.parse.urlparse(uri).netloc assert content.startswith( """\ METHOD /?query HTTP/1.1\r\n\ Host: {host}\r\n""".format( host=host ).encode() ), content def test_pickle_http(): http = httplib2.Http(cache=tests.get_cache_path()) new_http = pickle.loads(pickle.dumps(http)) assert tuple(sorted(new_http.__dict__)) == tuple(sorted(http.__dict__)) assert new_http.credentials.credentials == http.credentials.credentials assert new_http.certificates.credentials == http.certificates.credentials assert new_http.cache.cache == http.cache.cache for key in new_http.__dict__: if key not in ("cache", "certificates", "credentials"): assert getattr(new_http, key) == getattr(http, key) def test_pickle_http_with_connection(): http = httplib2.Http() http.request("http://random-domain:81/", connection_type=tests.MockHTTPConnection) new_http = pickle.loads(pickle.dumps(http)) assert tuple(http.connections) == ("http:random-domain:81",) assert new_http.connections == {} def test_pickle_custom_request_http(): http = httplib2.Http() http.request = lambda: None http.request.dummy_attr = "dummy_value" new_http = pickle.loads(pickle.dumps(http)) assert getattr(new_http.request, "dummy_attr", None) is None @pytest.mark.xfail( sys.version_info >= (3,), reason=( "FIXME: for unknown reason global timeout test fails in Python3 " "with response 200" ), ) def test_timeout_global(): def handler(request): time.sleep(0.5) return tests.http_response_bytes() try: socket.setdefaulttimeout(0.1) except Exception: pytest.skip("cannot set global socket timeout") try: http = httplib2.Http() http.force_exception_to_status_code = True with tests.server_request(handler) as uri: response, content = http.request(uri) assert response.status == 408 assert response.reason.startswith("Request Timeout") finally: socket.setdefaulttimeout(None) def test_timeout_individual(): def handler(request): time.sleep(0.5) return tests.http_response_bytes() http = httplib2.Http(timeout=0.1) http.force_exception_to_status_code = True with tests.server_request(handler) as uri: response, content = http.request(uri) assert response.status == 408 assert response.reason.startswith("Request Timeout") def test_timeout_https(): c = httplib2.HTTPSConnectionWithTimeout("localhost", 80, timeout=47) assert 47 == c.timeout # @pytest.mark.xfail( # sys.version_info >= (3,), # reason='[py3] last request should open new connection, but client does not realize socket was closed by server', # ) def test_connection_close(): http = httplib2.Http() g = [] def handler(request): g.append(request.number) return tests.http_response_bytes(proto="HTTP/1.1") with tests.server_request(handler, request_count=3) as uri: http.request(uri, "GET") # conn1 req1 for c in http.connections.values(): assert c.sock is not None http.request(uri, "GET", headers={"connection": "close"}) time.sleep(0.7) http.request(uri, "GET") # conn2 req1 assert g 
== [1, 2, 1] def test_get_end2end_headers(): # one end to end header response = {"content-type": "application/atom+xml", "te": "deflate"} end2end = httplib2._get_end2end_headers(response) assert "content-type" in end2end assert "te" not in end2end assert "connection" not in end2end # one end to end header that gets eliminated response = { "connection": "content-type", "content-type": "application/atom+xml", "te": "deflate", } end2end = httplib2._get_end2end_headers(response) assert "content-type" not in end2end assert "te" not in end2end assert "connection" not in end2end # Degenerate case of no headers response = {} end2end = httplib2._get_end2end_headers(response) assert len(end2end) == 0 # Degenerate case of connection referrring to a header not passed in response = {"connection": "content-type"} end2end = httplib2._get_end2end_headers(response) assert len(end2end) == 0 @pytest.mark.xfail( os.environ.get("TRAVIS_PYTHON_VERSION") in ("2.7", "pypy"), reason="FIXME: fail on Travis py27 and pypy, works elsewhere", ) @pytest.mark.parametrize("scheme", ("http", "https")) def test_ipv6(scheme): # Even if IPv6 isn't installed on a machine it should just raise socket.error uri = "{scheme}://[::1]:1/".format(scheme=scheme) try: httplib2.Http(timeout=0.1).request(uri) except socket.gaierror: assert False, "should get the address family right for IPv6" except socket.error: pass @pytest.mark.parametrize( "conn_type", (httplib2.HTTPConnectionWithTimeout, httplib2.HTTPSConnectionWithTimeout), ) def test_connection_proxy_info_attribute_error(conn_type): # HTTPConnectionWithTimeout did not initialize its .proxy_info attribute # https://github.com/httplib2/httplib2/pull/97 # Thanks to Joseph Ryan https://github.com/germanjoey conn = conn_type("no-such-hostname.", 80) # TODO: replace mock with dummy local server with tests.assert_raises(socket.gaierror): with mock.patch("socket.socket.connect", side_effect=socket.gaierror): conn.request("GET", "/") def test_http_443_forced_https(): http = httplib2.Http() http.force_exception_to_status_code = True uri = "http://localhost:443/" # sorry, using internal structure of Http to check chosen scheme with mock.patch("httplib2.Http._request") as m: http.request(uri) assert len(m.call_args) > 0, "expected Http._request() call" conn = m.call_args[0][0] assert isinstance(conn, httplib2.HTTPConnectionWithTimeout)
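
# Illustrative usage sketch (added, not part of the test file above): the
# timeout-to-status-code conversion that test_timeout_individual exercises.
# With force_exception_to_status_code set, httplib2 synthesizes an error
# response instead of raising; against a server that stalls longer than the
# timeout, the synthetic status is 408. The URL below is a placeholder.
import httplib2

http = httplib2.Http(timeout=0.1)
http.force_exception_to_status_code = True
response, content = http.request("http://a-slow-server.example/endpoint")
print(response.status, response.reason)  # 408 "Request Timeout ..." for a stalled read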
[]
[]
[ "TRAVIS_PYTHON_VERSION" ]
[]
["TRAVIS_PYTHON_VERSION"]
python
1
0
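# Illustrative sketch (added, not part of the dataset row above): the RFC 2616
# header filtering that test_get_end2end_headers in the file above verifies.
# Hop-by-hop headers ("te", "connection", ...) and any header that the
# Connection header names are dropped from the end-to-end set used for caching.
import httplib2

resp = {"connection": "x-custom", "x-custom": "1",
        "te": "deflate", "content-type": "application/atom+xml"}
print(httplib2._get_end2end_headers(resp))  # only "content-type" survives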
mne/bem.py
# Authors: Alexandre Gramfort <[email protected]> # Matti Hamalainen <[email protected]> # Eric Larson <[email protected]> # Lorenzo De Santis <[email protected]> # # License: BSD (3-clause) import sys import os import os.path as op import shutil import glob import numpy as np from scipy import linalg from .fixes import partial from .utils import verbose, logger, run_subprocess, get_subjects_dir, warn from .transforms import _ensure_trans, apply_trans from .io import Info from .io.constants import FIFF from .io.write import (start_file, start_block, write_float, write_int, write_float_matrix, write_int_matrix, end_block, end_file) from .io.tag import find_tag from .io.tree import dir_tree_find from .io.open import fiff_open from .externals.six import string_types # ############################################################################ # Compute BEM solution # define VEC_DIFF(from,to,diff) {\ # (diff)[X] = (to)[X] - (from)[X];\ # The following approach is based on: # # de Munck JC: "A linear discretization of the volume conductor boundary # integral equation using analytically integrated elements", # IEEE Trans Biomed Eng. 1992 39(9) : 986 - 990 # class ConductorModel(dict): """BEM or sphere model""" def __repr__(self): if self['is_sphere']: center = ', '.join('%0.1f' % (x * 1000.) for x in self['r0']) pl = '' if len(self['layers']) == 1 else 's' rad = self.radius if rad is None: # no radius / MEG only extra = 'Sphere (no layers): r0=[%s] mm' % center else: extra = ('Sphere (%s layer%s): r0=[%s] R=%1.f mm' % (len(self['layers']) - 1, pl, center, rad * 1000.)) else: pl = '' if len(self['surfs']) == 1 else 's' extra = ('BEM (%s layer%s)' % (len(self['surfs']), pl)) return '<ConductorModel | %s>' % extra @property def radius(self): if not self['is_sphere']: raise RuntimeError('radius undefined for BEM') return None if len(self['layers']) == 0 else self['layers'][-1]['rad'] def _calc_beta(rk, rk_norm, rk1, rk1_norm): """These coefficients are used to calculate the magic vector omega""" rkk1 = rk1[0] - rk[0] size = np.sqrt(np.dot(rkk1, rkk1)) rkk1 /= size num = rk_norm + np.dot(rk, rkk1) den = rk1_norm + np.dot(rk1, rkk1) res = np.log(num / den) / size return res def _lin_pot_coeff(fros, tri_rr, tri_nn, tri_area): """The linear potential matrix element computations""" from .source_space import _fast_cross_nd_sum omega = np.zeros((len(fros), 3)) # we replicate a little bit of the _get_solids code here for speed v1 = tri_rr[np.newaxis, 0, :] - fros v2 = tri_rr[np.newaxis, 1, :] - fros v3 = tri_rr[np.newaxis, 2, :] - fros triples = _fast_cross_nd_sum(v1, v2, v3) l1 = np.sqrt(np.sum(v1 * v1, axis=1)) l2 = np.sqrt(np.sum(v2 * v2, axis=1)) l3 = np.sqrt(np.sum(v3 * v3, axis=1)) ss = (l1 * l2 * l3 + np.sum(v1 * v2, axis=1) * l3 + np.sum(v1 * v3, axis=1) * l2 + np.sum(v2 * v3, axis=1) * l1) solids = np.arctan2(triples, ss) # We *could* subselect the good points from v1, v2, v3, triples, solids, # l1, l2, and l3, but there are *very* few bad points. So instead we do # some unnecessary calculations, and then omit them from the final # solution. These three lines ensure we don't get invalid values in # _calc_beta. bad_mask = np.abs(solids) < np.pi / 1e6 l1[bad_mask] = 1. l2[bad_mask] = 1. l3[bad_mask] = 1. 
# Calculate the magic vector vec_omega beta = [_calc_beta(v1, l1, v2, l2)[:, np.newaxis], _calc_beta(v2, l2, v3, l3)[:, np.newaxis], _calc_beta(v3, l3, v1, l1)[:, np.newaxis]] vec_omega = (beta[2] - beta[0]) * v1 vec_omega += (beta[0] - beta[1]) * v2 vec_omega += (beta[1] - beta[2]) * v3 area2 = 2.0 * tri_area n2 = 1.0 / (area2 * area2) # leave omega = 0 otherwise # Put it all together... yys = [v1, v2, v3] idx = [0, 1, 2, 0, 2] for k in range(3): diff = yys[idx[k - 1]] - yys[idx[k + 1]] zdots = _fast_cross_nd_sum(yys[idx[k + 1]], yys[idx[k - 1]], tri_nn) omega[:, k] = -n2 * (area2 * zdots * 2. * solids - triples * (diff * vec_omega).sum(axis=-1)) # omit the bad points from the solution omega[bad_mask] = 0. return omega def _correct_auto_elements(surf, mat): """Improve auto-element approximation...""" pi2 = 2.0 * np.pi tris_flat = surf['tris'].ravel() misses = pi2 - mat.sum(axis=1) for j, miss in enumerate(misses): # How much is missing? n_memb = len(surf['neighbor_tri'][j]) # The node itself receives one half mat[j, j] = miss / 2.0 # The rest is divided evenly among the member nodes... miss /= (4.0 * n_memb) members = np.where(j == tris_flat)[0] mods = members % 3 offsets = np.array([[1, 2], [-1, 1], [-1, -2]]) tri_1 = members + offsets[mods, 0] tri_2 = members + offsets[mods, 1] for t1, t2 in zip(tri_1, tri_2): mat[j, tris_flat[t1]] += miss mat[j, tris_flat[t2]] += miss return def _fwd_bem_lin_pot_coeff(surfs): """Calculate the coefficients for linear collocation approach""" # taken from fwd_bem_linear_collocation.c nps = [surf['np'] for surf in surfs] np_tot = sum(nps) coeff = np.zeros((np_tot, np_tot)) offsets = np.cumsum(np.concatenate(([0], nps))) for si_1, surf1 in enumerate(surfs): rr_ord = np.arange(nps[si_1]) for si_2, surf2 in enumerate(surfs): logger.info(" %s (%d) -> %s (%d) ..." % (_bem_explain_surface(surf1['id']), nps[si_1], _bem_explain_surface(surf2['id']), nps[si_2])) tri_rr = surf2['rr'][surf2['tris']] tri_nn = surf2['tri_nn'] tri_area = surf2['tri_area'] submat = coeff[offsets[si_1]:offsets[si_1 + 1], offsets[si_2]:offsets[si_2 + 1]] # view for k in range(surf2['ntri']): tri = surf2['tris'][k] if si_1 == si_2: skip_idx = ((rr_ord == tri[0]) | (rr_ord == tri[1]) | (rr_ord == tri[2])) else: skip_idx = list() # No contribution from a triangle that # this vertex belongs to # if sidx1 == sidx2 and (tri == j).any(): # continue # Otherwise do the hard job coeffs = _lin_pot_coeff(surf1['rr'], tri_rr[k], tri_nn[k], tri_area[k]) coeffs[skip_idx] = 0. 
submat[:, tri] -= coeffs if si_1 == si_2: _correct_auto_elements(surf1, submat) return coeff def _fwd_bem_multi_solution(solids, gamma, nps): """Do multi surface solution * Invert I - solids/(2*M_PI) * Take deflation into account * The matrix is destroyed after inversion * This is the general multilayer case """ pi2 = 1.0 / (2 * np.pi) n_tot = np.sum(nps) assert solids.shape == (n_tot, n_tot) nsurf = len(nps) defl = 1.0 / n_tot # Modify the matrix offsets = np.cumsum(np.concatenate(([0], nps))) for si_1 in range(nsurf): for si_2 in range(nsurf): mult = pi2 if gamma is None else pi2 * gamma[si_1, si_2] slice_j = slice(offsets[si_1], offsets[si_1 + 1]) slice_k = slice(offsets[si_2], offsets[si_2 + 1]) solids[slice_j, slice_k] = defl - solids[slice_j, slice_k] * mult solids += np.eye(n_tot) return linalg.inv(solids, overwrite_a=True) def _fwd_bem_homog_solution(solids, nps): """Helper to make a homogeneous solution""" return _fwd_bem_multi_solution(solids, None, nps) def _fwd_bem_ip_modify_solution(solution, ip_solution, ip_mult, n_tri): """Modify the solution according to the IP approach""" n_last = n_tri[-1] mult = (1.0 + ip_mult) / ip_mult logger.info(' Combining...') offsets = np.cumsum(np.concatenate(([0], n_tri))) for si in range(len(n_tri)): # Pick the correct submatrix (right column) and multiply sub = solution[offsets[si]:offsets[si + 1], np.sum(n_tri[:-1]):] # Multiply sub -= 2 * np.dot(sub, ip_solution) # The lower right corner is a special case sub[-n_last:, -n_last:] += mult * ip_solution # Final scaling logger.info(' Scaling...') solution *= ip_mult return def _fwd_bem_linear_collocation_solution(m): """Compute the linear collocation potential solution""" # first, add surface geometries from .surface import _complete_surface_info for surf in m['surfs']: _complete_surface_info(surf, verbose=False) logger.info('Computing the linear collocation solution...') logger.info(' Matrix coefficients...') coeff = _fwd_bem_lin_pot_coeff(m['surfs']) m['nsol'] = len(coeff) logger.info(" Inverting the coefficient matrix...") nps = [surf['np'] for surf in m['surfs']] m['solution'] = _fwd_bem_multi_solution(coeff, m['gamma'], nps) if len(m['surfs']) == 3: ip_mult = m['sigma'][1] / m['sigma'][2] if ip_mult <= FIFF.FWD_BEM_IP_APPROACH_LIMIT: logger.info('IP approach required...') logger.info(' Matrix coefficients (homog)...') coeff = _fwd_bem_lin_pot_coeff([m['surfs'][-1]]) logger.info(' Inverting the coefficient matrix (homog)...') ip_solution = _fwd_bem_homog_solution(coeff, [m['surfs'][-1]['np']]) logger.info(' Modify the original solution to incorporate ' 'IP approach...') _fwd_bem_ip_modify_solution(m['solution'], ip_solution, ip_mult, nps) m['bem_method'] = FIFF.FWD_BEM_LINEAR_COLL logger.info("Solution ready.") @verbose def make_bem_solution(surfs, verbose=None): """Create a BEM solution using the linear collocation approach Parameters ---------- surfs : list of dict The BEM surfaces to use (`from make_bem_model`) verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- bem : instance of ConductorModel The BEM solution. Notes ----- .. 
versionadded:: 0.10.0 See Also -------- make_bem_model read_bem_surfaces write_bem_surfaces read_bem_solution write_bem_solution """ logger.info('Approximation method : Linear collocation\n') if isinstance(surfs, string_types): # Load the surfaces logger.info('Loading surfaces...') surfs = read_bem_surfaces(surfs) bem = ConductorModel(is_sphere=False, surfs=surfs) _add_gamma_multipliers(bem) if len(bem['surfs']) == 3: logger.info('Three-layer model surfaces loaded.') elif len(bem['surfs']) == 1: logger.info('Homogeneous model surface loaded.') else: raise RuntimeError('Only 1- or 3-layer BEM computations supported') _fwd_bem_linear_collocation_solution(bem) logger.info('BEM geometry computations complete.') return bem # ############################################################################ # Make BEM model def _ico_downsample(surf, dest_grade): """Downsample the surface if isomorphic to a subdivided icosahedron""" from .surface import _get_ico_surface n_tri = surf['ntri'] found = -1 bad_msg = ("A surface with %d triangles cannot be isomorphic with a " "subdivided icosahedron." % surf['ntri']) if n_tri % 20 != 0: raise RuntimeError(bad_msg) n_tri = n_tri // 20 found = int(round(np.log(n_tri) / np.log(4))) if n_tri != 4 ** found: raise RuntimeError(bad_msg) del n_tri if dest_grade > found: raise RuntimeError('For this surface, decimation grade should be %d ' 'or less, not %s.' % (found, dest_grade)) source = _get_ico_surface(found) dest = _get_ico_surface(dest_grade, patch_stats=True) del dest['tri_cent'] del dest['tri_nn'] del dest['neighbor_tri'] del dest['tri_area'] if not np.array_equal(source['tris'], surf['tris']): raise RuntimeError('The source surface has a matching number of ' 'triangles but ordering is wrong') logger.info('Going from %dth to %dth subdivision of an icosahedron ' '(n_tri: %d -> %d)' % (found, dest_grade, surf['ntri'], dest['ntri'])) # Find the mapping dest['rr'] = surf['rr'][_get_ico_map(source, dest)] return dest def _get_ico_map(fro, to): """Helper to get a mapping between ico surfaces""" from .surface import _compute_nearest nearest, dists = _compute_nearest(fro['rr'], to['rr'], return_dists=True) n_bads = (dists > 5e-3).sum() if n_bads > 0: raise RuntimeError('No matching vertex for %d destination vertices' % (n_bads)) return nearest def _order_surfaces(surfs): """Reorder the surfaces""" if len(surfs) != 3: return surfs # we have three surfaces surf_order = [FIFF.FIFFV_BEM_SURF_ID_HEAD, FIFF.FIFFV_BEM_SURF_ID_SKULL, FIFF.FIFFV_BEM_SURF_ID_BRAIN] ids = np.array([surf['id'] for surf in surfs]) if set(ids) != set(surf_order): raise RuntimeError('bad surface ids: %s' % ids) order = [np.where(ids == id_)[0][0] for id_ in surf_order] surfs = [surfs[idx] for idx in order] return surfs def _assert_complete_surface(surf): """Check the sum of solid angles as seen from inside""" # from surface_checks.c from .source_space import _get_solids tot_angle = 0. # Center of mass.... cm = surf['rr'].mean(axis=0) logger.info('%s CM is %6.2f %6.2f %6.2f mm' % (_surf_name[surf['id']], 1000 * cm[0], 1000 * cm[1], 1000 * cm[2])) tot_angle = _get_solids(surf['rr'][surf['tris']], cm[np.newaxis, :])[0] if np.abs(tot_angle / (2 * np.pi) - 1.0) > 1e-5: raise RuntimeError('Surface %s is not complete (sum of solid angles ' '= %g * 4*PI instead).' 
% (_surf_name[surf['id']], tot_angle)) _surf_name = { FIFF.FIFFV_BEM_SURF_ID_HEAD: 'outer skin ', FIFF.FIFFV_BEM_SURF_ID_SKULL: 'outer skull', FIFF.FIFFV_BEM_SURF_ID_BRAIN: 'inner skull', FIFF.FIFFV_BEM_SURF_ID_UNKNOWN: 'unknown ', } def _assert_inside(fro, to): """Helper to check one set of points is inside a surface""" # this is "is_inside" in surface_checks.c from .source_space import _get_solids tot_angle = _get_solids(to['rr'][to['tris']], fro['rr']) if (np.abs(tot_angle / (2 * np.pi) - 1.0) > 1e-5).any(): raise RuntimeError('Surface %s is not completely inside surface %s' % (_surf_name[fro['id']], _surf_name[to['id']])) def _check_surfaces(surfs): """Check that the surfaces are complete and non-intersecting""" for surf in surfs: _assert_complete_surface(surf) # Then check the topology for surf_1, surf_2 in zip(surfs[:-1], surfs[1:]): logger.info('Checking that %s surface is inside %s surface...' % (_surf_name[surf_2['id']], _surf_name[surf_1['id']])) _assert_inside(surf_2, surf_1) def _check_surface_size(surf): """Check that the coordinate limits are reasonable""" sizes = surf['rr'].max(axis=0) - surf['rr'].min(axis=0) if (sizes < 0.05).any(): raise RuntimeError('Dimensions of the surface %s seem too small ' '(%9.5f mm). Maybe the the unit of measure is ' 'meters instead of mm' % (_surf_name[surf['id']], 1000 * sizes.min())) def _check_thicknesses(surfs): """How close are we?""" from .surface import _compute_nearest for surf_1, surf_2 in zip(surfs[:-1], surfs[1:]): min_dist = _compute_nearest(surf_1['rr'], surf_2['rr'], return_dists=True)[0] min_dist = min_dist.min() logger.info('Checking distance between %s and %s surfaces...' % (_surf_name[surf_1['id']], _surf_name[surf_2['id']])) logger.info('Minimum distance between the %s and %s surfaces is ' 'approximately %6.1f mm' % (_surf_name[surf_1['id']], _surf_name[surf_2['id']], 1000 * min_dist)) def _surfaces_to_bem(fname_surfs, ids, sigmas, ico=None): """Convert surfaces to a BEM """ from .surface import _read_surface_geom # equivalent of mne_surf2bem surfs = list() assert len(fname_surfs) in (1, 3) for fname in fname_surfs: surfs.append(_read_surface_geom(fname, patch_stats=False, verbose=False)) surfs[-1]['rr'] /= 1000. # Downsampling if the surface is isomorphic with a subdivided icosahedron if ico is not None: for si, surf in enumerate(surfs): surfs[si] = _ico_downsample(surf, ico) for surf, id_ in zip(surfs, ids): surf['id'] = id_ # Shifting surfaces is not implemented here # Order the surfaces for the benefit of the topology checks for surf, sigma in zip(surfs, sigmas): surf['sigma'] = sigma surfs = _order_surfaces(surfs) # Check topology as best we can _check_surfaces(surfs) for surf in surfs: _check_surface_size(surf) _check_thicknesses(surfs) logger.info('Surfaces passed the basic topology checks.') return surfs @verbose def make_bem_model(subject, ico=4, conductivity=(0.3, 0.006, 0.3), subjects_dir=None, verbose=None): """Create a BEM model for a subject .. note:: To get a single layer bem corresponding to the --homog flag in the command line tool set the ``conductivity`` parameter to a list/tuple with a single value (e.g. [0.3]). Parameters ---------- subject : str The subject. ico : int | None The surface ico downsampling to use, e.g. 5=20484, 4=5120, 3=1280. If None, no subsampling is applied. conductivity : array of int, shape (3,) or (1,) The conductivities to use for each shell. Should be a single element for a one-layer model, or three elements for a three-layer model. Defaults to ``[0.3, 0.006, 0.3]``. 
The MNE-C default for a single-layer model would be ``[0.3]``. subjects_dir : string, or None Path to SUBJECTS_DIR if it is not set in the environment. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- surfaces : list of dict The BEM surfaces. Use `make_bem_solution` to turn these into a `ConductorModel` suitable for forward calculation. Notes ----- .. versionadded:: 0.10.0 See Also -------- make_bem_solution make_sphere_model read_bem_surfaces write_bem_surfaces """ conductivity = np.array(conductivity, float) if conductivity.ndim != 1 or conductivity.size not in (1, 3): raise ValueError('conductivity must be 1D array-like with 1 or 3 ' 'elements') subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) subject_dir = op.join(subjects_dir, subject) bem_dir = op.join(subject_dir, 'bem') inner_skull = op.join(bem_dir, 'inner_skull.surf') outer_skull = op.join(bem_dir, 'outer_skull.surf') outer_skin = op.join(bem_dir, 'outer_skin.surf') surfaces = [inner_skull, outer_skull, outer_skin] ids = [FIFF.FIFFV_BEM_SURF_ID_BRAIN, FIFF.FIFFV_BEM_SURF_ID_SKULL, FIFF.FIFFV_BEM_SURF_ID_HEAD] logger.info('Creating the BEM geometry...') if len(conductivity) == 1: surfaces = surfaces[:1] ids = ids[:1] surfaces = _surfaces_to_bem(surfaces, ids, conductivity, ico) logger.info('Complete.\n') return surfaces # ############################################################################ # Compute EEG sphere model def _fwd_eeg_get_multi_sphere_model_coeffs(m, n_terms): """Get the model depended weighting factor for n""" nlayer = len(m['layers']) if nlayer in (0, 1): return 1. # Initialize the arrays c1 = np.zeros(nlayer - 1) c2 = np.zeros(nlayer - 1) cr = np.zeros(nlayer - 1) cr_mult = np.zeros(nlayer - 1) for k in range(nlayer - 1): c1[k] = m['layers'][k]['sigma'] / m['layers'][k + 1]['sigma'] c2[k] = c1[k] - 1.0 cr_mult[k] = m['layers'][k]['rel_rad'] cr[k] = cr_mult[k] cr_mult[k] *= cr_mult[k] coeffs = np.zeros(n_terms - 1) for n in range(1, n_terms): # Increment the radius coefficients for k in range(nlayer - 1): cr[k] *= cr_mult[k] # Multiply the matrices M = np.eye(2) n1 = n + 1.0 for k in range(nlayer - 2, -1, -1): M = np.dot([[n + n1 * c1[k], n1 * c2[k] / cr[k]], [n * c2[k] * cr[k], n1 + n * c1[k]]], M) num = n * (2.0 * n + 1.0) ** (nlayer - 1) coeffs[n - 1] = num / (n * M[1, 1] + n1 * M[1, 0]) return coeffs def _compose_linear_fitting_data(mu, u): # y is the data to be fitted (nterms-1 x 1) # M is the model matrix (nterms-1 x nfit-1) for k in range(u['nterms'] - 1): k1 = k + 1 mu1n = np.power(mu[0], k1) u['y'][k] = u['w'][k] * (u['fn'][k1] - mu1n * u['fn'][0]) for p in range(u['nfit'] - 1): u['M'][k][p] = u['w'][k] * (np.power(mu[p + 1], k1) - mu1n) def _compute_linear_parameters(mu, u): """Compute the best-fitting linear parameters""" _compose_linear_fitting_data(mu, u) uu, sing, vv = linalg.svd(u['M'], full_matrices=False) # Compute the residuals u['resi'] = u['y'].copy() vec = np.empty(u['nfit'] - 1) for p in range(u['nfit'] - 1): vec[p] = np.dot(uu[:, p], u['y']) for k in range(u['nterms'] - 1): u['resi'][k] -= uu[k, p] * vec[p] vec[p] = vec[p] / sing[p] lambda_ = np.zeros(u['nfit']) for p in range(u['nfit'] - 1): sum_ = 0. 
for q in range(u['nfit'] - 1): sum_ += vv[q, p] * vec[q] lambda_[p + 1] = sum_ lambda_[0] = u['fn'][0] - np.sum(lambda_[1:]) rv = np.dot(u['resi'], u['resi']) / np.dot(u['y'], u['y']) return rv, lambda_ def _one_step(mu, u): """Evaluate the residual sum of squares fit for one set of mu values""" if np.abs(mu).max() > 1.0: return 1.0 # Compose the data for the linear fitting, compute SVD, then residuals _compose_linear_fitting_data(mu, u) u['uu'], u['sing'], u['vv'] = linalg.svd(u['M']) u['resi'][:] = u['y'][:] for p in range(u['nfit'] - 1): dot = np.dot(u['uu'][p], u['y']) for k in range(u['nterms'] - 1): u['resi'][k] = u['resi'][k] - u['uu'][p, k] * dot # Return their sum of squares return np.dot(u['resi'], u['resi']) def _fwd_eeg_fit_berg_scherg(m, nterms, nfit): """Fit the Berg-Scherg equivalent spherical model dipole parameters""" from scipy.optimize import fmin_cobyla assert nfit >= 2 u = dict(y=np.zeros(nterms - 1), resi=np.zeros(nterms - 1), nfit=nfit, nterms=nterms, M=np.zeros((nterms - 1, nfit - 1))) # (1) Calculate the coefficients of the true expansion u['fn'] = _fwd_eeg_get_multi_sphere_model_coeffs(m, nterms + 1) # (2) Calculate the weighting f = (min([layer['rad'] for layer in m['layers']]) / max([layer['rad'] for layer in m['layers']])) # correct weighting k = np.arange(1, nterms + 1) u['w'] = np.sqrt((2.0 * k + 1) * (3.0 * k + 1.0) / k) * np.power(f, (k - 1.0)) u['w'][-1] = 0 # Do the nonlinear minimization, constraining mu to the interval [-1, +1] mu_0 = np.random.RandomState(0).rand(nfit) * f fun = partial(_one_step, u=u) max_ = 1. - 2e-4 # adjust for fmin_cobyla "catol" that not all scipy have cons = [(lambda x: max_ - np.abs(x[ii])) for ii in range(nfit)] mu = fmin_cobyla(fun, mu_0, cons, rhobeg=0.5, rhoend=5e-3, disp=0) # (6) Do the final step: calculation of the linear parameters rv, lambda_ = _compute_linear_parameters(mu, u) order = np.argsort(mu)[::-1] mu, lambda_ = mu[order], lambda_[order] # sort: largest mu first m['mu'] = mu # This division takes into account the actual conductivities m['lambda'] = lambda_ / m['layers'][-1]['sigma'] m['nfit'] = nfit return rv @verbose def make_sphere_model(r0=(0., 0., 0.04), head_radius=0.09, info=None, relative_radii=(0.90, 0.92, 0.97, 1.0), sigmas=(0.33, 1.0, 0.004, 0.33), verbose=None): """Create a spherical model for forward solution calculation Parameters ---------- r0 : array-like | str Head center to use (in head coordinates). If 'auto', the head center will be calculated from the digitization points in info. head_radius : float | str | None If float, compute spherical shells for EEG using the given radius. If 'auto', estimate an approriate radius from the dig points in Info, If None, exclude shells. info : instance of Info | None Measurement info. Only needed if ``r0`` or ``head_radius`` are ``'auto'``. relative_radii : array-like Relative radii for the spherical shells. sigmas : array-like Sigma values for the spherical shells. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- sphere : instance of ConductorModel The resulting spherical conductor model. Notes ----- .. 
versionadded:: 0.9.0 See Also -------- make_bem_model make_bem_solution """ for name in ('r0', 'head_radius'): param = locals()[name] if isinstance(param, string_types): if param != 'auto': raise ValueError('%s, if str, must be "auto" not "%s"' % (name, param)) relative_radii = np.array(relative_radii, float).ravel() sigmas = np.array(sigmas, float).ravel() if len(relative_radii) != len(sigmas): raise ValueError('relative_radii length (%s) must match that of ' 'sigmas (%s)' % (len(relative_radii), len(sigmas))) if len(sigmas) == 0 and head_radius is not None: raise ValueError('sigmas must be supplied if head_radius is not ' 'None') if (isinstance(r0, string_types) and r0 == 'auto') or \ (isinstance(head_radius, string_types) and head_radius == 'auto'): if info is None: raise ValueError('Info must not be None for auto mode') head_radius_fit, r0_fit = fit_sphere_to_headshape(info, units='m')[:2] if isinstance(r0, string_types): r0 = r0_fit if isinstance(head_radius, string_types): head_radius = head_radius_fit sphere = ConductorModel(is_sphere=True, r0=np.array(r0), coord_frame=FIFF.FIFFV_COORD_HEAD) sphere['layers'] = list() if head_radius is not None: # Eventually these could be configurable... relative_radii = np.array(relative_radii, float) sigmas = np.array(sigmas, float) order = np.argsort(relative_radii) relative_radii = relative_radii[order] sigmas = sigmas[order] for rel_rad, sig in zip(relative_radii, sigmas): # sort layers by (relative) radius, and scale radii layer = dict(rad=rel_rad, sigma=sig) layer['rel_rad'] = layer['rad'] = rel_rad sphere['layers'].append(layer) # scale the radii R = sphere['layers'][-1]['rad'] rR = sphere['layers'][-1]['rel_rad'] for layer in sphere['layers']: layer['rad'] /= R layer['rel_rad'] /= rR # # Setup the EEG sphere model calculations # # Scale the relative radii for k in range(len(relative_radii)): sphere['layers'][k]['rad'] = (head_radius * sphere['layers'][k]['rel_rad']) rv = _fwd_eeg_fit_berg_scherg(sphere, 200, 3) logger.info('\nEquiv. model fitting -> RV = %g %%' % (100 * rv)) for k in range(3): logger.info('mu%d = %g lambda%d = %g' % (k + 1, sphere['mu'][k], k + 1, sphere['layers'][-1]['sigma'] * sphere['lambda'][k])) logger.info('Set up EEG sphere model with scalp radius %7.1f mm\n' % (1000 * head_radius,)) return sphere # ############################################################################# # Helpers _dig_kind_dict = { 'cardinal': FIFF.FIFFV_POINT_CARDINAL, 'hpi': FIFF.FIFFV_POINT_HPI, 'eeg': FIFF.FIFFV_POINT_EEG, 'extra': FIFF.FIFFV_POINT_EXTRA, } _dig_kind_rev = dict((val, key) for key, val in _dig_kind_dict.items()) _dig_kind_ints = tuple(_dig_kind_dict.values()) @verbose def fit_sphere_to_headshape(info, dig_kinds='auto', units='m', verbose=None): """Fit a sphere to the headshape points to determine head center Parameters ---------- info : instance of Info Measurement info. dig_kinds : list of str | str Kind of digitization points to use in the fitting. These can be any combination of ('cardinal', 'hpi', 'eeg', 'extra'). Can also be 'auto' (default), which will use only the 'extra' points if enough are available, and if not, uses 'extra' and 'eeg' points. units : str Can be "m" (default) or "mm". .. versionadded:: 0.12 verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- radius : float Sphere radius. origin_head: ndarray, shape (3,) Head center in head coordinates. origin_device: ndarray, shape (3,) Head center in device coordinates. 
    Notes
    -----
    This function excludes any points that are low and frontal
    (``z < 0 and y > 0``) to improve the fit.
    """
    if not isinstance(units, string_types) or units not in ('m', 'mm'):
        raise ValueError('units must be "m" or "mm"')
    if not isinstance(info, Info):
        raise TypeError('info must be an instance of Info not %s' % type(info))
    if info['dig'] is None:
        raise RuntimeError('Cannot fit headshape without digitization '
                           ', info["dig"] is None')
    if isinstance(dig_kinds, string_types):
        if dig_kinds == 'auto':
            # try "extra" first
            try:
                return fit_sphere_to_headshape(info, 'extra', units=units)
            except ValueError:
                pass
            return fit_sphere_to_headshape(info, ('extra', 'eeg'), units=units)
        else:
            dig_kinds = (dig_kinds,)
    # convert string args to ints (first make dig_kinds mutable in case tuple)
    dig_kinds = list(dig_kinds)
    for di, d in enumerate(dig_kinds):
        dig_kinds[di] = _dig_kind_dict.get(d, d)
        if dig_kinds[di] not in _dig_kind_ints:
            raise ValueError('dig_kinds[#%d] (%s) must be one of %s'
                             % (di, d, sorted(list(_dig_kind_dict.keys()))))

    # get head digitization points of the specified kind(s)
    hsp = [p['r'] for p in info['dig'] if p['kind'] in dig_kinds]
    if any(p['coord_frame'] != FIFF.FIFFV_COORD_HEAD for p in info['dig']):
        raise RuntimeError('Digitization points not in head coordinates, '
                           'contact mne-python developers')

    # exclude some frontal points (nose etc.)
    hsp = [p for p in hsp if not (p[2] < 0 and p[1] > 0)]

    if len(hsp) <= 10:
        kinds_str = ', '.join(['"%s"' % _dig_kind_rev[d]
                               for d in sorted(dig_kinds)])
        msg = ('Only %s head digitization points of the specified kind%s (%s,)'
               % (len(hsp), 's' if len(dig_kinds) != 1 else '', kinds_str))
        if len(hsp) < 4:
            raise ValueError(msg + ', at least 4 required')
        else:
            warn(msg + ', fitting may be inaccurate')
    radius, origin_head = _fit_sphere(np.array(hsp), disp=False)
    # compute origin in device coordinates
    head_to_dev = _ensure_trans(info['dev_head_t'], 'head', 'meg')
    origin_device = apply_trans(head_to_dev, origin_head)
    radius *= 1e3
    origin_head *= 1e3
    origin_device *= 1e3

    logger.info('Fitted sphere radius:'.ljust(30) + '%0.1f mm' % radius)
    # 99th percentile on Wikipedia for glabella to back of head is 21.7 cm,
    # i.e. 108 mm "radius", so let's go with 110 mm
    # en.wikipedia.org/wiki/Human_head#/media/File:HeadAnthropometry.JPG
    if radius > 110.:
        warn('Estimated head size (%0.1f mm) exceeded 99th '
             'percentile for adult head size' % (radius,))
    # > 2 cm away from head center in X or Y is strange
    if np.sqrt(np.sum(origin_head[:2] ** 2)) > 20:
        warn('(X, Y) fit (%0.1f, %0.1f) more than 20 mm from '
             'head frame origin' % tuple(origin_head[:2]))
    logger.info('Origin head coordinates:'.ljust(30) +
                '%0.1f %0.1f %0.1f mm' % tuple(origin_head))
    logger.info('Origin device coordinates:'.ljust(30) +
                '%0.1f %0.1f %0.1f mm' % tuple(origin_device))
    if units == 'm':
        radius /= 1e3
        origin_head /= 1e3
        origin_device /= 1e3
    return radius, origin_head, origin_device


def _fit_sphere(points, disp='auto'):
    """Aux function to fit a sphere to an arbitrary set of points"""
    from scipy.optimize import fmin_cobyla
    if isinstance(disp, string_types) and disp == 'auto':
        disp = True if logger.level <= 20 else False
    # initial guess for center and radius
    radii = (np.max(points, axis=1) - np.min(points, axis=1)) / 2.
radius_init = radii.mean() center_init = np.median(points, axis=0) # optimization x0 = np.concatenate([center_init, [radius_init]]) def cost_fun(center_rad): d = points - center_rad[:3] d = (np.sqrt(np.sum(d * d, axis=1)) - center_rad[3]) return np.sum(d * d) def constraint(center_rad): return center_rad[3] # radius must be >= 0 x_opt = fmin_cobyla(cost_fun, x0, constraint, rhobeg=radius_init, rhoend=radius_init * 1e-6, disp=disp) origin = x_opt[:3] radius = x_opt[3] return radius, origin def _check_origin(origin, info, coord_frame='head', disp=False): """Helper to check or auto-determine the origin""" if isinstance(origin, string_types): if origin != 'auto': raise ValueError('origin must be a numerical array, or "auto", ' 'not %s' % (origin,)) if coord_frame == 'head': R, origin = fit_sphere_to_headshape(info, verbose=False, units='m')[:2] logger.info(' Automatic origin fit: head of radius %0.1f mm' % (R * 1000.,)) del R else: origin = (0., 0., 0.) origin = np.array(origin, float) if origin.shape != (3,): raise ValueError('origin must be a 3-element array') if disp: origin_str = ', '.join(['%0.1f' % (o * 1000) for o in origin]) logger.info(' Using origin %s mm in the %s frame' % (origin_str, coord_frame)) return origin # ############################################################################ # Create BEM surfaces @verbose def make_watershed_bem(subject, subjects_dir=None, overwrite=False, volume='T1', atlas=False, gcaatlas=False, preflood=None, show=False, verbose=None): """ Create BEM surfaces using the watershed algorithm included with FreeSurfer Parameters ---------- subject : str Subject name (required) subjects_dir : str Directory containing subjects data. If None use the Freesurfer SUBJECTS_DIR environment variable. overwrite : bool Write over existing files volume : str Defaults to T1 atlas : bool Specify the --atlas option for mri_watershed gcaatlas : bool Use the subcortical atlas preflood : int Change the preflood height show : bool Show surfaces to visually inspect all three BEM surfaces (recommended). .. versionadded:: 0.12 verbose : bool, str or None If not None, override default verbose level Notes ----- .. versionadded:: 0.10 """ from .surface import read_surface from .viz.misc import plot_bem env, mri_dir = _prepare_env(subject, subjects_dir, requires_freesurfer=True, requires_mne=True)[:2] subjects_dir = env['SUBJECTS_DIR'] subject_dir = op.join(subjects_dir, subject) mri_dir = op.join(subject_dir, 'mri') T1_dir = op.join(mri_dir, volume) T1_mgz = op.join(mri_dir, volume + '.mgz') bem_dir = op.join(subject_dir, 'bem') ws_dir = op.join(subject_dir, 'bem', 'watershed') if not op.isdir(bem_dir): os.makedirs(bem_dir) if not op.isdir(T1_dir) and not op.isfile(T1_mgz): raise RuntimeError('Could not find the MRI data') if op.isdir(ws_dir): if not overwrite: raise RuntimeError('%s already exists. Use the --overwrite option' ' to recreate it.' 
% ws_dir) else: shutil.rmtree(ws_dir) # put together the command cmd = ['mri_watershed'] if preflood: cmd += ["-h", "%s" % int(preflood)] if gcaatlas: cmd += ['-atlas', '-T1', '-brain_atlas', env['FREESURFER_HOME'] + '/average/RB_all_withskull_2007-08-08.gca', subject_dir + '/mri/transforms/talairach_with_skull.lta'] elif atlas: cmd += ['-atlas'] if op.exists(T1_mgz): cmd += ['-useSRAS', '-surf', op.join(ws_dir, subject), T1_mgz, op.join(ws_dir, 'ws')] else: cmd += ['-useSRAS', '-surf', op.join(ws_dir, subject), T1_dir, op.join(ws_dir, 'ws')] # report and run logger.info('\nRunning mri_watershed for BEM segmentation with the ' 'following parameters:\n\n' 'SUBJECTS_DIR = %s\n' 'SUBJECT = %s\n' 'Results dir = %s\n' % (subjects_dir, subject, ws_dir)) os.makedirs(op.join(ws_dir, 'ws')) run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) if op.isfile(T1_mgz): # XXX : do this with python code surfs = ['brain', 'inner_skull', 'outer_skull', 'outer_skin'] for s in surfs: surf_ws_out = op.join(ws_dir, '%s_%s_surface' % (subject, s)) cmd = ['mne_convert_surface', '--surf', surf_ws_out, '--mghmri', T1_mgz, '--surfout', s, "--replacegeom"] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) # Create symbolic links surf_out = op.join(bem_dir, '%s.surf' % s) if not overwrite and op.exists(surf_out): skip_symlink = True else: if op.exists(surf_out): os.remove(surf_out) _symlink(surf_ws_out, surf_out) skip_symlink = False if skip_symlink: logger.info("Unable to create all symbolic links to .surf files " "in bem folder. Use --overwrite option to recreate " "them.") dest = op.join(bem_dir, 'watershed') else: logger.info("Symbolic links to .surf files created in bem folder") dest = bem_dir logger.info("\nThank you for waiting.\nThe BEM triangulations for this " "subject are now available at:\n%s." % dest) # Write a head file for coregistration fname_head = op.join(bem_dir, subject + '-head.fif') if op.isfile(fname_head): os.remove(fname_head) # run the equivalent of mne_surf2bem points, tris = read_surface(op.join(ws_dir, subject + '_outer_skin_surface')) points *= 1e-3 surf = dict(coord_frame=5, id=4, nn=None, np=len(points), ntri=len(tris), rr=points, sigma=1, tris=tris) write_bem_surfaces(fname_head, surf) # Show computed BEM surfaces if show: plot_bem(subject=subject, subjects_dir=subjects_dir, orientation='coronal', slices=None, show=True) logger.info('Created %s\n\nComplete.' % (fname_head,)) # ############################################################################ # Read @verbose def read_bem_surfaces(fname, patch_stats=False, s_id=None, verbose=None): """Read the BEM surfaces from a FIF file Parameters ---------- fname : string The name of the file containing the surfaces. patch_stats : bool, optional (default False) Calculate and add cortical patch statistics to the surfaces. s_id : int | None If int, only read and return the surface with the given s_id. An error will be raised if it doesn't exist. If None, all surfaces are read and returned. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- surf: list | dict A list of dictionaries that each contain a surface. If s_id is not None, only the requested surface will be returned. 
See Also -------- write_bem_surfaces, write_bem_solution, make_bem_model """ from .surface import _complete_surface_info # Default coordinate frame coord_frame = FIFF.FIFFV_COORD_MRI # Open the file, create directory f, tree, _ = fiff_open(fname) with f as fid: # Find BEM bem = dir_tree_find(tree, FIFF.FIFFB_BEM) if bem is None or len(bem) == 0: raise ValueError('BEM data not found') bem = bem[0] # Locate all surfaces bemsurf = dir_tree_find(bem, FIFF.FIFFB_BEM_SURF) if bemsurf is None: raise ValueError('BEM surface data not found') logger.info(' %d BEM surfaces found' % len(bemsurf)) # Coordinate frame possibly at the top level tag = find_tag(fid, bem, FIFF.FIFF_BEM_COORD_FRAME) if tag is not None: coord_frame = tag.data # Read all surfaces if s_id is not None: surf = [_read_bem_surface(fid, bsurf, coord_frame, s_id) for bsurf in bemsurf] surf = [s for s in surf if s is not None] if not len(surf) == 1: raise ValueError('surface with id %d not found' % s_id) else: surf = list() for bsurf in bemsurf: logger.info(' Reading a surface...') this = _read_bem_surface(fid, bsurf, coord_frame) surf.append(this) logger.info('[done]') logger.info(' %d BEM surfaces read' % len(surf)) if patch_stats: for this in surf: _complete_surface_info(this) return surf[0] if s_id is not None else surf def _read_bem_surface(fid, this, def_coord_frame, s_id=None): """Read one bem surface """ # fid should be open as a context manager here res = dict() # Read all the interesting stuff tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_ID) if tag is None: res['id'] = FIFF.FIFFV_BEM_SURF_ID_UNKNOWN else: res['id'] = int(tag.data) if s_id is not None and res['id'] != s_id: return None tag = find_tag(fid, this, FIFF.FIFF_BEM_SIGMA) res['sigma'] = 1.0 if tag is None else float(tag.data) tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NNODE) if tag is None: raise ValueError('Number of vertices not found') res['np'] = int(tag.data) tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NTRI) if tag is None: raise ValueError('Number of triangles not found') res['ntri'] = int(tag.data) tag = find_tag(fid, this, FIFF.FIFF_MNE_COORD_FRAME) if tag is None: tag = find_tag(fid, this, FIFF.FIFF_BEM_COORD_FRAME) if tag is None: res['coord_frame'] = def_coord_frame else: res['coord_frame'] = tag.data else: res['coord_frame'] = tag.data # Vertices, normals, and triangles tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NODES) if tag is None: raise ValueError('Vertex data not found') res['rr'] = tag.data.astype(np.float) # XXX : double because of mayavi bug if res['rr'].shape[0] != res['np']: raise ValueError('Vertex information is incorrect') tag = find_tag(fid, this, FIFF.FIFF_MNE_SOURCE_SPACE_NORMALS) if tag is None: tag = tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_NORMALS) if tag is None: res['nn'] = list() else: res['nn'] = tag.data if res['nn'].shape[0] != res['np']: raise ValueError('Vertex normal information is incorrect') tag = find_tag(fid, this, FIFF.FIFF_BEM_SURF_TRIANGLES) if tag is None: raise ValueError('Triangulation not found') res['tris'] = tag.data - 1 # index start at 0 in Python if res['tris'].shape[0] != res['ntri']: raise ValueError('Triangulation information is incorrect') return res @verbose def read_bem_solution(fname, verbose=None): """Read the BEM solution from a file Parameters ---------- fname : string The file containing the BEM solution. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Returns ------- bem : instance of ConductorModel The BEM solution. 
See Also -------- write_bem_solution, read_bem_surfaces, write_bem_surfaces, make_bem_solution """ # mirrors fwd_bem_load_surfaces from fwd_bem_model.c logger.info('Loading surfaces...') bem_surfs = read_bem_surfaces(fname, patch_stats=True, verbose=False) if len(bem_surfs) == 3: logger.info('Three-layer model surfaces loaded.') needed = np.array([FIFF.FIFFV_BEM_SURF_ID_HEAD, FIFF.FIFFV_BEM_SURF_ID_SKULL, FIFF.FIFFV_BEM_SURF_ID_BRAIN]) if not all(x['id'] in needed for x in bem_surfs): raise RuntimeError('Could not find necessary BEM surfaces') # reorder surfaces as necessary (shouldn't need to?) reorder = [None] * 3 for x in bem_surfs: reorder[np.where(x['id'] == needed)[0][0]] = x bem_surfs = reorder elif len(bem_surfs) == 1: if not bem_surfs[0]['id'] == FIFF.FIFFV_BEM_SURF_ID_BRAIN: raise RuntimeError('BEM Surfaces not found') logger.info('Homogeneous model surface loaded.') # convert from surfaces to solution bem = ConductorModel(is_sphere=False, surfs=bem_surfs) logger.info('\nLoading the solution matrix...\n') f, tree, _ = fiff_open(fname) with f as fid: # Find the BEM data nodes = dir_tree_find(tree, FIFF.FIFFB_BEM) if len(nodes) == 0: raise RuntimeError('No BEM data in %s' % fname) bem_node = nodes[0] # Approximation method tag = find_tag(f, bem_node, FIFF.FIFF_BEM_APPROX) if tag is None: raise RuntimeError('No BEM solution found in %s' % fname) method = tag.data[0] if method not in (FIFF.FIFFV_BEM_APPROX_CONST, FIFF.FIFFV_BEM_APPROX_LINEAR): raise RuntimeError('Cannot handle BEM approximation method : %d' % method) tag = find_tag(fid, bem_node, FIFF.FIFF_BEM_POT_SOLUTION) dims = tag.data.shape if len(dims) != 2: raise RuntimeError('Expected a two-dimensional solution matrix ' 'instead of a %d dimensional one' % dims[0]) dim = 0 for surf in bem['surfs']: if method == FIFF.FIFFV_BEM_APPROX_LINEAR: dim += surf['np'] else: # method == FIFF.FIFFV_BEM_APPROX_CONST dim += surf['ntri'] if dims[0] != dim or dims[1] != dim: raise RuntimeError('Expected a %d x %d solution matrix instead of ' 'a %d x %d one' % (dim, dim, dims[1], dims[0])) sol = tag.data nsol = dims[0] bem['solution'] = sol bem['nsol'] = nsol bem['bem_method'] = method # Gamma factors and multipliers _add_gamma_multipliers(bem) kind = { FIFF.FIFFV_BEM_APPROX_CONST: 'constant collocation', FIFF.FIFFV_BEM_APPROX_LINEAR: 'linear_collocation', }[bem['bem_method']] logger.info('Loaded %s BEM solution from %s', kind, fname) return bem def _add_gamma_multipliers(bem): """Helper to add gamma and multipliers in-place""" bem['sigma'] = np.array([surf['sigma'] for surf in bem['surfs']]) # Dirty trick for the zero conductivity outside sigma = np.r_[0.0, bem['sigma']] bem['source_mult'] = 2.0 / (sigma[1:] + sigma[:-1]) bem['field_mult'] = sigma[1:] - sigma[:-1] # make sure subsequent "zip"s work correctly assert len(bem['surfs']) == len(bem['field_mult']) bem['gamma'] = ((sigma[1:] - sigma[:-1])[np.newaxis, :] / (sigma[1:] + sigma[:-1])[:, np.newaxis]) _surf_dict = {'inner_skull': FIFF.FIFFV_BEM_SURF_ID_BRAIN, 'outer_skull': FIFF.FIFFV_BEM_SURF_ID_SKULL, 'head': FIFF.FIFFV_BEM_SURF_ID_HEAD} def _bem_find_surface(bem, id_): """Find surface from already-loaded BEM""" if isinstance(id_, string_types): name = id_ id_ = _surf_dict[id_] else: name = _bem_explain_surface(id_) idx = np.where(np.array([s['id'] for s in bem['surfs']]) == id_)[0] if len(idx) != 1: raise RuntimeError('BEM model does not have the %s triangulation' % name.replace('_', ' ')) return bem['surfs'][idx[0]] def _bem_explain_surface(id_): """Return a string corresponding to 
the given surface ID""" _rev_dict = dict((val, key) for key, val in _surf_dict.items()) return _rev_dict[id_] # ############################################################################ # Write def write_bem_surfaces(fname, surfs): """Write BEM surfaces to a fiff file Parameters ---------- fname : str Filename to write. surfs : dict | list of dict The surfaces, or a single surface. """ if isinstance(surfs, dict): surfs = [surfs] with start_file(fname) as fid: start_block(fid, FIFF.FIFFB_BEM) write_int(fid, FIFF.FIFF_BEM_COORD_FRAME, surfs[0]['coord_frame']) _write_bem_surfaces_block(fid, surfs) end_block(fid, FIFF.FIFFB_BEM) end_file(fid) def _write_bem_surfaces_block(fid, surfs): """Helper to actually write bem surfaces""" for surf in surfs: start_block(fid, FIFF.FIFFB_BEM_SURF) write_float(fid, FIFF.FIFF_BEM_SIGMA, surf['sigma']) write_int(fid, FIFF.FIFF_BEM_SURF_ID, surf['id']) write_int(fid, FIFF.FIFF_MNE_COORD_FRAME, surf['coord_frame']) write_int(fid, FIFF.FIFF_BEM_SURF_NNODE, surf['np']) write_int(fid, FIFF.FIFF_BEM_SURF_NTRI, surf['ntri']) write_float_matrix(fid, FIFF.FIFF_BEM_SURF_NODES, surf['rr']) # index start at 0 in Python write_int_matrix(fid, FIFF.FIFF_BEM_SURF_TRIANGLES, surf['tris'] + 1) if 'nn' in surf and surf['nn'] is not None and len(surf['nn']) > 0: write_float_matrix(fid, FIFF.FIFF_BEM_SURF_NORMALS, surf['nn']) end_block(fid, FIFF.FIFFB_BEM_SURF) def write_bem_solution(fname, bem): """Write a BEM model with solution Parameters ---------- fname : str The filename to use. bem : instance of ConductorModel The BEM model with solution to save. See Also -------- read_bem_solution """ with start_file(fname) as fid: start_block(fid, FIFF.FIFFB_BEM) # Coordinate frame (mainly for backward compatibility) write_int(fid, FIFF.FIFF_BEM_COORD_FRAME, bem['surfs'][0]['coord_frame']) # Surfaces _write_bem_surfaces_block(fid, bem['surfs']) # The potential solution if 'solution' in bem: if bem['bem_method'] != FIFF.FWD_BEM_LINEAR_COLL: raise RuntimeError('Only linear collocation supported') write_int(fid, FIFF.FIFF_BEM_APPROX, FIFF.FIFFV_BEM_APPROX_LINEAR) write_float_matrix(fid, FIFF.FIFF_BEM_POT_SOLUTION, bem['solution']) end_block(fid, FIFF.FIFFB_BEM) end_file(fid) # ############################################################################# # Create 3-Layers BEM model from Flash MRI images def _prepare_env(subject, subjects_dir, requires_freesurfer, requires_mne): """Helper to prepare an env object for subprocess calls""" env = os.environ.copy() if requires_freesurfer and not os.environ.get('FREESURFER_HOME'): raise RuntimeError('I cannot find freesurfer. The FREESURFER_HOME ' 'environment variable is not set.') if requires_mne and not os.environ.get('MNE_ROOT'): raise RuntimeError('I cannot find the MNE command line tools. 
The ' 'MNE_ROOT environment variable is not set.') if not isinstance(subject, string_types): raise TypeError('The subject argument must be set') subjects_dir = get_subjects_dir(subjects_dir, raise_error=True) if not op.isdir(subjects_dir): raise RuntimeError('Could not find the MRI data directory "%s"' % subjects_dir) subject_dir = op.join(subjects_dir, subject) if not op.isdir(subject_dir): raise RuntimeError('Could not find the subject data directory "%s"' % (subject_dir,)) env['SUBJECT'] = subject env['SUBJECTS_DIR'] = subjects_dir mri_dir = op.join(subject_dir, 'mri') bem_dir = op.join(subject_dir, 'bem') return env, mri_dir, bem_dir @verbose def convert_flash_mris(subject, flash30=True, convert=True, unwarp=False, subjects_dir=None, verbose=None): """Convert DICOM files for use with make_flash_bem Parameters ---------- subject : str Subject name. flash30 : bool Use 30-degree flip angle data. convert : bool If True, convert the Flash MRI images to mgz files; set to False if they have already been converted. unwarp : bool Run grad_unwarp with -unwarp option on each of the converted data sets. It requires FreeSurfer's MATLAB toolbox to be properly installed. subjects_dir : string, or None Path to SUBJECTS_DIR if it is not set in the environment. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Notes ----- Before running this script do the following: (unless convert=False is specified) 1. Copy all of your FLASH images in a single directory <source> and create a directory <dest> to hold the output of mne_organize_dicom 2. cd to <dest> and run $ mne_organize_dicom <source> to create an appropriate directory structure 3. Create symbolic links to make flash05 and flash30 point to the appropriate series: $ ln -s <FLASH 5 series dir> flash05 $ ln -s <FLASH 30 series dir> flash30 Some partition formats (e.g. FAT32) do not support symbolic links. In this case, copy the file to the appropriate series: $ cp <FLASH 5 series dir> flash05 $ cp <FLASH 30 series dir> flash30 4. cd to the directory where flash05 and flash30 links are 5. Set SUBJECTS_DIR and SUBJECT environment variables appropriately 6. Run this script This function assumes that the Freesurfer segmentation of the subject has been completed. In particular, the T1.mgz and brain.mgz MRI volumes should be, as usual, in the subject's mri directory. """ env, mri_dir = _prepare_env(subject, subjects_dir, requires_freesurfer=True, requires_mne=False)[:2] curdir = os.getcwd() # Step 1a : Data conversion to mgz format if not op.exists(op.join(mri_dir, 'flash', 'parameter_maps')): os.makedirs(op.join(mri_dir, 'flash', 'parameter_maps')) echos_done = 0 if convert: logger.info("\n---- Converting Flash images ----") echos = ['001', '002', '003', '004', '005', '006', '007', '008'] if flash30: flashes = ['05', '30'] else: flashes = ['05'] # missing = False for flash in flashes: for echo in echos: if not op.isdir(op.join('flash' + flash, echo)): missing = True if missing: echos = ['002', '003', '004', '005', '006', '007', '008', '009'] for flash in flashes: for echo in echos: if not op.isdir(op.join('flash' + flash, echo)): raise RuntimeError("Directory %s is missing." % op.join('flash' + flash, echo)) # for flash in flashes: for echo in echos: if not op.isdir(op.join('flash' + flash, echo)): raise RuntimeError("Directory %s is missing."
% op.join('flash' + flash, echo)) sample_file = glob.glob(op.join('flash' + flash, echo, '*'))[0] dest_file = op.join(mri_dir, 'flash', 'mef' + flash + '_' + echo + '.mgz') # do not redo if already present if op.isfile(dest_file): logger.info("The file %s is already there" % dest_file) else: cmd = ['mri_convert', sample_file, dest_file] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) echos_done += 1 # Step 1b : Run grad_unwarp on converted files os.chdir(op.join(mri_dir, "flash")) files = glob.glob("mef*.mgz") if unwarp: logger.info("\n---- Unwarp mgz data sets ----") for infile in files: outfile = infile.replace(".mgz", "u.mgz") cmd = ['grad_unwarp', '-i', infile, '-o', outfile, '-unwarp', 'true'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) # Clear parameter maps if some of the data were reconverted if echos_done > 0 and op.exists("parameter_maps"): shutil.rmtree("parameter_maps") logger.info("\nParameter maps directory cleared") if not op.exists("parameter_maps"): os.makedirs("parameter_maps") # Step 2 : Create the parameter maps if flash30: logger.info("\n---- Creating the parameter maps ----") if unwarp: files = glob.glob("mef05*u.mgz") if len(os.listdir('parameter_maps')) == 0: cmd = ['mri_ms_fitparms'] + files + ['parameter_maps'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) else: logger.info("Parameter maps were already computed") # Step 3 : Synthesize the flash 5 images logger.info("\n---- Synthesizing flash 5 images ----") os.chdir('parameter_maps') if not op.exists('flash5.mgz'): cmd = ['mri_synthesize', '20 5 5', 'T1.mgz', 'PD.mgz', 'flash5.mgz'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) os.remove('flash5_reg.mgz') else: logger.info("Synthesized flash 5 volume is already there") else: logger.info("\n---- Averaging flash5 echoes ----") os.chdir('parameter_maps') if unwarp: files = glob.glob("mef05*u.mgz") else: files = glob.glob("mef05*.mgz") cmd = ['mri_average', '-noconform'] + files + ['flash5.mgz'] run_subprocess(cmd, env=env, stdout=sys.stdout) if op.exists('flash5_reg.mgz'): os.remove('flash5_reg.mgz') # Go back to initial directory os.chdir(curdir) @verbose def make_flash_bem(subject, overwrite=False, show=True, subjects_dir=None, verbose=None): """Create 3-Layer BEM model from prepared flash MRI images Parameters ---------- subject : str Subject name. overwrite : bool Write over existing .surf files in bem folder. show : bool Show surfaces to visually inspect all three BEM surfaces (recommended). subjects_dir : string, or None Path to SUBJECTS_DIR if it is not set in the environment. verbose : bool, str, int, or None If not None, override default verbose level (see mne.verbose). Notes ----- This program assumes that FreeSurfer and MNE are installed and sourced properly. This function extracts the BEM surfaces (outer skull, inner skull, and outer skin) from multiecho FLASH MRI data with spin angles of 5 and 30 degrees, in mgz format. This function assumes that the flash images are available in the folder mri/bem/flash within the freesurfer subject reconstruction.
See Also -------- convert_flash_mris """ from .viz.misc import plot_bem env, mri_dir, bem_dir = _prepare_env(subject, subjects_dir, requires_freesurfer=True, requires_mne=True) curdir = os.getcwd() subjects_dir = env['SUBJECTS_DIR'] logger.info('\nProcessing the flash MRI data to produce BEM meshes with ' 'the following parameters:\n' 'SUBJECTS_DIR = %s\n' 'SUBJECT = %s\n' 'Result dir = %s\n' % (subjects_dir, subject, op.join(bem_dir, 'flash'))) # Step 4 : Register with MPRAGE logger.info("\n---- Registering flash 5 with MPRAGE ----") if not op.exists('flash5_reg.mgz'): if op.exists(op.join(mri_dir, 'T1.mgz')): ref_volume = op.join(mri_dir, 'T1.mgz') else: ref_volume = op.join(mri_dir, 'T1') cmd = ['fsl_rigid_register', '-r', ref_volume, '-i', 'flash5.mgz', '-o', 'flash5_reg.mgz'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) else: logger.info("Registered flash 5 image is already there") # Step 5a : Convert flash5 into COR logger.info("\n---- Converting flash5 volume into COR format ----") shutil.rmtree(op.join(mri_dir, 'flash5'), ignore_errors=True) os.makedirs(op.join(mri_dir, 'flash5')) cmd = ['mri_convert', 'flash5_reg.mgz', op.join(mri_dir, 'flash5')] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) # Step 5b and c : Convert the mgz volumes into COR os.chdir(mri_dir) convert_T1 = False if not op.isdir('T1') or len(glob.glob(op.join('T1', 'COR*'))) == 0: convert_T1 = True convert_brain = False if not op.isdir('brain') or len(glob.glob(op.join('brain', 'COR*'))) == 0: convert_brain = True logger.info("\n---- Converting T1 volume into COR format ----") if convert_T1: if not op.isfile('T1.mgz'): raise RuntimeError("Both T1 mgz and T1 COR volumes missing.") os.makedirs('T1') cmd = ['mri_convert', 'T1.mgz', 'T1'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) else: logger.info("T1 volume is already in COR format") logger.info("\n---- Converting brain volume into COR format ----") if convert_brain: if not op.isfile('brain.mgz'): raise RuntimeError("Both brain mgz and brain COR volumes missing.") os.makedirs('brain') cmd = ['mri_convert', 'brain.mgz', 'brain'] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) else: logger.info("Brain volume is already in COR format") # Finally ready to go logger.info("\n---- Creating the BEM surfaces ----") cmd = ['mri_make_bem_surfaces', subject] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) logger.info("\n---- Converting the tri files into surf files ----") os.chdir(bem_dir) if not op.exists('flash'): os.makedirs('flash') os.chdir('flash') surfs = ['inner_skull', 'outer_skull', 'outer_skin'] for surf in surfs: shutil.move(op.join(bem_dir, surf + '.tri'), surf + '.tri') cmd = ['mne_convert_surface', '--tri', surf + '.tri', '--surfout', surf + '.surf', '--swap', '--mghmri', op.join(subjects_dir, subject, 'mri', 'flash', 'parameter_maps', 'flash5_reg.mgz')] run_subprocess(cmd, env=env, stdout=sys.stdout, stderr=sys.stderr) # Cleanup section logger.info("\n---- Cleaning up ----") os.chdir(bem_dir) os.remove('inner_skull_tmp.tri') os.chdir(mri_dir) if convert_T1: shutil.rmtree('T1') logger.info("Deleted the T1 COR volume") if convert_brain: shutil.rmtree('brain') logger.info("Deleted the brain COR volume") shutil.rmtree('flash5') logger.info("Deleted the flash5 COR volume") # Create symbolic links to the .surf files in the bem folder logger.info("\n---- Creating symbolic links ----") os.chdir(bem_dir) for surf in surfs: surf = surf + '.surf' if not overwrite and 
op.exists(surf): skip_symlink = True else: if op.exists(surf): os.remove(surf) _symlink(op.join('flash', surf), op.join(surf)) skip_symlink = False if skip_symlink: logger.info("Unable to create all symbolic links to .surf files " "in bem folder. Use --overwrite option to recreate them.") dest = op.join(bem_dir, 'flash') else: logger.info("Symbolic links to .surf files created in bem folder") dest = bem_dir logger.info("\nThank you for waiting.\nThe BEM triangulations for this " "subject are now available at:\n%s.\nWe hope the BEM meshes " "created will facilitate your MEG and EEG data analyses." % dest) # Show computed BEM surfaces if show: plot_bem(subject=subject, subjects_dir=subjects_dir, orientation='coronal', slices=None, show=True) # Go back to initial directory os.chdir(curdir) def _symlink(src, dest): try: os.symlink(src, dest) except OSError: warn('Could not create symbolic link %s. Check that your partition ' 'handles symbolic links. The file will be copied instead.' % dest) shutil.copy(src, dest)
[]
[]
[ "MNE_ROOT", "FREESURFER_HOME" ]
[]
["MNE_ROOT", "FREESURFER_HOME"]
python
2
0
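The `_fit_sphere` helper above reduces head-sphere fitting to a four-parameter COBYLA minimization: the sphere center plus a radius, with a single constraint keeping the radius non-negative. A minimal self-contained sketch of the same fit on synthetic data (the point cloud, noise level, and variable names here are invented for illustration):

    import numpy as np
    from scipy.optimize import fmin_cobyla

    rng = np.random.RandomState(0)
    # synthetic digitization: points on a 90 mm sphere, lightly perturbed
    true_center, true_radius = np.array([0.01, 0.0, 0.04]), 0.09
    dirs = rng.randn(500, 3)
    dirs /= np.linalg.norm(dirs, axis=1, keepdims=True)
    points = true_center + true_radius * dirs + 1e-3 * rng.randn(500, 3)

    center_init = np.median(points, axis=0)
    radius_init = np.linalg.norm(points - center_init, axis=1).mean()
    x0 = np.concatenate([center_init, [radius_init]])

    def cost_fun(p):
        # squared deviation of each point's distance from the candidate radius
        d = np.linalg.norm(points - p[:3], axis=1) - p[3]
        return np.sum(d * d)

    x_opt = fmin_cobyla(cost_fun, x0, [lambda p: p[3]],  # radius must be >= 0
                        rhobeg=radius_init, rhoend=radius_init * 1e-6, disp=False)
    print("center (m):", x_opt[:3], "radius (m):", x_opt[3])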
iterative/resource_task.go
package iterative import ( "context" "errors" "fmt" "os" "strings" "time" "github.com/aohorodnyk/uid" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "terraform-provider-iterative/iterative/utils" "terraform-provider-iterative/task" "terraform-provider-iterative/task/common" ) var ( logTpl = "%s may take several minutes (consider increasing `timeout` https://registry.terraform.io/providers/iterative/iterative/latest/docs/resources/task#timeout). Please wait." ) func resourceTask() *schema.Resource { return &schema.Resource{ CreateContext: resourceTaskCreate, DeleteContext: resourceTaskDelete, ReadContext: resourceTaskRead, UpdateContext: resourceTaskRead, Schema: map[string]*schema.Schema{ "name": { Type: schema.TypeString, ForceNew: true, Optional: true, }, "cloud": { Type: schema.TypeString, ForceNew: true, Required: true, }, "region": { Type: schema.TypeString, ForceNew: true, Optional: true, Default: "us-west", }, "machine": { Type: schema.TypeString, ForceNew: true, Optional: true, Default: "m", }, "disk_size": { Type: schema.TypeInt, ForceNew: true, Optional: true, Default: 30, }, "spot": { Type: schema.TypeFloat, ForceNew: true, Optional: true, Default: -1, }, "image": { Type: schema.TypeString, ForceNew: true, Optional: true, Default: "ubuntu", }, "ssh_public_key": { Type: schema.TypeString, Computed: true, Sensitive: true, }, "ssh_private_key": { Type: schema.TypeString, Computed: true, Sensitive: true, }, "addresses": { Type: schema.TypeList, Computed: true, Elem: &schema.Schema{ Type: schema.TypeString, }, }, "status": { Type: schema.TypeMap, Computed: true, Elem: &schema.Schema{ Type: schema.TypeInt, }, }, "events": { Type: schema.TypeList, Computed: true, Elem: &schema.Schema{ Type: schema.TypeString, }, }, "logs": { Type: schema.TypeList, Computed: true, Elem: &schema.Schema{ Type: schema.TypeString, }, }, "script": { Type: schema.TypeString, ForceNew: true, Required: true, }, "storage": { Optional: true, Type: schema.TypeSet, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "workdir": { Type: schema.TypeString, ForceNew: true, Optional: true, Default: "", }, "output": { Type: schema.TypeString, ForceNew: false, Optional: true, Default: "", }, }, }, }, "parallelism": { Type: schema.TypeInt, ForceNew: true, Optional: true, Default: 1, }, "environment": { Type: schema.TypeMap, ForceNew: true, Optional: true, Elem: &schema.Schema{ Type: schema.TypeString, }, }, "timeout": { Type: schema.TypeInt, ForceNew: true, Optional: true, Default: 24 * time.Hour / time.Second, }, }, Timeouts: &schema.ResourceTimeout{ Create: schema.DefaultTimeout(15 * time.Minute), Read: schema.DefaultTimeout(3 * time.Minute), Update: schema.DefaultTimeout(3 * time.Minute), Delete: schema.DefaultTimeout(15 * time.Minute), }, } } func resourceTaskCreate(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) { logger := utils.TpiLogger(d) logger.Info(fmt.Sprintf(logTpl, "Creation")) spot := d.Get("spot").(float64) if spot > 0 { logger.Warn(fmt.Sprintf("Setting a maximum price `spot=%f` USD/h. 
Consider using auto-pricing (`spot=0`) instead.", spot)) } task, err := resourceTaskBuild(ctx, d, m) if err != nil { return diagnostic(diags, err, diag.Error) } d.SetId(task.GetIdentifier(ctx).Long()) if err := task.Create(ctx); err != nil { diags = diagnostic(diags, err, diag.Error) if err := task.Delete(ctx); err != nil { diags = diagnostic(diags, err, diag.Error) } else { diags = diagnostic(diags, errors.New("failed to create"), diag.Error) d.SetId("") } } return } func resourceTaskRead(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) { task, err := resourceTaskBuild(ctx, d, m) if err != nil { return diagnostic(diags, err, diag.Warning) } if err := task.Read(ctx); err != nil { return diagnostic(diags, err, diag.Warning) } keyPair, err := task.GetKeyPair(ctx) if err != nil { return diagnostic(diags, err, diag.Warning) } publicKey, err := keyPair.PublicString() if err != nil { return diagnostic(diags, err, diag.Warning) } d.Set("ssh_public_key", publicKey) privateKey, err := keyPair.PrivateString() if err != nil { return diagnostic(diags, err, diag.Warning) } d.Set("ssh_private_key", privateKey) var addresses []string for _, address := range task.GetAddresses(ctx) { addresses = append(addresses, address.String()) } d.Set("addresses", addresses) var events []string for _, event := range task.Events(ctx) { events = append(events, fmt.Sprintf( "%s: %s\n%s", event.Time.Format("2006-01-02 15:04:05"), event.Code, strings.Join(event.Description, "\n"), )) } d.Set("events", events) status, err := task.Status(ctx) if err != nil { return diagnostic(diags, err, diag.Warning) } d.Set("status", status) logs, err := task.Logs(ctx) if err != nil { return diagnostic(diags, err, diag.Warning) } d.Set("logs", logs) d.SetId(task.GetIdentifier(ctx).Long()) logger := utils.TpiLogger(d) logger.Info("instance") logger.Info("logs") logger.Info("status") return diags } func resourceTaskDelete(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) { logger := utils.TpiLogger(d) logger.Info(fmt.Sprintf(logTpl, "Destruction")) task, err := resourceTaskBuild(ctx, d, m) if err != nil { return diagnostic(diags, err, diag.Error) } if err := task.Delete(ctx); err != nil { return diagnostic(diags, err, diag.Error) } return } func resourceTaskBuild(ctx context.Context, d *schema.ResourceData, m interface{}) (task.Task, error) { v := make(map[string]*string) for name, value := range d.Get("environment").(map[string]interface{}) { v[name] = nil if contents := value.(string); contents != "" { v[name] = &contents } } val := "true" v["TPI_TASK"] = &val v["CI"] = nil v["CI_*"] = nil v["GITHUB_*"] = nil v["BITBUCKET_*"] = nil v["CML_*"] = nil v["REPO_TOKEN"] = nil c := common.Cloud{ Provider: common.Provider(d.Get("cloud").(string)), Region: common.Region(d.Get("region").(string)), Timeouts: common.Timeouts{ Create: d.Timeout(schema.TimeoutCreate), Read: d.Timeout(schema.TimeoutRead), Update: d.Timeout(schema.TimeoutUpdate), Delete: d.Timeout(schema.TimeoutDelete), }, } directory := "" directory_out := "" if d.Get("storage").(*schema.Set).Len() > 0 { storage := d.Get("storage").(*schema.Set).List()[0].(map[string]interface{}) directory = storage["workdir"].(string) directory_out = storage["output"].(string) } t := common.Task{ Size: common.Size{ Machine: d.Get("machine").(string), Storage: d.Get("disk_size").(int), }, Environment: common.Environment{ Image: d.Get("image").(string), Script: d.Get("script").(string), Variables: v, Directory: directory, DirectoryOut:
directory_out, Timeout: time.Duration(d.Get("timeout").(int)) * time.Second, }, Firewall: common.Firewall{ Ingress: common.FirewallRule{ Ports: &[]uint16{22, 80}, // FIXME: just for testing Jupyter }, // Egress is open on every port }, Spot: common.Spot(d.Get("spot").(float64)), Parallelism: uint16(d.Get("parallelism").(int)), } name := d.Id() if name == "" { if identifier := d.Get("name").(string); identifier != "" { name = identifier } else if identifier := os.Getenv("GITHUB_RUN_ID"); identifier != "" { name = identifier } else if identifier := os.Getenv("CI_PIPELINE_ID"); identifier != "" { name = identifier } else if identifier := os.Getenv("BITBUCKET_STEP_TRIGGERER_UUID"); identifier != "" { name = identifier } else { name = uid.NewProvider36Size(8).MustGenerate().String() } } return task.New(ctx, c, common.Identifier(name), t) } func diagnostic(diags diag.Diagnostics, err error, severity diag.Severity) diag.Diagnostics { return append(diags, diag.Diagnostic{ Severity: severity, Summary: err.Error(), }) }
[ "\"GITHUB_RUN_ID\"", "\"CI_PIPELINE_ID\"", "\"BITBUCKET_STEP_TRIGGERER_UUID\"" ]
[]
[ "CI_PIPELINE_ID", "GITHUB_RUN_ID", "BITBUCKET_STEP_TRIGGERER_UUID" ]
[]
["CI_PIPELINE_ID", "GITHUB_RUN_ID", "BITBUCKET_STEP_TRIGGERER_UUID"]
go
3
0
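In `resourceTaskBuild` above, the task identifier falls back from the stored resource id to the `name` attribute, then to CI-provided run ids (GITHUB_RUN_ID, CI_PIPELINE_ID, BITBUCKET_STEP_TRIGGERER_UUID), and finally to a random 8-character id. A sketch of the same fallback chain in Python (the random suffix is a stand-in, not the provider's exact base-36 encoding):

    import os
    import secrets

    def task_name(existing_id="", name_attr=""):
        """Mirror the identifier fallback used by resourceTaskBuild."""
        if existing_id:
            return existing_id
        candidates = [name_attr,
                      os.getenv("GITHUB_RUN_ID", ""),
                      os.getenv("CI_PIPELINE_ID", ""),
                      os.getenv("BITBUCKET_STEP_TRIGGERER_UUID", "")]
        for candidate in candidates:
            if candidate:
                return candidate
        return secrets.token_hex(4)  # stand-in for uid.NewProvider36Size(8)

    print(task_name(name_attr="my-experiment"))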
ml-agents/mlagents/trainers/ppo/trainer.py
# # Unity ML-Agents Toolkit # ## ML-Agent Learning (PPO) # Contains an implementation of PPO as described in: https://arxiv.org/abs/1707.06347 from collections import defaultdict from typing import cast import numpy as np from mlagents_envs.logging_util import get_logger from mlagents_envs.base_env import BehaviorSpec from mlagents.trainers.policy.nn_policy import NNPolicy from mlagents.trainers.trainer.rl_trainer import RLTrainer from mlagents.trainers.policy.tf_policy import TFPolicy from mlagents.trainers.ppo.optimizer import PPOOptimizer from mlagents.trainers.trajectory import Trajectory from mlagents.trainers.behavior_id_utils import BehaviorIdentifiers from mlagents.trainers.settings import TrainerSettings, PPOSettings logger = get_logger(__name__) class PPOTrainer(RLTrainer): """The PPOTrainer is an implementation of the PPO algorithm.""" def __init__( self, brain_name: str, reward_buff_cap: int, trainer_settings: TrainerSettings, training: bool, load: bool, seed: int, artifact_path: str, ): """ Responsible for collecting experiences and training PPO model. :param brain_name: The name of the brain associated with trainer config :param reward_buff_cap: Max reward history to track in the reward buffer :param trainer_settings: The parameters for the trainer. :param training: Whether the trainer is set for training. :param load: Whether the model should be loaded. :param seed: The seed the model will be initialized with :param artifact_path: The directory within which to store artifacts from this trainer. """ super(PPOTrainer, self).__init__( brain_name, trainer_settings, training, artifact_path, reward_buff_cap ) self.hyperparameters: PPOSettings = cast( PPOSettings, self.trainer_settings.hyperparameters ) self.load = load self.seed = seed self.policy: NNPolicy = None # type: ignore def _process_trajectory(self, trajectory: Trajectory) -> None: """ Takes a trajectory and processes it, putting it into the update buffer. Processing involves calculating value and advantage targets for model updating step. :param trajectory: The Trajectory tuple containing the steps to be processed. 
""" super()._process_trajectory(trajectory) agent_id = trajectory.agent_id # All the agents should have the same ID agent_buffer_trajectory = trajectory.to_agentbuffer() # Update the normalization if self.is_training: self.policy.update_normalization(agent_buffer_trajectory["vector_obs"]) # Get all value estimates value_estimates, value_next = self.optimizer.get_trajectory_value_estimates( agent_buffer_trajectory, trajectory.next_obs, trajectory.done_reached and not trajectory.interrupted, ) for name, v in value_estimates.items(): agent_buffer_trajectory["{}_value_estimates".format(name)].extend(v) self._stats_reporter.add_stat( self.optimizer.reward_signals[name].value_name, np.mean(v) ) # Evaluate all reward functions self.collected_rewards["environment"][agent_id] += np.sum( agent_buffer_trajectory["environment_rewards"] ) for name, reward_signal in self.optimizer.reward_signals.items(): evaluate_result = reward_signal.evaluate_batch( agent_buffer_trajectory ).scaled_reward agent_buffer_trajectory["{}_rewards".format(name)].extend(evaluate_result) # Report the reward signals self.collected_rewards[name][agent_id] += np.sum(evaluate_result) # Compute GAE and returns tmp_advantages = [] tmp_returns = [] for name in self.optimizer.reward_signals: bootstrap_value = value_next[name] local_rewards = agent_buffer_trajectory[ "{}_rewards".format(name) ].get_batch() local_value_estimates = agent_buffer_trajectory[ "{}_value_estimates".format(name) ].get_batch() local_advantage = get_gae( rewards=local_rewards, value_estimates=local_value_estimates, value_next=bootstrap_value, gamma=self.optimizer.reward_signals[name].gamma, lambd=self.hyperparameters.lambd, ) local_return = local_advantage + local_value_estimates # This is later use as target for the different value estimates agent_buffer_trajectory["{}_returns".format(name)].set(local_return) agent_buffer_trajectory["{}_advantage".format(name)].set(local_advantage) tmp_advantages.append(local_advantage) tmp_returns.append(local_return) # Get global advantages global_advantages = list( np.mean(np.array(tmp_advantages, dtype=np.float32), axis=0) ) global_returns = list(np.mean(np.array(tmp_returns, dtype=np.float32), axis=0)) agent_buffer_trajectory["advantages"].set(global_advantages) agent_buffer_trajectory["discounted_returns"].set(global_returns) # Append to update buffer agent_buffer_trajectory.resequence_and_append( self.update_buffer, training_length=self.policy.sequence_length ) # If this was a terminal trajectory, append stats and reset reward collection if trajectory.done_reached: self._update_end_episode_stats(agent_id, self.optimizer) def _is_ready_update(self): """ Returns whether or not the trainer has enough elements to run update model :return: A boolean corresponding to whether or not update_model() can be run """ size_of_buffer = self.update_buffer.num_experiences return size_of_buffer > self.hyperparameters.buffer_size def _update_policy(self): """ Uses demonstration_buffer to update the policy. The reward signal generators must be updated in this method at their own pace. """ buffer_length = self.update_buffer.num_experiences self.cumulative_returns_since_policy_update.clear() # Make sure batch_size is a multiple of sequence length. During training, we # will need to reshape the data into a batch_size x sequence_length tensor. 
batch_size = ( self.hyperparameters.batch_size - self.hyperparameters.batch_size % self.policy.sequence_length ) # Make sure there is at least one sequence batch_size = max(batch_size, self.policy.sequence_length) n_sequences = max( int(self.hyperparameters.batch_size / self.policy.sequence_length), 1 ) advantages = self.update_buffer["advantages"].get_batch() self.update_buffer["advantages"].set( (advantages - advantages.mean()) / (advantages.std() + 1e-10) ) num_epoch = self.hyperparameters.num_epoch batch_update_stats = defaultdict(list) for _ in range(num_epoch): self.update_buffer.shuffle(sequence_length=self.policy.sequence_length) buffer = self.update_buffer max_num_batch = buffer_length // batch_size for i in range(0, max_num_batch * batch_size, batch_size): update_stats = self.optimizer.update( buffer.make_mini_batch(i, i + batch_size), n_sequences ) for stat_name, value in update_stats.items(): batch_update_stats[stat_name].append(value) for stat, stat_list in batch_update_stats.items(): self._stats_reporter.add_stat(stat, np.mean(stat_list)) if self.optimizer.bc_module: update_stats = self.optimizer.bc_module.update() for stat, val in update_stats.items(): self._stats_reporter.add_stat(stat, val) self._clear_update_buffer() return True def create_policy( self, parsed_behavior_id: BehaviorIdentifiers, behavior_spec: BehaviorSpec ) -> TFPolicy: """ Creates a PPO policy to trainers list of policies. :param behavior_spec: specifications for policy construction :return policy """ policy = NNPolicy( self.seed, behavior_spec, self.trainer_settings, self.is_training, self.artifact_path, self.load, condition_sigma_on_obs=False, # Faster training for PPO create_tf_graph=False, # We will create the TF graph in the Optimizer ) return policy def add_policy( self, parsed_behavior_id: BehaviorIdentifiers, policy: TFPolicy ) -> None: """ Adds policy to trainer. :param parsed_behavior_id: Behavior identifiers that the policy should belong to. :param policy: Policy to associate with name_behavior_id. """ if self.policy: logger.warning( "Your environment contains multiple teams, but {} doesn't support adversarial games. Enable self-play to \ train adversarial games.".format( self.__class__.__name__ ) ) if not isinstance(policy, NNPolicy): raise RuntimeError("Non-NNPolicy passed to PPOTrainer.add_policy()") self.policy = policy self.policies[parsed_behavior_id.behavior_id] = policy self.optimizer = PPOOptimizer(self.policy, self.trainer_settings) for _reward_signal in self.optimizer.reward_signals.keys(): self.collected_rewards[_reward_signal] = defaultdict(lambda: 0) # Needed to resume loads properly self.step = policy.get_current_step() def get_policy(self, name_behavior_id: str) -> TFPolicy: """ Gets policy from trainer associated with name_behavior_id :param name_behavior_id: full identifier of policy """ return self.policy def discount_rewards(r, gamma=0.99, value_next=0.0): """ Computes discounted sum of future rewards for use in updating value estimate. :param r: List of rewards. :param gamma: Discount factor. :param value_next: T+1 value estimate for returns calculation. :return: discounted sum of future rewards as list. """ discounted_r = np.zeros_like(r) running_add = value_next for t in reversed(range(0, r.size)): running_add = running_add * gamma + r[t] discounted_r[t] = running_add return discounted_r def get_gae(rewards, value_estimates, value_next=0.0, gamma=0.99, lambd=0.95): """ Computes generalized advantage estimate for use in updating policy. 
:param rewards: list of rewards for time-steps t to T. :param value_next: Value estimate for time-step T+1. :param value_estimates: list of value estimates for time-steps t to T. :param gamma: Discount factor. :param lambd: GAE weighting factor. :return: list of advantage estimates for time-steps t to T. """ value_estimates = np.append(value_estimates, value_next) delta_t = rewards + gamma * value_estimates[1:] - value_estimates[:-1] advantage = discount_rewards(r=delta_t, gamma=gamma * lambd) return advantage
[]
[]
[]
[]
[]
python
null
null
null
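Since `discount_rewards` and `get_gae` above are pure NumPy, they are easy to sanity-check in isolation. A small worked example (reward and value numbers picked arbitrarily) confirming that with lambd=1 the advantage reduces to the discounted return minus the value baseline:

    import numpy as np

    def discount_rewards(r, gamma=0.99, value_next=0.0):
        # backward pass accumulating gamma-discounted sums, as in the trainer
        discounted_r = np.zeros_like(r)
        running_add = value_next
        for t in reversed(range(0, r.size)):
            running_add = running_add * gamma + r[t]
            discounted_r[t] = running_add
        return discounted_r

    def get_gae(rewards, value_estimates, value_next=0.0, gamma=0.99, lambd=0.95):
        value_estimates = np.append(value_estimates, value_next)
        delta_t = rewards + gamma * value_estimates[1:] - value_estimates[:-1]
        return discount_rewards(r=delta_t, gamma=gamma * lambd)

    rewards = np.array([0.0, 0.0, 1.0])  # sparse reward at episode end
    values = np.array([0.5, 0.6, 0.7])   # critic estimates V(s_t)
    adv = get_gae(rewards, values, value_next=0.0, gamma=0.9, lambd=1.0)
    returns = discount_rewards(rewards, gamma=0.9, value_next=0.0)
    print(adv)               # [0.31 0.3  0.3 ]
    print(returns - values)  # identical when lambd=1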
manage.py
"""Django's command-line utility for administrative tasks.""" import os import sys def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'translate.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
[]
[]
[]
[]
[]
python
0
0
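Because manage.py uses `os.environ.setdefault`, a DJANGO_SETTINGS_MODULE already exported in the shell takes precedence over the hard-coded 'translate.settings'. A quick check of that behavior (the override module name below is hypothetical):

    import os

    # pretend the shell exported an override before manage.py ran
    os.environ["DJANGO_SETTINGS_MODULE"] = "translate.settings_ci"  # hypothetical
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "translate.settings")
    print(os.environ["DJANGO_SETTINGS_MODULE"])  # translate.settings_ci: not overwritten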
app/app/settings.py
""" Django settings for app project. Generated by 'django-admin startproject' using Django 2.1.15. For more information on this file, see https://docs.djangoproject.com/en/2.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.1/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '+4+^#4d34ma2jmf+dj=fh%+m%9q!xti9e0&4#@ifk53^*23$o&' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', 'rest_framework.authtoken', 'core', 'user', 'recipe', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'app.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'app.wsgi.application' # Database # https://docs.djangoproject.com/en/2.1/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql', 'HOST': os.environ.get('DB_HOST'), 'NAME': os.environ.get('DB_NAME'), 'USER': os.environ.get('DB_USER'), 'PASSWORD': os.environ.get('DB_PASS'), } } PASSWORD_HASHERS = [ 'django.contrib.auth.hashers.PBKDF2PasswordHasher', 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', 'django.contrib.auth.hashers.Argon2PasswordHasher', 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', ] # Password validation # https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_URL = '/static/' MEDIA_URL = '/media/' MEDIA_ROOT = '/vol/web/media' STATIC_ROOT = '/vol/web/static' AUTH_USER_MODEL = 'core.User'
[]
[]
[ "DB_PASS", "DB_USER", "DB_NAME", "DB_HOST" ]
[]
["DB_PASS", "DB_USER", "DB_NAME", "DB_HOST"]
python
4
0
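The DATABASES block above reads DB_HOST, DB_NAME, DB_USER and DB_PASS with `os.environ.get`, which silently yields None when a variable is unset, so a missing value only surfaces when Django first opens a connection. A sketch of a fail-fast variant (the `require_env` helper is illustrative, not part of this project):

    import os

    def require_env(name):
        """Return an environment variable's value, raising at import time if unset."""
        value = os.environ.get(name)
        if value is None:
            raise RuntimeError('missing required environment variable: %s' % name)
        return value

    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql',
            'HOST': require_env('DB_HOST'),
            'NAME': require_env('DB_NAME'),
            'USER': require_env('DB_USER'),
            'PASSWORD': require_env('DB_PASS'),
        }
    }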
clients/estafetteciapi/client_test.go
package estafetteciapi import ( "context" "os" "testing" "github.com/stretchr/testify/assert" ) func TestGetToken(t *testing.T) { t.Run("ReturnsToken", func(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } ctx := context.Background() getBaseURL := os.Getenv("API_BASE_URL") clientID := os.Getenv("CLIENT_ID") clientSecret := os.Getenv("CLIENT_SECRET") client, err := NewClient(getBaseURL, clientID, clientSecret) assert.Nil(t, err) // act token, err := client.GetToken(ctx) assert.Nil(t, err) assert.True(t, len(token) > 0) }) }
[ "\"API_BASE_URL\"", "\"CLIENT_ID\"", "\"CLIENT_SECRET\"" ]
[]
[ "API_BASE_URL", "CLIENT_SECRET", "CLIENT_ID" ]
[]
["API_BASE_URL", "CLIENT_SECRET", "CLIENT_ID"]
go
3
0
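The Go test above skips under `go test -short` and reads API_BASE_URL, CLIENT_ID and CLIENT_SECRET from the environment. For comparison, the same guard expressed with Python's unittest (the client call itself is stubbed out, since the real client is the Go package):

    import os
    import unittest

    CREDS = ("API_BASE_URL", "CLIENT_ID", "CLIENT_SECRET")

    @unittest.skipUnless(all(os.getenv(v) for v in CREDS),
                         "integration credentials not set in the environment")
    class GetTokenTest(unittest.TestCase):
        def test_returns_token(self):
            base_url = os.environ["API_BASE_URL"]
            # a real test would construct the API client and request a token here
            self.assertTrue(base_url.startswith("http"))

    if __name__ == "__main__":
        unittest.main()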
codeanalysis/codeanalysis_test.go
package codeanalysis import ( "testing" "github.com/stvp/assert" "fmt" log "github.com/sirupsen/logrus" "os" ) var gopathDir = os.Getenv("GOPATH") var testdataPath = gopathDir + "/src/git.oschina.net/jscode/go-package-plantuml/testdata" func Test_findGoPackageNameInDirPath(t *testing.T) { assert.Equal(t, "b", findGoPackageNameInDirPath(testdataPath + "/b")) assert.Equal(t, "sub2", findGoPackageNameInDirPath(testdataPath + "/b/sub")) } func Test_InterfacesSign(t *testing.T) { config := Config{ CodeDir: testdataPath + "/a", GopathDir :gopathDir, IgnoreDirs:[]string{}, } result := AnalysisCode(config) analysisTool1, _ := result.(*analysisTool) assert.Equal(t, 1, len(analysisTool1.interfaceMetas)) interfaceMeta := analysisTool1.interfaceMetas[0] assert.Equal(t, "IA", interfaceMeta.Name) assert.Equal(t, testdataPath + "/a/a.go", interfaceMeta.FilePath) assert.Equal(t, "git.oschina.net/jscode/go-package-plantuml/testdata/a", interfaceMeta.PackagePath, "error in interfaceMeta") fmt.Println(interfaceMeta.MethodSigns) assert.Equal(t, 4, len(interfaceMeta.MethodSigns)) assert.Equal(t, "Add()", interfaceMeta.MethodSigns[0]) assert.Equal(t, "Add2(int)int", interfaceMeta.MethodSigns[1]) assert.Equal(t, "Add3(int,int,int)(int,int)", interfaceMeta.MethodSigns[2]) assert.Equal(t, "Add4(int)int", interfaceMeta.MethodSigns[3]) assert.Equal(t, 1, len(analysisTool1.structMetas)) structmeta := analysisTool1.structMetas[0] assert.Equal(t, "SA", structmeta.Name) assert.Equal(t, testdataPath + "/a/a.go", structmeta.FilePath) assert.Equal(t, "git.oschina.net/jscode/go-package-plantuml/testdata/a", structmeta.PackagePath, "error in structmeta") fmt.Println(structmeta.MethodSigns) assert.Equal(t, 4, len(structmeta.MethodSigns)) assert.Equal(t, "Add()", structmeta.MethodSigns[0]) assert.Equal(t, "Add2(int)int", structmeta.MethodSigns[1]) assert.Equal(t, "Add3(int,int,int)(int,int)", structmeta.MethodSigns[2]) assert.Equal(t, "Add4(int)int", structmeta.MethodSigns[3]) structMetas := analysisTool1.findInterfaceImpls(interfaceMeta) assert.Equal(t, 1, len(structMetas)) assert.Equal(t, "SA", structMetas[0].Name) } /** * Test that Struct method declarations do not use pointer receivers * Test Struct references by package path; path references using aliases; standard-library imports * Test import . package paths */ func Test_complex(t *testing.T) { log.SetLevel(log.WarnLevel) config := Config{ CodeDir: testdataPath + "/b", GopathDir :gopathDir, IgnoreDirs:[]string{}, } result := AnalysisCode(config) analysisTool1, _ := result.(*analysisTool) assert.Equal(t, 1, len(analysisTool1.interfaceMetas)) interfaceMeta := analysisTool1.interfaceMetas[0] assert.Equal(t, "IA", interfaceMeta.Name) assert.Equal(t, "Add(git.oschina.net/jscode/go-package-plantuml/testdata/b/sub.SubSA,sync.Locker,git.oschina.net/jscode/go-package-plantuml/testdata/b.B,git.oschina.net/jscode/go-package-plantuml/testdata/b/suba.SubSa1)", interfaceMeta.MethodSigns[0]) assert.Equal(t, 4, len(analysisTool1.structMetas)) structMetas := analysisTool1.findInterfaceImpls(interfaceMeta) assert.Equal(t, 1, len(structMetas)) assert.Equal(t, "SB", structMetas[0].Name) } func Test_uml(t *testing.T) { config := Config{ CodeDir: testdataPath + "/uml", GopathDir :gopathDir, IgnoreDirs:[]string{}, } result := AnalysisCode(config) analysisTool1, _ := result.(*analysisTool) fmt.Println(analysisTool1.UML()) assert.Equal(t, 3, len(analysisTool1.interfaceMetas)) interfaceMeta := analysisTool1.interfaceMetas[0] assert.Equal(t, "namespace git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml {\n interface IA {\n Add()\n} \n}", interfaceMeta.UML) assert.Equal(t, 3, len(analysisTool1.structMetas)) structMeta1 := analysisTool1.structMetas[0] assert.Equal(t, "namespace git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml {\n class SA {\n a int\n b sync.Mutex\n c sub2.Sub2A\n m map[string]sub2.Sub2A\n} \n}", structMeta1.UML) interfaceImpls := analysisTool1.findInterfaceImpls(interfaceMeta) assert.Equal(t, 2, len(interfaceImpls)) assert.Equal(t, "git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml.IA <|- git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml.SA\n", interfaceImpls[0].implInterfaceUML(interfaceMeta)) assert.Equal(t, 2, len(analysisTool1.dependencyRelations)) assert.Equal(t, "git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml.SA ---> git.oschina.net\\\\jscode\\\\go_package_plantuml\\\\testdata\\\\uml\\\\sub2.Sub2A : c", analysisTool1.dependencyRelations[0].uml) }
[ "\"GOPATH\"" ]
[]
[ "GOPATH" ]
[]
["GOPATH"]
go
1
0
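The signature strings asserted in Test_InterfacesSign flatten a Go method to `name(paramtypes)` followed by a bare return type, or a parenthesized list when there are several results. A toy Python formatter reproducing that format (types passed as plain strings; this only illustrates the convention, not the tool's parser):

    def method_sign(name, params, results):
        """Render a Go method as name(params)results, as the tests expect."""
        sign = "%s(%s)" % (name, ",".join(params))
        if len(results) == 1:
            sign += results[0]
        elif len(results) > 1:
            sign += "(%s)" % ",".join(results)
        return sign

    assert method_sign("Add", [], []) == "Add()"
    assert method_sign("Add2", ["int"], ["int"]) == "Add2(int)int"
    assert method_sign("Add3", ["int"] * 3, ["int", "int"]) == "Add3(int,int,int)(int,int)"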
tests/utils.py
import asyncio import contextlib import email.utils import functools import logging import os import time import unittest DATE = email.utils.formatdate(usegmt=True) class GeneratorTestCase(unittest.TestCase): def assertGeneratorRunning(self, gen): """ Check that a generator-based coroutine hasn't completed yet. """ next(gen) def assertGeneratorReturns(self, gen): """ Check that a generator-based coroutine completes and return its value. """ with self.assertRaises(StopIteration) as raised: next(gen) return raised.exception.value class AsyncioTestCase(unittest.TestCase): """ Base class for tests that sets up an isolated event loop for each test. """ def __init_subclass__(cls, **kwargs): """ Convert test coroutines to test functions. This supports asychronous tests transparently. """ super().__init_subclass__(**kwargs) for name in unittest.defaultTestLoader.getTestCaseNames(cls): test = getattr(cls, name) if asyncio.iscoroutinefunction(test): setattr(cls, name, cls.convert_async_to_sync(test)) @staticmethod def convert_async_to_sync(test): """ Convert a test coroutine to a test function. """ @functools.wraps(test) def test_func(self, *args, **kwargs): return self.loop.run_until_complete(test(self, *args, **kwargs)) return test_func def setUp(self): super().setUp() self.loop = asyncio.new_event_loop() asyncio.set_event_loop(self.loop) def tearDown(self): self.loop.close() super().tearDown() def run_loop_once(self): # Process callbacks scheduled with call_soon by appending a callback # to stop the event loop then running it until it hits that callback. self.loop.call_soon(self.loop.stop) self.loop.run_forever() @contextlib.contextmanager def assertNoLogs(self, logger="websockets", level=logging.ERROR): """ No message is logged on the given logger with at least the given level. """ with self.assertLogs(logger, level) as logs: # We want to test that no log message is emitted # but assertLogs expects at least one log message. logging.getLogger(logger).log(level, "dummy") yield level_name = logging.getLevelName(level) self.assertEqual(logs.output, [f"{level_name}:{logger}:dummy"]) def assertDeprecationWarnings(self, recorded_warnings, expected_warnings): """ Check recorded deprecation warnings match a list of expected messages. """ self.assertEqual(len(recorded_warnings), len(expected_warnings)) for recorded, expected in zip(recorded_warnings, expected_warnings): actual = recorded.message self.assertEqual(str(actual), expected) self.assertEqual(type(actual), DeprecationWarning) # Unit for timeouts. May be increased on slow machines by setting the # WEBSOCKETS_TESTS_TIMEOUT_FACTOR environment variable. MS = 0.001 * int(os.environ.get("WEBSOCKETS_TESTS_TIMEOUT_FACTOR", 1)) # asyncio's debug mode has a 10x performance penalty for this test suite. if os.environ.get("PYTHONASYNCIODEBUG"): # pragma: no cover MS *= 10 # Ensure that timeouts are larger than the clock's resolution (for Windows). MS = max(MS, 2.5 * time.get_clock_info("monotonic").resolution)
[]
[]
[ "WEBSOCKETS_TESTS_TIMEOUT_FACTOR", "PYTHONASYNCIODEBUG" ]
[]
["WEBSOCKETS_TESTS_TIMEOUT_FACTOR", "PYTHONASYNCIODEBUG"]
python
2
0
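AsyncioTestCase above rewrites coroutine test methods into synchronous ones at subclass-creation time. The core of that `__init_subclass__` trick in isolation, with a per-test event loop (a stripped-down sketch, not the full fixture with setUp/tearDown):

    import asyncio
    import functools
    import unittest

    class AsyncTestCase(unittest.TestCase):
        def __init_subclass__(cls, **kwargs):
            super().__init_subclass__(**kwargs)
            # wrap every coroutine test method in a synchronous driver
            for name, attr in list(vars(cls).items()):
                if name.startswith("test") and asyncio.iscoroutinefunction(attr):
                    setattr(cls, name, cls._to_sync(attr))

        @staticmethod
        def _to_sync(test):
            @functools.wraps(test)
            def wrapper(self, *args, **kwargs):
                loop = asyncio.new_event_loop()
                try:
                    return loop.run_until_complete(test(self, *args, **kwargs))
                finally:
                    loop.close()
            return wrapper

    class DemoTest(AsyncTestCase):
        async def test_sleep(self):
            await asyncio.sleep(0)

    if __name__ == "__main__":
        unittest.main()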
tests/server_helpers.go
// This package is a set of convenience helpers and structs to make integration testing easier.
package tests

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"os"
	"regexp"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/influxdata/influxdb/cmd/influxd/run"
	"github.com/influxdata/influxdb/models"
	"github.com/influxdata/influxdb/services/httpd"
	"github.com/influxdata/influxdb/services/meta"
	"github.com/influxdata/influxdb/toml"
)

// Server represents a test wrapper for run.Server.
type Server interface {
	URL() string
	Open() error
	SetLogOutput(w io.Writer)
	Close()
	Closed() bool

	CreateDatabase(db string) (*meta.DatabaseInfo, error)
	CreateDatabaseAndRetentionPolicy(db string, rp *meta.RetentionPolicySpec, makeDefault bool) error
	CreateSubscription(database, rp, name, mode string, destinations []string) error
	Reset() error

	Query(query string) (results string, err error)
	QueryWithParams(query string, values url.Values) (results string, err error)

	Write(db, rp, body string, params url.Values) (results string, err error)
	MustWrite(db, rp, body string, params url.Values) string

	WritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error
}

// RemoteServer is a Server that is accessed remotely via the HTTP API.
type RemoteServer struct {
	*client
	url string
}

func (s *RemoteServer) URL() string {
	return s.url
}

func (s *RemoteServer) Open() error {
	resp, err := http.Get(s.URL() + "/ping")
	if err != nil {
		return err
	}
	body := strings.TrimSpace(string(MustReadAll(resp.Body)))
	if resp.StatusCode != http.StatusNoContent {
		return fmt.Errorf("unexpected status code: code=%d, body=%s", resp.StatusCode, body)
	}
	return nil
}

func (s *RemoteServer) Close() {
	// ignore, we can't shutdown a remote server
}

func (s *RemoteServer) SetLogOutput(w io.Writer) {
	// ignore, we can't change the logging of a remote server
}

func (s *RemoteServer) Closed() bool {
	return true
}

func (s *RemoteServer) CreateDatabase(db string) (*meta.DatabaseInfo, error) {
	stmt := fmt.Sprintf("CREATE+DATABASE+%s", db)
	_, err := s.HTTPPost(s.URL()+"/query?q="+stmt, nil)
	if err != nil {
		return nil, err
	}
	return &meta.DatabaseInfo{}, nil
}

func (s *RemoteServer) CreateDatabaseAndRetentionPolicy(db string, rp *meta.RetentionPolicySpec, makeDefault bool) error {
	if _, err := s.CreateDatabase(db); err != nil {
		return err
	}

	stmt := fmt.Sprintf("CREATE+RETENTION+POLICY+%s+ON+\"%s\"+DURATION+%s+REPLICATION+%v+SHARD+DURATION+%s",
		rp.Name, db, rp.Duration, *rp.ReplicaN, rp.ShardGroupDuration)
	if makeDefault {
		stmt += "+DEFAULT"
	}

	_, err := s.HTTPPost(s.URL()+"/query?q="+stmt, nil)
	return err
}

func (s *RemoteServer) CreateSubscription(database, rp, name, mode string, destinations []string) error {
	dests := make([]string, 0, len(destinations))
	for _, d := range destinations {
		dests = append(dests, "'"+d+"'")
	}

	stmt := fmt.Sprintf("CREATE+SUBSCRIPTION+%s+ON+\"%s\".\"%s\"+DESTINATIONS+%v+%s",
		name, database, rp, mode, strings.Join(dests, ","))
	_, err := s.HTTPPost(s.URL()+"/query?q="+stmt, nil)
	return err
}

func (s *RemoteServer) DropDatabase(db string) error {
	stmt := fmt.Sprintf("DROP+DATABASE+%s", db)
	_, err := s.HTTPPost(s.URL()+"/query?q="+stmt, nil)
	return err
}

// Reset attempts to remove all database state by dropping everything.
func (s *RemoteServer) Reset() error {
	stmt := "SHOW+DATABASES"
	results, err := s.HTTPPost(s.URL()+"/query?q="+stmt, nil)
	if err != nil {
		return err
	}

	resp := &httpd.Response{}
	if err := resp.UnmarshalJSON([]byte(results)); err != nil {
		return err
	}

	for _, db := range resp.Results[0].Series[0].Values {
		if err := s.DropDatabase(fmt.Sprintf("%s", db[0])); err != nil {
			return err
		}
	}
	return nil
}

func (s *RemoteServer) WritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error {
	panic("WritePoints not implemented")
}

// NewServer returns a new instance of Server.
func NewServer(c *run.Config) Server {
	buildInfo := &run.BuildInfo{
		Version: "testServer",
		Commit:  "testCommit",
		Branch:  "testBranch",
	}

	// If URL exists, create a server that will run against a remote endpoint.
	if url := os.Getenv("URL"); url != "" {
		s := &RemoteServer{
			url: url,
			client: &client{
				URLFn: func() string { return url },
			},
		}
		if err := s.Reset(); err != nil {
			panic(err.Error())
		}
		return s
	}

	// Otherwise create a local server.
	srv, _ := run.NewServer(c, buildInfo)
	s := LocalServer{
		client: &client{},
		Server: srv,
		Config: c,
	}
	s.client.URLFn = s.URL
	return &s
}

// OpenServer opens a test server.
func OpenServer(c *run.Config) Server {
	s := NewServer(c)
	configureLogging(s)
	if err := s.Open(); err != nil {
		panic(err.Error())
	}
	return s
}

// OpenServerWithVersion opens a test server with a specific version.
func OpenServerWithVersion(c *run.Config, version string) Server {
	// We can't change the version of a remote server. The test needs to
	// be skipped if using this func.
	if RemoteEnabled() {
		panic("OpenServerWithVersion is not supported with a remote server")
	}

	buildInfo := &run.BuildInfo{
		Version: version,
		Commit:  "",
		Branch:  "",
	}
	srv, _ := run.NewServer(c, buildInfo)
	s := LocalServer{
		client: &client{},
		Server: srv,
		Config: c,
	}
	s.client.URLFn = s.URL

	if err := s.Open(); err != nil {
		panic(err.Error())
	}
	configureLogging(&s)

	return &s
}

// OpenDefaultServer opens a test server with a default database & retention policy.
func OpenDefaultServer(c *run.Config) Server {
	s := OpenServer(c)
	if err := s.CreateDatabaseAndRetentionPolicy("db0", newRetentionPolicySpec("rp0", 1, 0), true); err != nil {
		panic(err)
	}
	return s
}

// LocalServer is a Server that is running in-process and can be accessed directly.
type LocalServer struct {
	mu sync.RWMutex
	*run.Server

	*client
	Config *run.Config
}

// Close shuts down the server and removes all temporary paths.
func (s *LocalServer) Close() {
	s.mu.Lock()
	defer s.mu.Unlock()

	if err := s.Server.Close(); err != nil {
		panic(err.Error())
	}
	if err := os.RemoveAll(s.Config.Meta.Dir); err != nil {
		panic(err.Error())
	}
	if err := os.RemoveAll(s.Config.Data.Dir); err != nil {
		panic(err.Error())
	}

	// Nil the server so our deadlock detector goroutine can determine if we completed writes
	// without timing out.
	s.Server = nil
}

func (s *LocalServer) Closed() bool {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.Server == nil
}

// URL returns the base URL for the httpd endpoint.
func (s *LocalServer) URL() string {
	s.mu.RLock()
	defer s.mu.RUnlock()
	for _, service := range s.Services {
		if service, ok := service.(*httpd.Service); ok {
			return "http://" + service.Addr().String()
		}
	}
	panic("httpd server not found in services")
}

func (s *LocalServer) CreateDatabase(db string) (*meta.DatabaseInfo, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.MetaClient.CreateDatabase(db)
}

// CreateDatabaseAndRetentionPolicy will create the database and retention policy.
func (s *LocalServer) CreateDatabaseAndRetentionPolicy(db string, rp *meta.RetentionPolicySpec, makeDefault bool) error {
	s.mu.RLock()
	defer s.mu.RUnlock()

	if _, err := s.MetaClient.CreateDatabase(db); err != nil {
		return err
	} else if _, err := s.MetaClient.CreateRetentionPolicy(db, rp, makeDefault); err != nil {
		return err
	}
	return nil
}

func (s *LocalServer) CreateSubscription(database, rp, name, mode string, destinations []string) error {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.MetaClient.CreateSubscription(database, rp, name, mode, destinations)
}

func (s *LocalServer) DropDatabase(db string) error {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.MetaClient.DropDatabase(db)
}

func (s *LocalServer) Reset() error {
	s.mu.RLock()
	defer s.mu.RUnlock()
	for _, db := range s.MetaClient.Databases() {
		if err := s.DropDatabase(db.Name); err != nil {
			return err
		}
	}
	return nil
}

func (s *LocalServer) WritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, user meta.User, points []models.Point) error {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.PointsWriter.WritePoints(database, retentionPolicy, consistencyLevel, user, points)
}

// client abstracts querying and writing to a Server using HTTP.
type client struct {
	URLFn func() string
}

func (c *client) URL() string {
	return c.URLFn()
}

// Query executes a query against the server and returns the results.
func (s *client) Query(query string) (results string, err error) {
	return s.QueryWithParams(query, nil)
}

// MustQuery executes a query against the server and returns the results. Panic on error.
func (s *client) MustQuery(query string) string {
	results, err := s.Query(query)
	if err != nil {
		panic(err)
	}
	return results
}

// QueryWithParams executes a query against the server with the given URL
// parameters and returns the results.
func (s *client) QueryWithParams(query string, values url.Values) (results string, err error) {
	var v url.Values
	if values == nil {
		v = url.Values{}
	} else {
		v, _ = url.ParseQuery(values.Encode())
	}
	v.Set("q", query)
	return s.HTTPPost(s.URL()+"/query?"+v.Encode(), nil)
}

// MustQueryWithParams executes a query against the server and returns the results. Panic on error.
func (s *client) MustQueryWithParams(query string, values url.Values) string {
	results, err := s.QueryWithParams(query, values)
	if err != nil {
		panic(err)
	}
	return results
}

// HTTPGet makes an HTTP GET request to the server and returns the response.
func (s *client) HTTPGet(url string) (results string, err error) {
	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	body := strings.TrimSpace(string(MustReadAll(resp.Body)))
	switch resp.StatusCode {
	case http.StatusBadRequest:
		if !expectPattern(".*error parsing query.*", body) {
			return "", fmt.Errorf("unexpected status code: code=%d, body=%s", resp.StatusCode, body)
		}
		return body, nil
	case http.StatusOK:
		return body, nil
	default:
		return "", fmt.Errorf("unexpected status code: code=%d, body=%s", resp.StatusCode, body)
	}
}

// HTTPPost makes an HTTP POST request to the server and returns the response.
func (s *client) HTTPPost(url string, content []byte) (results string, err error) {
	buf := bytes.NewBuffer(content)
	resp, err := http.Post(url, "application/json", buf)
	if err != nil {
		return "", err
	}
	body := strings.TrimSpace(string(MustReadAll(resp.Body)))
	switch resp.StatusCode {
	case http.StatusBadRequest:
		if !expectPattern(".*error parsing query.*", body) {
			return "", fmt.Errorf("unexpected status code: code=%d, body=%s", resp.StatusCode, body)
		}
		return body, nil
	case http.StatusOK, http.StatusNoContent:
		return body, nil
	default:
		return "", fmt.Errorf("unexpected status code: code=%d, body=%s", resp.StatusCode, body)
	}
}

// WriteError wraps the status code and body of a failed write request.
type WriteError struct {
	body       string
	statusCode int
}

func (wr WriteError) StatusCode() int {
	return wr.statusCode
}

func (wr WriteError) Body() string {
	return wr.body
}

func (wr WriteError) Error() string {
	return fmt.Sprintf("invalid status code: code=%d, body=%s", wr.statusCode, wr.body)
}

// Write executes a write against the server and returns the results.
func (s *client) Write(db, rp, body string, params url.Values) (results string, err error) {
	if params == nil {
		params = url.Values{}
	}
	if params.Get("db") == "" {
		params.Set("db", db)
	}
	if params.Get("rp") == "" {
		params.Set("rp", rp)
	}
	resp, err := http.Post(s.URL()+"/write?"+params.Encode(), "", strings.NewReader(body))
	if err != nil {
		return "", err
	} else if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusNoContent {
		return "", WriteError{statusCode: resp.StatusCode, body: string(MustReadAll(resp.Body))}
	}
	return string(MustReadAll(resp.Body)), nil
}

// MustWrite executes a write to the server. Panic on error.
func (s *client) MustWrite(db, rp, body string, params url.Values) string {
	results, err := s.Write(db, rp, body, params)
	if err != nil {
		panic(err)
	}
	return results
}

// NewConfig returns the default config with temporary paths.
func NewConfig() *run.Config {
	c := run.NewConfig()
	c.BindAddress = "127.0.0.1:0"
	c.ReportingDisabled = true
	c.Coordinator.WriteTimeout = toml.Duration(30 * time.Second)
	c.Meta.Dir = MustTempFile()

	if !testing.Verbose() {
		c.Meta.LoggingEnabled = false
	}

	c.Data.Dir = MustTempFile()
	c.Data.WALDir = MustTempFile()

	indexVersion := os.Getenv("INFLUXDB_DATA_INDEX_VERSION")
	if indexVersion != "" {
		c.Data.Index = indexVersion
	}

	c.HTTPD.Enabled = true
	c.HTTPD.BindAddress = "127.0.0.1:0"
	c.HTTPD.LogEnabled = testing.Verbose()

	c.Monitor.StoreEnabled = false

	return c
}

func newRetentionPolicySpec(name string, rf int, duration time.Duration) *meta.RetentionPolicySpec {
	return &meta.RetentionPolicySpec{Name: name, ReplicaN: &rf, Duration: &duration}
}

// maxInt64 returns the JSON encoding of the largest int64 value.
func maxInt64() string {
	maxInt64, _ := json.Marshal(int64(^uint64(0) >> 1))
	return string(maxInt64)
}

func now() time.Time {
	return time.Now().UTC()
}

func yesterday() time.Time {
	return now().Add(-1 * time.Hour * 24)
}

func mustParseTime(layout, value string) time.Time {
	tm, err := time.Parse(layout, value)
	if err != nil {
		panic(err)
	}
	return tm
}

func mustParseLocation(tzname string) *time.Location {
	loc, err := time.LoadLocation(tzname)
	if err != nil {
		panic(err)
	}
	return loc
}

var LosAngeles = mustParseLocation("America/Los_Angeles")

// MustReadAll reads r. Panic on error.
func MustReadAll(r io.Reader) []byte {
	b, err := ioutil.ReadAll(r)
	if err != nil {
		panic(err)
	}
	return b
}

// MustTempFile returns a path to a temporary file.
func MustTempFile() string {
	f, err := ioutil.TempFile("", "influxd-")
	if err != nil {
		panic(err)
	}
	f.Close()
	os.Remove(f.Name())
	return f.Name()
}

// RemoteEnabled reports whether tests should run against a remote server,
// as signalled by the URL environment variable.
func RemoteEnabled() bool {
	return os.Getenv("URL") != ""
}

func expectPattern(exp, act string) bool {
	return regexp.MustCompile(exp).MatchString(act)
}

type Query struct {
	name     string
	command  string
	params   url.Values
	exp, act string
	pattern  bool
	skip     bool
	repeat   int
	once     bool
}

// Execute runs the command and returns an err if it fails.
func (q *Query) Execute(s Server) (err error) {
	if q.params == nil {
		q.act, err = s.Query(q.command)
		return
	}
	q.act, err = s.QueryWithParams(q.command, q.params)
	return
}

func (q *Query) success() bool {
	if q.pattern {
		return expectPattern(q.exp, q.act)
	}
	return q.exp == q.act
}

func (q *Query) Error(err error) string {
	return fmt.Sprintf("%s: %v", q.name, err)
}

func (q *Query) failureMessage() string {
	return fmt.Sprintf("%s: unexpected results\nquery: %s\nparams: %v\nexp: %s\nactual: %s\n", q.name, q.command, q.params, q.exp, q.act)
}

type Write struct {
	db   string
	rp   string
	data string
}

func (w *Write) duplicate() *Write {
	return &Write{
		db:   w.db,
		rp:   w.rp,
		data: w.data,
	}
}

type Writes []*Write

func (a Writes) duplicate() Writes {
	writes := make(Writes, 0, len(a))
	for _, w := range a {
		writes = append(writes, w.duplicate())
	}
	return writes
}

type Tests map[string]Test

type Test struct {
	initialized bool
	writes      Writes
	params      url.Values
	db          string
	rp          string
	exp         string
	queries     []*Query
}

func NewTest(db, rp string) Test {
	return Test{
		db: db,
		rp: rp,
	}
}

func (t Test) duplicate() Test {
	test := Test{
		initialized: t.initialized,
		writes:      t.writes.duplicate(),
		db:          t.db,
		rp:          t.rp,
		exp:         t.exp,
		queries:     make([]*Query, len(t.queries)),
	}

	if t.params != nil {
		// Copy the source params into the duplicate rather than clobbering them.
		test.params = url.Values{}
		for k, a := range t.params {
			vals := make([]string, len(a))
			copy(vals, a)
			test.params[k] = vals
		}
	}

	copy(test.queries, t.queries)
	return test
}

func (t *Test) addQueries(q ...*Query) {
	t.queries = append(t.queries, q...)
}

func (t *Test) database() string {
	if t.db != "" {
		return t.db
	}
	return "db0"
}

func (t *Test) retentionPolicy() string {
	if t.rp != "" {
		return t.rp
	}
	return "default"
}

func (t *Test) init(s Server) error {
	if len(t.writes) == 0 || t.initialized {
		return nil
	}

	if t.db == "" {
		t.db = "db0"
	}
	if t.rp == "" {
		t.rp = "rp0"
	}

	if err := writeTestData(s, t); err != nil {
		return err
	}

	t.initialized = true
	return nil
}

func writeTestData(s Server, t *Test) error {
	for i, w := range t.writes {
		if w.db == "" {
			w.db = t.database()
		}
		if w.rp == "" {
			w.rp = t.retentionPolicy()
		}

		if err := s.CreateDatabaseAndRetentionPolicy(w.db, newRetentionPolicySpec(w.rp, 1, 0), true); err != nil {
			return err
		}

		if res, err := s.Write(w.db, w.rp, w.data, t.params); err != nil {
			return fmt.Errorf("write #%d: %s", i, err)
		} else if t.exp != res {
			return fmt.Errorf("unexpected results\nexp: %s\ngot: %s\n", t.exp, res)
		}
	}
	return nil
}

func configureLogging(s Server) {
	// Set the logger to discard unless verbose is on.
	if !testing.Verbose() {
		s.SetLogOutput(ioutil.Discard)
	}
}
[ "\"URL\"", "\"INFLUXDB_DATA_INDEX_VERSION\"", "\"URL\"" ]
[]
[ "URL", "INFLUXDB_DATA_INDEX_VERSION" ]
[]
["URL", "INFLUXDB_DATA_INDEX_VERSION"]
go
2
0
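For orientation, here is a minimal sketch of how the helpers above compose in an integration test. It is an editor's illustration, not code from the original suite: it assumes it lives in the same tests package, and the measurement ("cpu"), tag ("host"), field ("value"), and expected response pattern are all hypothetical.

// Sketch only: exercises OpenDefaultServer, MustWrite, Query, and expectPattern
// from tests/server_helpers.go. All data names below are made up.
func TestHelpersSketch_WriteAndQuery(t *testing.T) {
	if RemoteEnabled() {
		t.Skip("sketch assumes a local in-process server")
	}
	s := OpenDefaultServer(NewConfig()) // opens a server with db0/rp0 precreated
	defer s.Close()

	// Write a single point in line protocol with an explicit timestamp.
	s.MustWrite("db0", "rp0", fmt.Sprintf("cpu,host=a value=1 %d", now().UnixNano()), nil)

	// Read it back; the JSON response body should mention the measurement.
	res, err := s.Query(`SELECT "value" FROM "db0"."rp0"."cpu"`)
	if err != nil {
		t.Fatal(err)
	}
	if !expectPattern(`"name":"cpu"`, res) {
		t.Fatalf("unexpected response: %s", res)
	}
}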
android/config.go
// Copyright 2015 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package android // This is the primary location to write and read all configuration values and // product variables necessary for soong_build's operation. import ( "encoding/json" "errors" "fmt" "io/ioutil" "os" "path/filepath" "runtime" "strconv" "strings" "sync" "github.com/google/blueprint" "github.com/google/blueprint/bootstrap" "github.com/google/blueprint/pathtools" "github.com/google/blueprint/proptools" "android/soong/android/soongconfig" "android/soong/remoteexec" ) // Bool re-exports proptools.Bool for the android package. var Bool = proptools.Bool // String re-exports proptools.String for the android package. var String = proptools.String // StringDefault re-exports proptools.StringDefault for the android package. var StringDefault = proptools.StringDefault // FutureApiLevelInt is a placeholder constant for unreleased API levels. const FutureApiLevelInt = 10000 // FutureApiLevel represents unreleased API levels. var FutureApiLevel = ApiLevel{ value: "current", number: FutureApiLevelInt, isPreview: true, } // The product variables file name, containing product config from Kati. const productVariablesFileName = "soong.variables" // A Config object represents the entire build configuration for Android. type Config struct { *config } // BuildDir returns the build output directory for the configuration. func (c Config) BuildDir() string { return c.buildDir } func (c Config) NinjaBuildDir() string { return c.buildDir } func (c Config) DebugCompilation() bool { return false // Never compile Go code in the main build for debugging } func (c Config) SrcDir() string { return c.srcDir } // A DeviceConfig object represents the configuration for a particular device // being built. For now there will only be one of these, but in the future there // may be multiple devices being built. type DeviceConfig struct { *deviceConfig } // VendorConfig represents the configuration for vendor-specific behavior. type VendorConfig soongconfig.SoongConfig // Definition of general build configuration for soong_build. Some of these // product configuration values are read from Kati-generated soong.variables. type config struct { // Options configurable with soong.variables productVariables productVariables // Only available on configs created by TestConfig TestProductVariables *productVariables // A specialized context object for Bazel/Soong mixed builds and migration // purposes. BazelContext BazelContext ProductVariablesFileName string Targets map[OsType][]Target BuildOSTarget Target // the Target for tools run on the build machine BuildOSCommonTarget Target // the Target for common (java) tools run on the build machine AndroidCommonTarget Target // the Target for common modules for the Android device AndroidFirstDeviceTarget Target // the first Target for modules for the Android device // multilibConflicts for an ArchType is true if there is earlier configured // device architecture with the same multilib value. 
multilibConflicts map[ArchType]bool deviceConfig *deviceConfig srcDir string // the path of the root source directory buildDir string // the path of the build output directory moduleListFile string // the path to the file which lists blueprint files to parse. env map[string]string envLock sync.Mutex envDeps map[string]string envFrozen bool // Changes behavior based on whether Kati runs after soong_build, or if soong_build // runs standalone. katiEnabled bool captureBuild bool // true for tests, saves build parameters for each module ignoreEnvironment bool // true for tests, returns empty from all Getenv calls stopBefore bootstrap.StopBefore fs pathtools.FileSystem mockBpList string bp2buildPackageConfig Bp2BuildConfig bp2buildModuleTypeConfig map[string]bool // If testAllowNonExistentPaths is true then PathForSource and PathForModuleSrc won't error // in tests when a path doesn't exist. TestAllowNonExistentPaths bool // The list of files that when changed, must invalidate soong_build to // regenerate build.ninja. ninjaFileDepsSet sync.Map OncePer } type deviceConfig struct { config *config OncePer } type jsonConfigurable interface { SetDefaultConfig() } func loadConfig(config *config) error { return loadFromConfigFile(&config.productVariables, absolutePath(config.ProductVariablesFileName)) } // loadFromConfigFile loads and decodes configuration options from a JSON file // in the current working directory. func loadFromConfigFile(configurable jsonConfigurable, filename string) error { // Try to open the file configFileReader, err := os.Open(filename) defer configFileReader.Close() if os.IsNotExist(err) { // Need to create a file, so that blueprint & ninja don't get in // a dependency tracking loop. // Make a file-configurable-options with defaults, write it out using // a json writer. configurable.SetDefaultConfig() err = saveToConfigFile(configurable, filename) if err != nil { return err } } else if err != nil { return fmt.Errorf("config file: could not open %s: %s", filename, err.Error()) } else { // Make a decoder for it jsonDecoder := json.NewDecoder(configFileReader) err = jsonDecoder.Decode(configurable) if err != nil { return fmt.Errorf("config file: %s did not parse correctly: %s", filename, err.Error()) } } // No error return nil } // atomically writes the config file in case two copies of soong_build are running simultaneously // (for example, docs generation and ninja manifest generation) func saveToConfigFile(config jsonConfigurable, filename string) error { data, err := json.MarshalIndent(&config, "", " ") if err != nil { return fmt.Errorf("cannot marshal config data: %s", err.Error()) } f, err := ioutil.TempFile(filepath.Dir(filename), "config") if err != nil { return fmt.Errorf("cannot create empty config file %s: %s", filename, err.Error()) } defer os.Remove(f.Name()) defer f.Close() _, err = f.Write(data) if err != nil { return fmt.Errorf("default config file: %s could not be written: %s", filename, err.Error()) } _, err = f.WriteString("\n") if err != nil { return fmt.Errorf("default config file: %s could not be written: %s", filename, err.Error()) } f.Close() os.Rename(f.Name(), filename) return nil } // NullConfig returns a mostly empty Config for use by standalone tools like dexpreopt_gen that // use the android package. func NullConfig(buildDir string) Config { return Config{ config: &config{ buildDir: buildDir, fs: pathtools.OsFs, }, } } // TestConfig returns a Config object for testing. 
func TestConfig(buildDir string, env map[string]string, bp string, fs map[string][]byte) Config { envCopy := make(map[string]string) for k, v := range env { envCopy[k] = v } // Copy the real PATH value to the test environment, it's needed by // NonHermeticHostSystemTool() used in x86_darwin_host.go envCopy["PATH"] = os.Getenv("PATH") config := &config{ productVariables: productVariables{ DeviceName: stringPtr("test_device"), Platform_sdk_version: intPtr(30), Platform_sdk_codename: stringPtr("S"), Platform_version_active_codenames: []string{"S"}, DeviceSystemSdkVersions: []string{"14", "15"}, Platform_systemsdk_versions: []string{"29", "30"}, AAPTConfig: []string{"normal", "large", "xlarge", "hdpi", "xhdpi", "xxhdpi"}, AAPTPreferredConfig: stringPtr("xhdpi"), AAPTCharacteristics: stringPtr("nosdcard"), AAPTPrebuiltDPI: []string{"xhdpi", "xxhdpi"}, UncompressPrivAppDex: boolPtr(true), ShippingApiLevel: stringPtr("30"), }, buildDir: buildDir, captureBuild: true, env: envCopy, // Set testAllowNonExistentPaths so that test contexts don't need to specify every path // passed to PathForSource or PathForModuleSrc. TestAllowNonExistentPaths: true, BazelContext: noopBazelContext{}, } config.deviceConfig = &deviceConfig{ config: config, } config.TestProductVariables = &config.productVariables config.mockFileSystem(bp, fs) config.bp2buildModuleTypeConfig = map[string]bool{} return Config{config} } func fuchsiaTargets() map[OsType][]Target { return map[OsType][]Target{ Fuchsia: { {Fuchsia, Arch{ArchType: Arm64, ArchVariant: "", Abi: []string{"arm64-v8a"}}, NativeBridgeDisabled, "", "", false}, }, BuildOs: { {BuildOs, Arch{ArchType: X86_64}, NativeBridgeDisabled, "", "", false}, }, } } var PrepareForTestSetDeviceToFuchsia = FixtureModifyConfig(func(config Config) { config.Targets = fuchsiaTargets() }) func modifyTestConfigToSupportArchMutator(testConfig Config) { config := testConfig.config config.Targets = map[OsType][]Target{ Android: []Target{ {Android, Arch{ArchType: Arm64, ArchVariant: "armv8-a", Abi: []string{"arm64-v8a"}}, NativeBridgeDisabled, "", "", false}, {Android, Arch{ArchType: Arm, ArchVariant: "armv7-a-neon", Abi: []string{"armeabi-v7a"}}, NativeBridgeDisabled, "", "", false}, }, BuildOs: []Target{ {BuildOs, Arch{ArchType: X86_64}, NativeBridgeDisabled, "", "", false}, {BuildOs, Arch{ArchType: X86}, NativeBridgeDisabled, "", "", false}, }, } if runtime.GOOS == "darwin" { config.Targets[BuildOs] = config.Targets[BuildOs][:1] } config.BuildOSTarget = config.Targets[BuildOs][0] config.BuildOSCommonTarget = getCommonTargets(config.Targets[BuildOs])[0] config.AndroidCommonTarget = getCommonTargets(config.Targets[Android])[0] config.AndroidFirstDeviceTarget = firstTarget(config.Targets[Android], "lib64", "lib32")[0] config.TestProductVariables.DeviceArch = proptools.StringPtr("arm64") config.TestProductVariables.DeviceArchVariant = proptools.StringPtr("armv8-a") config.TestProductVariables.DeviceSecondaryArch = proptools.StringPtr("arm") config.TestProductVariables.DeviceSecondaryArchVariant = proptools.StringPtr("armv7-a-neon") } // TestArchConfig returns a Config object suitable for using for tests that // need to run the arch mutator. func TestArchConfig(buildDir string, env map[string]string, bp string, fs map[string][]byte) Config { testConfig := TestConfig(buildDir, env, bp, fs) modifyTestConfigToSupportArchMutator(testConfig) return testConfig } // ConfigForAdditionalRun is a config object which is "reset" for another // bootstrap run. Only per-run data is reset. 
Data which needs to persist across // multiple runs in the same program execution is carried over (such as Bazel // context or environment deps). func ConfigForAdditionalRun(c Config) (Config, error) { newConfig, err := NewConfig(c.srcDir, c.buildDir, c.moduleListFile, c.env) if err != nil { return Config{}, err } newConfig.BazelContext = c.BazelContext newConfig.envDeps = c.envDeps return newConfig, nil } // NewConfig creates a new Config object. The srcDir argument specifies the path // to the root source directory. It also loads the config file, if found. func NewConfig(srcDir, buildDir string, moduleListFile string, availableEnv map[string]string) (Config, error) { // Make a config with default options. config := &config{ ProductVariablesFileName: filepath.Join(buildDir, productVariablesFileName), env: availableEnv, srcDir: srcDir, buildDir: buildDir, multilibConflicts: make(map[ArchType]bool), moduleListFile: moduleListFile, fs: pathtools.NewOsFs(absSrcDir), } config.deviceConfig = &deviceConfig{ config: config, } // Soundness check of the build and source directories. This won't catch strange // configurations with symlinks, but at least checks the obvious case. absBuildDir, err := filepath.Abs(buildDir) if err != nil { return Config{}, err } absSrcDir, err := filepath.Abs(srcDir) if err != nil { return Config{}, err } if strings.HasPrefix(absSrcDir, absBuildDir) { return Config{}, fmt.Errorf("Build dir must not contain source directory") } // Load any configurable options from the configuration file err = loadConfig(config) if err != nil { return Config{}, err } KatiEnabledMarkerFile := filepath.Join(buildDir, ".soong.kati_enabled") if _, err := os.Stat(absolutePath(KatiEnabledMarkerFile)); err == nil { config.katiEnabled = true } // Sets up the map of target OSes to the finer grained compilation targets // that are configured from the product variables. targets, err := decodeTargetProductVariables(config) if err != nil { return Config{}, err } // Make the CommonOS OsType available for all products. targets[CommonOS] = []Target{commonTargetMap[CommonOS.Name]} var archConfig []archConfig if config.NdkAbis() { archConfig = getNdkAbisConfig() } else if config.AmlAbis() { archConfig = getAmlAbisConfig() } if archConfig != nil { androidTargets, err := decodeArchSettings(Android, archConfig) if err != nil { return Config{}, err } targets[Android] = androidTargets } multilib := make(map[string]bool) for _, target := range targets[Android] { if seen := multilib[target.Arch.ArchType.Multilib]; seen { config.multilibConflicts[target.Arch.ArchType] = true } multilib[target.Arch.ArchType.Multilib] = true } // Map of OS to compilation targets. config.Targets = targets // Compilation targets for host tools. config.BuildOSTarget = config.Targets[BuildOs][0] config.BuildOSCommonTarget = getCommonTargets(config.Targets[BuildOs])[0] // Compilation targets for Android. 
if len(config.Targets[Android]) > 0 { config.AndroidCommonTarget = getCommonTargets(config.Targets[Android])[0] config.AndroidFirstDeviceTarget = firstTarget(config.Targets[Android], "lib64", "lib32")[0] } if Bool(config.productVariables.GcovCoverage) && Bool(config.productVariables.ClangCoverage) { return Config{}, fmt.Errorf("GcovCoverage and ClangCoverage cannot both be set") } config.productVariables.Native_coverage = proptools.BoolPtr( Bool(config.productVariables.GcovCoverage) || Bool(config.productVariables.ClangCoverage)) config.BazelContext, err = NewBazelContext(config) config.bp2buildPackageConfig = bp2buildDefaultConfig config.bp2buildModuleTypeConfig = make(map[string]bool) return Config{config}, err } // mockFileSystem replaces all reads with accesses to the provided map of // filenames to contents stored as a byte slice. func (c *config) mockFileSystem(bp string, fs map[string][]byte) { mockFS := map[string][]byte{} if _, exists := mockFS["Android.bp"]; !exists { mockFS["Android.bp"] = []byte(bp) } for k, v := range fs { mockFS[k] = v } // no module list file specified; find every file named Blueprints or Android.bp pathsToParse := []string{} for candidate := range mockFS { base := filepath.Base(candidate) if base == "Blueprints" || base == "Android.bp" { pathsToParse = append(pathsToParse, candidate) } } if len(pathsToParse) < 1 { panic(fmt.Sprintf("No Blueprint or Android.bp files found in mock filesystem: %v\n", mockFS)) } mockFS[blueprint.MockModuleListFile] = []byte(strings.Join(pathsToParse, "\n")) c.fs = pathtools.MockFs(mockFS) c.mockBpList = blueprint.MockModuleListFile } func (c *config) StopBefore() bootstrap.StopBefore { return c.stopBefore } // SetStopBefore configures soong_build to exit earlier at a specific point. func (c *config) SetStopBefore(stopBefore bootstrap.StopBefore) { c.stopBefore = stopBefore } func (c *config) SetAllowMissingDependencies() { c.productVariables.Allow_missing_dependencies = proptools.BoolPtr(true) } var _ bootstrap.ConfigStopBefore = (*config)(nil) // BlueprintToolLocation returns the directory containing build system tools // from Blueprint, like soong_zip and merge_zips. func (c *config) BlueprintToolLocation() string { return filepath.Join(c.buildDir, "host", c.PrebuiltOS(), "bin") } var _ bootstrap.ConfigBlueprintToolLocation = (*config)(nil) func (c *config) HostToolPath(ctx PathContext, tool string) Path { return PathForOutput(ctx, "host", c.PrebuiltOS(), "bin", tool) } func (c *config) HostJNIToolPath(ctx PathContext, path string) Path { ext := ".so" if runtime.GOOS == "darwin" { ext = ".dylib" } return PathForOutput(ctx, "host", c.PrebuiltOS(), "lib64", path+ext) } func (c *config) HostJavaToolPath(ctx PathContext, path string) Path { return PathForOutput(ctx, "host", c.PrebuiltOS(), "framework", path) } // PrebuiltOS returns the name of the host OS used in prebuilts directories. func (c *config) PrebuiltOS() string { switch runtime.GOOS { case "linux": return "linux-x86" case "darwin": return "darwin-x86" default: panic("Unknown GOOS") } } // GoRoot returns the path to the root directory of the Go toolchain. func (c *config) GoRoot() string { return fmt.Sprintf("%s/prebuilts/go/%s", c.srcDir, c.PrebuiltOS()) } // PrebuiltBuildTool returns the path to a tool in the prebuilts directory containing // checked-in tools, like Kati, Ninja or Toybox, for the current host OS. 
func (c *config) PrebuiltBuildTool(ctx PathContext, tool string) Path { return PathForSource(ctx, "prebuilts/build-tools", c.PrebuiltOS(), "bin", tool) } // CpPreserveSymlinksFlags returns the host-specific flag for the cp(1) command // to preserve symlinks. func (c *config) CpPreserveSymlinksFlags() string { switch runtime.GOOS { case "darwin": return "-R" case "linux": return "-d" default: return "" } } func (c *config) Getenv(key string) string { var val string var exists bool c.envLock.Lock() defer c.envLock.Unlock() if c.envDeps == nil { c.envDeps = make(map[string]string) } if val, exists = c.envDeps[key]; !exists { if c.envFrozen { panic("Cannot access new environment variables after envdeps are frozen") } val, _ = c.env[key] c.envDeps[key] = val } return val } func (c *config) GetenvWithDefault(key string, defaultValue string) string { ret := c.Getenv(key) if ret == "" { return defaultValue } return ret } func (c *config) IsEnvTrue(key string) bool { value := c.Getenv(key) return value == "1" || value == "y" || value == "yes" || value == "on" || value == "true" } func (c *config) IsEnvFalse(key string) bool { value := c.Getenv(key) return value == "0" || value == "n" || value == "no" || value == "off" || value == "false" } // EnvDeps returns the environment variables this build depends on. The first // call to this function blocks future reads from the environment. func (c *config) EnvDeps() map[string]string { c.envLock.Lock() defer c.envLock.Unlock() c.envFrozen = true return c.envDeps } func (c *config) KatiEnabled() bool { return c.katiEnabled } func (c *config) BuildId() string { return String(c.productVariables.BuildId) } // BuildNumberFile returns the path to a text file containing metadata // representing the current build's number. // // Rules that want to reference the build number should read from this file // without depending on it. They will run whenever their other dependencies // require them to run and get the current build number. This ensures they don't // rebuild on every incremental build when the build number changes. func (c *config) BuildNumberFile(ctx PathContext) Path { return PathForOutput(ctx, String(c.productVariables.BuildNumberFile)) } // DeviceName returns the name of the current device target. 
// TODO: take an AndroidModuleContext to select the device name for multi-device builds
func (c *config) DeviceName() string {
	return *c.productVariables.DeviceName
}

func (c *config) DeviceResourceOverlays() []string {
	return c.productVariables.DeviceResourceOverlays
}

func (c *config) ProductResourceOverlays() []string {
	return c.productVariables.ProductResourceOverlays
}

func (c *config) PlatformVersionName() string {
	return String(c.productVariables.Platform_version_name)
}

func (c *config) PlatformSdkVersion() ApiLevel {
	return uncheckedFinalApiLevel(*c.productVariables.Platform_sdk_version)
}

func (c *config) PlatformSdkCodename() string {
	return String(c.productVariables.Platform_sdk_codename)
}

func (c *config) PlatformSecurityPatch() string {
	return String(c.productVariables.Platform_security_patch)
}

func (c *config) PlatformPreviewSdkVersion() string {
	return String(c.productVariables.Platform_preview_sdk_version)
}

func (c *config) PlatformMinSupportedTargetSdkVersion() string {
	return String(c.productVariables.Platform_min_supported_target_sdk_version)
}

func (c *config) PlatformBaseOS() string {
	return String(c.productVariables.Platform_base_os)
}

func (c *config) MinSupportedSdkVersion() ApiLevel {
	return uncheckedFinalApiLevel(16)
}

func (c *config) FinalApiLevels() []ApiLevel {
	var levels []ApiLevel
	for i := 1; i <= c.PlatformSdkVersion().FinalOrFutureInt(); i++ {
		levels = append(levels, uncheckedFinalApiLevel(i))
	}
	return levels
}

func (c *config) PreviewApiLevels() []ApiLevel {
	var levels []ApiLevel
	for i, codename := range c.PlatformVersionActiveCodenames() {
		levels = append(levels, ApiLevel{
			value:     codename,
			number:    i,
			isPreview: true,
		})
	}
	return levels
}

func (c *config) AllSupportedApiLevels() []ApiLevel {
	var levels []ApiLevel
	levels = append(levels, c.FinalApiLevels()...)
	return append(levels, c.PreviewApiLevels()...)
}

// DefaultAppTargetSdk returns the API level that platform apps are targeting.
// This converts a codename to the exact ApiLevel it represents.
func (c *config) DefaultAppTargetSdk(ctx EarlyModuleContext) ApiLevel {
	if Bool(c.productVariables.Platform_sdk_final) {
		return c.PlatformSdkVersion()
	}
	codename := c.PlatformSdkCodename()
	if codename == "" {
		return NoneApiLevel
	}
	if codename == "REL" {
		panic("Platform_sdk_codename should not be REL when Platform_sdk_final is not set")
	}
	return ApiLevelOrPanic(ctx, codename)
}

func (c *config) AppsDefaultVersionName() string {
	return String(c.productVariables.AppsDefaultVersionName)
}

// PlatformVersionActiveCodenames returns the codenames that are active in the
// current lunch target.
func (c *config) PlatformVersionActiveCodenames() []string { return c.productVariables.Platform_version_active_codenames } func (c *config) ProductAAPTConfig() []string { return c.productVariables.AAPTConfig } func (c *config) ProductAAPTPreferredConfig() string { return String(c.productVariables.AAPTPreferredConfig) } func (c *config) ProductAAPTCharacteristics() string { return String(c.productVariables.AAPTCharacteristics) } func (c *config) ProductAAPTPrebuiltDPI() []string { return c.productVariables.AAPTPrebuiltDPI } func (c *config) DefaultAppCertificateDir(ctx PathContext) SourcePath { defaultCert := String(c.productVariables.DefaultAppCertificate) if defaultCert != "" { return PathForSource(ctx, filepath.Dir(defaultCert)) } return PathForSource(ctx, "build/make/target/product/security") } func (c *config) DefaultAppCertificate(ctx PathContext) (pem, key SourcePath) { defaultCert := String(c.productVariables.DefaultAppCertificate) if defaultCert != "" { return PathForSource(ctx, defaultCert+".x509.pem"), PathForSource(ctx, defaultCert+".pk8") } defaultDir := c.DefaultAppCertificateDir(ctx) return defaultDir.Join(ctx, "testkey.x509.pem"), defaultDir.Join(ctx, "testkey.pk8") } func (c *config) ApexKeyDir(ctx ModuleContext) SourcePath { // TODO(b/121224311): define another variable such as TARGET_APEX_KEY_OVERRIDE defaultCert := String(c.productVariables.DefaultAppCertificate) if defaultCert == "" || filepath.Dir(defaultCert) == "build/make/target/product/security" { // When defaultCert is unset or is set to the testkeys path, use the APEX keys // that is under the module dir return pathForModuleSrc(ctx) } // If not, APEX keys are under the specified directory return PathForSource(ctx, filepath.Dir(defaultCert)) } // AllowMissingDependencies configures Blueprint/Soong to not fail when modules // are configured to depend on non-existent modules. Note that this does not // affect missing input dependencies at the Ninja level. func (c *config) AllowMissingDependencies() bool { return Bool(c.productVariables.Allow_missing_dependencies) } // Returns true if a full platform source tree cannot be assumed. func (c *config) UnbundledBuild() bool { return Bool(c.productVariables.Unbundled_build) } // Returns true if building apps that aren't bundled with the platform. // UnbundledBuild() is always true when this is true. func (c *config) UnbundledBuildApps() bool { return Bool(c.productVariables.Unbundled_build_apps) } // Returns true if building modules against prebuilt SDKs. func (c *config) AlwaysUsePrebuiltSdks() bool { return Bool(c.productVariables.Always_use_prebuilt_sdks) } // Returns true if the boot jars check should be skipped. func (c *config) SkipBootJarsCheck() bool { return Bool(c.productVariables.Skip_boot_jars_check) } func (c *config) Fuchsia() bool { return Bool(c.productVariables.Fuchsia) } func (c *config) MinimizeJavaDebugInfo() bool { return Bool(c.productVariables.MinimizeJavaDebugInfo) && !Bool(c.productVariables.Eng) } func (c *config) Debuggable() bool { return Bool(c.productVariables.Debuggable) } func (c *config) Eng() bool { return Bool(c.productVariables.Eng) } func (c *config) DevicePrimaryArchType() ArchType { return c.Targets[Android][0].Arch.ArchType } func (c *config) SanitizeHost() []string { return append([]string(nil), c.productVariables.SanitizeHost...) } func (c *config) SanitizeDevice() []string { return append([]string(nil), c.productVariables.SanitizeDevice...) 
} func (c *config) SanitizeDeviceDiag() []string { return append([]string(nil), c.productVariables.SanitizeDeviceDiag...) } func (c *config) SanitizeDeviceArch() []string { return append([]string(nil), c.productVariables.SanitizeDeviceArch...) } func (c *config) EnableCFI() bool { if c.productVariables.EnableCFI == nil { return true } return *c.productVariables.EnableCFI } func (c *config) DisableScudo() bool { return Bool(c.productVariables.DisableScudo) } func (c *config) Android64() bool { for _, t := range c.Targets[Android] { if t.Arch.ArchType.Multilib == "lib64" { return true } } return false } func (c *config) UseGoma() bool { return Bool(c.productVariables.UseGoma) } func (c *config) UseRBE() bool { return Bool(c.productVariables.UseRBE) } func (c *config) UseRBEJAVAC() bool { return Bool(c.productVariables.UseRBEJAVAC) } func (c *config) UseRBER8() bool { return Bool(c.productVariables.UseRBER8) } func (c *config) UseRBED8() bool { return Bool(c.productVariables.UseRBED8) } func (c *config) UseRemoteBuild() bool { return c.UseGoma() || c.UseRBE() } func (c *config) RunErrorProne() bool { return c.IsEnvTrue("RUN_ERROR_PRONE") } // XrefCorpusName returns the Kythe cross-reference corpus name. func (c *config) XrefCorpusName() string { return c.Getenv("XREF_CORPUS") } // XrefCuEncoding returns the compilation unit encoding to use for Kythe code // xrefs. Can be 'json' (default), 'proto' or 'all'. func (c *config) XrefCuEncoding() string { if enc := c.Getenv("KYTHE_KZIP_ENCODING"); enc != "" { return enc } return "json" } // XrefCuJavaSourceMax returns the maximum number of the Java source files // in a single compilation unit const xrefJavaSourceFileMaxDefault = "1000" func (c Config) XrefCuJavaSourceMax() string { v := c.Getenv("KYTHE_JAVA_SOURCE_BATCH_SIZE") if v == "" { return xrefJavaSourceFileMaxDefault } if _, err := strconv.ParseUint(v, 0, 0); err != nil { fmt.Fprintf(os.Stderr, "bad KYTHE_JAVA_SOURCE_BATCH_SIZE value: %s, will use %s", err, xrefJavaSourceFileMaxDefault) return xrefJavaSourceFileMaxDefault } return v } func (c *config) EmitXrefRules() bool { return c.XrefCorpusName() != "" } func (c *config) ClangTidy() bool { return Bool(c.productVariables.ClangTidy) } func (c *config) TidyChecks() string { if c.productVariables.TidyChecks == nil { return "" } return *c.productVariables.TidyChecks } func (c *config) LibartImgHostBaseAddress() string { return "0x60000000" } func (c *config) LibartImgDeviceBaseAddress() string { return "0x70000000" } func (c *config) ArtUseReadBarrier() bool { return Bool(c.productVariables.ArtUseReadBarrier) } // Enforce Runtime Resource Overlays for a module. RROs supersede static RROs, // but some modules still depend on it. // // More info: https://source.android.com/devices/architecture/rros func (c *config) EnforceRROForModule(name string) bool { enforceList := c.productVariables.EnforceRROTargets if len(enforceList) > 0 { if InList("*", enforceList) { return true } return InList(name, enforceList) } return false } func (c *config) EnforceRROExcludedOverlay(path string) bool { excluded := c.productVariables.EnforceRROExcludedOverlays if len(excluded) > 0 { return HasAnyPrefix(path, excluded) } return false } func (c *config) ExportedNamespaces() []string { return append([]string(nil), c.productVariables.NamespacesToExport...) 
} func (c *config) HostStaticBinaries() bool { return Bool(c.productVariables.HostStaticBinaries) } func (c *config) UncompressPrivAppDex() bool { return Bool(c.productVariables.UncompressPrivAppDex) } func (c *config) ModulesLoadedByPrivilegedModules() []string { return c.productVariables.ModulesLoadedByPrivilegedModules } // DexpreoptGlobalConfigPath returns the path to the dexpreopt.config file in // the output directory, if it was created during the product configuration // phase by Kati. func (c *config) DexpreoptGlobalConfigPath(ctx PathContext) OptionalPath { if c.productVariables.DexpreoptGlobalConfig == nil { return OptionalPathForPath(nil) } return OptionalPathForPath( pathForBuildToolDep(ctx, *c.productVariables.DexpreoptGlobalConfig)) } // DexpreoptGlobalConfig returns the raw byte contents of the dexpreopt global // configuration. Since the configuration file was created by Kati during // product configuration (externally of soong_build), it's not tracked, so we // also manually add a Ninja file dependency on the configuration file to the // rule that creates the main build.ninja file. This ensures that build.ninja is // regenerated correctly if dexpreopt.config changes. func (c *config) DexpreoptGlobalConfig(ctx PathContext) ([]byte, error) { path := c.DexpreoptGlobalConfigPath(ctx) if !path.Valid() { return nil, nil } ctx.AddNinjaFileDeps(path.String()) return ioutil.ReadFile(absolutePath(path.String())) } func (c *deviceConfig) WithDexpreopt() bool { return c.config.productVariables.WithDexpreopt } func (c *config) FrameworksBaseDirExists(ctx PathContext) bool { return ExistentPathForSource(ctx, "frameworks", "base", "Android.bp").Valid() } func (c *config) VndkSnapshotBuildArtifacts() bool { return Bool(c.productVariables.VndkSnapshotBuildArtifacts) } func (c *config) HasMultilibConflict(arch ArchType) bool { return c.multilibConflicts[arch] } func (c *config) PrebuiltHiddenApiDir(ctx PathContext) string { return String(c.productVariables.PrebuiltHiddenApiDir) } func (c *deviceConfig) Arches() []Arch { var arches []Arch for _, target := range c.config.Targets[Android] { arches = append(arches, target.Arch) } return arches } func (c *deviceConfig) BinderBitness() string { is32BitBinder := c.config.productVariables.Binder32bit if is32BitBinder != nil && *is32BitBinder { return "32" } return "64" } func (c *deviceConfig) VendorPath() string { if c.config.productVariables.VendorPath != nil { return *c.config.productVariables.VendorPath } return "vendor" } func (c *deviceConfig) VndkVersion() string { return String(c.config.productVariables.DeviceVndkVersion) } func (c *deviceConfig) RecoverySnapshotVersion() string { return String(c.config.productVariables.RecoverySnapshotVersion) } func (c *deviceConfig) CurrentApiLevelForVendorModules() string { return StringDefault(c.config.productVariables.DeviceCurrentApiLevelForVendorModules, "current") } func (c *deviceConfig) PlatformVndkVersion() string { return String(c.config.productVariables.Platform_vndk_version) } func (c *deviceConfig) ProductVndkVersion() string { return String(c.config.productVariables.ProductVndkVersion) } func (c *deviceConfig) ExtraVndkVersions() []string { return c.config.productVariables.ExtraVndkVersions } func (c *deviceConfig) VndkUseCoreVariant() bool { return Bool(c.config.productVariables.VndkUseCoreVariant) } func (c *deviceConfig) SystemSdkVersions() []string { return c.config.productVariables.DeviceSystemSdkVersions } func (c *deviceConfig) PlatformSystemSdkVersions() []string { return 
c.config.productVariables.Platform_systemsdk_versions } func (c *deviceConfig) OdmPath() string { if c.config.productVariables.OdmPath != nil { return *c.config.productVariables.OdmPath } return "odm" } func (c *deviceConfig) ProductPath() string { if c.config.productVariables.ProductPath != nil { return *c.config.productVariables.ProductPath } return "product" } func (c *deviceConfig) SystemExtPath() string { if c.config.productVariables.SystemExtPath != nil { return *c.config.productVariables.SystemExtPath } return "system_ext" } func (c *deviceConfig) BtConfigIncludeDir() string { return String(c.config.productVariables.BtConfigIncludeDir) } func (c *deviceConfig) DeviceKernelHeaderDirs() []string { return c.config.productVariables.DeviceKernelHeaders } func (c *deviceConfig) SamplingPGO() bool { return Bool(c.config.productVariables.SamplingPGO) } // JavaCoverageEnabledForPath returns whether Java code coverage is enabled for // path. Coverage is enabled by default when the product variable // JavaCoveragePaths is empty. If JavaCoveragePaths is not empty, coverage is // enabled for any path which is part of this variable (and not part of the // JavaCoverageExcludePaths product variable). Value "*" in JavaCoveragePaths // represents any path. func (c *deviceConfig) JavaCoverageEnabledForPath(path string) bool { coverage := false if len(c.config.productVariables.JavaCoveragePaths) == 0 || InList("*", c.config.productVariables.JavaCoveragePaths) || HasAnyPrefix(path, c.config.productVariables.JavaCoveragePaths) { coverage = true } if coverage && len(c.config.productVariables.JavaCoverageExcludePaths) > 0 { if HasAnyPrefix(path, c.config.productVariables.JavaCoverageExcludePaths) { coverage = false } } return coverage } // Returns true if gcov or clang coverage is enabled. func (c *deviceConfig) NativeCoverageEnabled() bool { return Bool(c.config.productVariables.GcovCoverage) || Bool(c.config.productVariables.ClangCoverage) } func (c *deviceConfig) ClangCoverageEnabled() bool { return Bool(c.config.productVariables.ClangCoverage) } func (c *deviceConfig) GcovCoverageEnabled() bool { return Bool(c.config.productVariables.GcovCoverage) } // NativeCoverageEnabledForPath returns whether (GCOV- or Clang-based) native // code coverage is enabled for path. By default, coverage is not enabled for a // given path unless it is part of the NativeCoveragePaths product variable (and // not part of the NativeCoverageExcludePaths product variable). Value "*" in // NativeCoveragePaths represents any path. 
func (c *deviceConfig) NativeCoverageEnabledForPath(path string) bool { coverage := false if len(c.config.productVariables.NativeCoveragePaths) > 0 { if InList("*", c.config.productVariables.NativeCoveragePaths) || HasAnyPrefix(path, c.config.productVariables.NativeCoveragePaths) { coverage = true } } if coverage && len(c.config.productVariables.NativeCoverageExcludePaths) > 0 { if HasAnyPrefix(path, c.config.productVariables.NativeCoverageExcludePaths) { coverage = false } } return coverage } func (c *deviceConfig) PgoAdditionalProfileDirs() []string { return c.config.productVariables.PgoAdditionalProfileDirs } func (c *deviceConfig) VendorSepolicyDirs() []string { return c.config.productVariables.BoardVendorSepolicyDirs } func (c *deviceConfig) OdmSepolicyDirs() []string { return c.config.productVariables.BoardOdmSepolicyDirs } func (c *deviceConfig) SystemExtPublicSepolicyDirs() []string { return c.config.productVariables.SystemExtPublicSepolicyDirs } func (c *deviceConfig) SystemExtPrivateSepolicyDirs() []string { return c.config.productVariables.SystemExtPrivateSepolicyDirs } func (c *deviceConfig) SepolicyM4Defs() []string { return c.config.productVariables.BoardSepolicyM4Defs } func (c *deviceConfig) OverrideManifestPackageNameFor(name string) (manifestName string, overridden bool) { return findOverrideValue(c.config.productVariables.ManifestPackageNameOverrides, name, "invalid override rule %q in PRODUCT_MANIFEST_PACKAGE_NAME_OVERRIDES should be <module_name>:<manifest_name>") } func (c *deviceConfig) OverrideCertificateFor(name string) (certificatePath string, overridden bool) { return findOverrideValue(c.config.productVariables.CertificateOverrides, name, "invalid override rule %q in PRODUCT_CERTIFICATE_OVERRIDES should be <module_name>:<certificate_module_name>") } func (c *deviceConfig) OverridePackageNameFor(name string) string { newName, overridden := findOverrideValue( c.config.productVariables.PackageNameOverrides, name, "invalid override rule %q in PRODUCT_PACKAGE_NAME_OVERRIDES should be <module_name>:<package_name>") if overridden { return newName } return name } func findOverrideValue(overrides []string, name string, errorMsg string) (newValue string, overridden bool) { if overrides == nil || len(overrides) == 0 { return "", false } for _, o := range overrides { split := strings.Split(o, ":") if len(split) != 2 { // This shouldn't happen as this is first checked in make, but just in case. 
panic(fmt.Errorf(errorMsg, o)) } if matchPattern(split[0], name) { return substPattern(split[0], split[1], name), true } } return "", false } func (c *config) IntegerOverflowDisabledForPath(path string) bool { if len(c.productVariables.IntegerOverflowExcludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.IntegerOverflowExcludePaths) } func (c *config) CFIDisabledForPath(path string) bool { if len(c.productVariables.CFIExcludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.CFIExcludePaths) } func (c *config) CFIEnabledForPath(path string) bool { if len(c.productVariables.CFIIncludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.CFIIncludePaths) && !c.CFIDisabledForPath(path) } func (c *config) MemtagHeapDisabledForPath(path string) bool { if len(c.productVariables.MemtagHeapExcludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.MemtagHeapExcludePaths) } func (c *config) MemtagHeapAsyncEnabledForPath(path string) bool { if len(c.productVariables.MemtagHeapAsyncIncludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.MemtagHeapAsyncIncludePaths) && !c.MemtagHeapDisabledForPath(path) } func (c *config) MemtagHeapSyncEnabledForPath(path string) bool { if len(c.productVariables.MemtagHeapSyncIncludePaths) == 0 { return false } return HasAnyPrefix(path, c.productVariables.MemtagHeapSyncIncludePaths) && !c.MemtagHeapDisabledForPath(path) } func (c *config) VendorConfig(name string) VendorConfig { return soongconfig.Config(c.productVariables.VendorVars[name]) } func (c *config) NdkAbis() bool { return Bool(c.productVariables.Ndk_abis) } func (c *config) AmlAbis() bool { return Bool(c.productVariables.Aml_abis) } func (c *config) FlattenApex() bool { return Bool(c.productVariables.Flatten_apex) } func (c *config) ForceApexSymlinkOptimization() bool { return Bool(c.productVariables.ForceApexSymlinkOptimization) } func (c *config) CompressedApex() bool { return Bool(c.productVariables.CompressedApex) } func (c *config) EnforceSystemCertificate() bool { return Bool(c.productVariables.EnforceSystemCertificate) } func (c *config) EnforceSystemCertificateAllowList() []string { return c.productVariables.EnforceSystemCertificateAllowList } func (c *config) EnforceProductPartitionInterface() bool { return Bool(c.productVariables.EnforceProductPartitionInterface) } func (c *config) EnforceInterPartitionJavaSdkLibrary() bool { return Bool(c.productVariables.EnforceInterPartitionJavaSdkLibrary) } func (c *config) InterPartitionJavaLibraryAllowList() []string { return c.productVariables.InterPartitionJavaLibraryAllowList } func (c *config) InstallExtraFlattenedApexes() bool { return Bool(c.productVariables.InstallExtraFlattenedApexes) } func (c *config) ProductHiddenAPIStubs() []string { return c.productVariables.ProductHiddenAPIStubs } func (c *config) ProductHiddenAPIStubsSystem() []string { return c.productVariables.ProductHiddenAPIStubsSystem } func (c *config) ProductHiddenAPIStubsTest() []string { return c.productVariables.ProductHiddenAPIStubsTest } func (c *deviceConfig) TargetFSConfigGen() []string { return c.config.productVariables.TargetFSConfigGen } func (c *config) ProductPublicSepolicyDirs() []string { return c.productVariables.ProductPublicSepolicyDirs } func (c *config) ProductPrivateSepolicyDirs() []string { return c.productVariables.ProductPrivateSepolicyDirs } func (c *config) MissingUsesLibraries() []string { return c.productVariables.MissingUsesLibraries } func (c 
*deviceConfig) DeviceArch() string { return String(c.config.productVariables.DeviceArch) } func (c *deviceConfig) DeviceArchVariant() string { return String(c.config.productVariables.DeviceArchVariant) } func (c *deviceConfig) DeviceSecondaryArch() string { return String(c.config.productVariables.DeviceSecondaryArch) } func (c *deviceConfig) DeviceSecondaryArchVariant() string { return String(c.config.productVariables.DeviceSecondaryArchVariant) } func (c *deviceConfig) BoardUsesRecoveryAsBoot() bool { return Bool(c.config.productVariables.BoardUsesRecoveryAsBoot) } func (c *deviceConfig) BoardKernelBinaries() []string { return c.config.productVariables.BoardKernelBinaries } func (c *deviceConfig) BoardKernelModuleInterfaceVersions() []string { return c.config.productVariables.BoardKernelModuleInterfaceVersions } func (c *deviceConfig) BoardMoveRecoveryResourcesToVendorBoot() bool { return Bool(c.config.productVariables.BoardMoveRecoveryResourcesToVendorBoot) } func (c *deviceConfig) PlatformSepolicyVersion() string { return String(c.config.productVariables.PlatformSepolicyVersion) } func (c *deviceConfig) BoardSepolicyVers() string { if ver := String(c.config.productVariables.BoardSepolicyVers); ver != "" { return ver } return c.PlatformSepolicyVersion() } func (c *deviceConfig) BoardReqdMaskPolicy() []string { return c.config.productVariables.BoardReqdMaskPolicy } func (c *deviceConfig) DirectedVendorSnapshot() bool { return c.config.productVariables.DirectedVendorSnapshot } func (c *deviceConfig) VendorSnapshotModules() map[string]bool { return c.config.productVariables.VendorSnapshotModules } func (c *deviceConfig) DirectedRecoverySnapshot() bool { return c.config.productVariables.DirectedRecoverySnapshot } func (c *deviceConfig) RecoverySnapshotModules() map[string]bool { return c.config.productVariables.RecoverySnapshotModules } func createDirsMap(previous map[string]bool, dirs []string) (map[string]bool, error) { var ret = make(map[string]bool) for _, dir := range dirs { clean := filepath.Clean(dir) if previous[clean] || ret[clean] { return nil, fmt.Errorf("Duplicate entry %s", dir) } ret[clean] = true } return ret, nil } func (c *deviceConfig) createDirsMapOnce(onceKey OnceKey, previous map[string]bool, dirs []string) map[string]bool { dirMap := c.Once(onceKey, func() interface{} { ret, err := createDirsMap(previous, dirs) if err != nil { panic(fmt.Errorf("%s: %w", onceKey.key, err)) } return ret }) if dirMap == nil { return nil } return dirMap.(map[string]bool) } var vendorSnapshotDirsExcludedKey = NewOnceKey("VendorSnapshotDirsExcludedMap") func (c *deviceConfig) VendorSnapshotDirsExcludedMap() map[string]bool { return c.createDirsMapOnce(vendorSnapshotDirsExcludedKey, nil, c.config.productVariables.VendorSnapshotDirsExcluded) } var vendorSnapshotDirsIncludedKey = NewOnceKey("VendorSnapshotDirsIncludedMap") func (c *deviceConfig) VendorSnapshotDirsIncludedMap() map[string]bool { excludedMap := c.VendorSnapshotDirsExcludedMap() return c.createDirsMapOnce(vendorSnapshotDirsIncludedKey, excludedMap, c.config.productVariables.VendorSnapshotDirsIncluded) } var recoverySnapshotDirsExcludedKey = NewOnceKey("RecoverySnapshotDirsExcludedMap") func (c *deviceConfig) RecoverySnapshotDirsExcludedMap() map[string]bool { return c.createDirsMapOnce(recoverySnapshotDirsExcludedKey, nil, c.config.productVariables.RecoverySnapshotDirsExcluded) } var recoverySnapshotDirsIncludedKey = NewOnceKey("RecoverySnapshotDirsIncludedMap") func (c *deviceConfig) RecoverySnapshotDirsIncludedMap() 
map[string]bool { excludedMap := c.RecoverySnapshotDirsExcludedMap() return c.createDirsMapOnce(recoverySnapshotDirsIncludedKey, excludedMap, c.config.productVariables.RecoverySnapshotDirsIncluded) } func (c *deviceConfig) ShippingApiLevel() ApiLevel { if c.config.productVariables.ShippingApiLevel == nil { return NoneApiLevel } apiLevel, _ := strconv.Atoi(*c.config.productVariables.ShippingApiLevel) return uncheckedFinalApiLevel(apiLevel) } func (c *deviceConfig) BuildBrokenEnforceSyspropOwner() bool { return c.config.productVariables.BuildBrokenEnforceSyspropOwner } func (c *deviceConfig) BuildBrokenTrebleSyspropNeverallow() bool { return c.config.productVariables.BuildBrokenTrebleSyspropNeverallow } func (c *deviceConfig) BuildDebugfsRestrictionsEnabled() bool { return c.config.productVariables.BuildDebugfsRestrictionsEnabled } func (c *deviceConfig) BuildBrokenVendorPropertyNamespace() bool { return c.config.productVariables.BuildBrokenVendorPropertyNamespace } func (c *deviceConfig) RequiresInsecureExecmemForSwiftshader() bool { return c.config.productVariables.RequiresInsecureExecmemForSwiftshader } func (c *config) SelinuxIgnoreNeverallows() bool { return c.productVariables.SelinuxIgnoreNeverallows } func (c *deviceConfig) SepolicySplit() bool { return c.config.productVariables.SepolicySplit } // The ConfiguredJarList struct provides methods for handling a list of (apex, jar) pairs. // Such lists are used in the build system for things like bootclasspath jars or system server jars. // The apex part is either an apex name, or one of the special names "platform" or "system_ext". Jar is a // module name. The pairs come from Make product variables as a list of colon-separated strings. // // Examples: // - "com.android.art:core-oj" // - "platform:framework" // - "system_ext:foo" // type ConfiguredJarList struct { // A list of apex components, which can be an apex name, // or special names like "platform" or "system_ext". apexes []string // A list of jar module name components. jars []string } // Len returns the length of the list of jars. func (l *ConfiguredJarList) Len() int { return len(l.jars) } // Jar returns the idx-th jar component of (apex, jar) pairs. func (l *ConfiguredJarList) Jar(idx int) string { return l.jars[idx] } // Apex returns the idx-th apex component of (apex, jar) pairs. func (l *ConfiguredJarList) Apex(idx int) string { return l.apexes[idx] } // ContainsJar returns true if the list of (apex, jar) pairs contains a pair with the // given jar module name. func (l *ConfiguredJarList) ContainsJar(jar string) bool { return InList(jar, l.jars) } // containsApexJarPair returns true if the list contains the given (apex, jar) pair. func (l *ConfiguredJarList) containsApexJarPair(apex, jar string) bool { for i := 0; i < l.Len(); i++ { if apex == l.apexes[i] && jar == l.jars[i] { return true } } return false } // ApexOfJar returns the apex component of the first pair with the given jar name on the list, or // an empty string if not found. func (l *ConfiguredJarList) ApexOfJar(jar string) string { if idx := IndexList(jar, l.jars); idx != -1 { return l.Apex(idx) } return "" } // IndexOfJar returns the index of the first pair with the given jar name on the list, or -1 // if not found. func (l *ConfiguredJarList) IndexOfJar(jar string) int { return IndexList(jar, l.jars) } func copyAndAppend(list []string, item string) []string { // Create the result list to be 1 longer than the input. result := make([]string, len(list)+1) // Copy the whole input list into the result. 
count := copy(result, list) // Insert the extra item at the end. result[count] = item return result } // Append returns a new list with the given (apex, jar) pair appended. func (l *ConfiguredJarList) Append(apex string, jar string) ConfiguredJarList { // Create a copy of the backing arrays before appending to avoid sharing backing // arrays that are mutated across instances. apexes := copyAndAppend(l.apexes, apex) jars := copyAndAppend(l.jars, jar) return ConfiguredJarList{apexes, jars} } // RemoveList filters out a list of (apex, jar) pairs from the receiving list of pairs. func (l *ConfiguredJarList) RemoveList(list ConfiguredJarList) ConfiguredJarList { apexes := make([]string, 0, l.Len()) jars := make([]string, 0, l.Len()) for i, jar := range l.jars { apex := l.apexes[i] if !list.containsApexJarPair(apex, jar) { apexes = append(apexes, apex) jars = append(jars, jar) } } return ConfiguredJarList{apexes, jars} } // CopyOfJars returns a copy of the list of strings containing jar module name // components. func (l *ConfiguredJarList) CopyOfJars() []string { return CopyOf(l.jars) } // CopyOfApexJarPairs returns a copy of the list of strings with colon-separated // (apex, jar) pairs. func (l *ConfiguredJarList) CopyOfApexJarPairs() []string { pairs := make([]string, 0, l.Len()) for i, jar := range l.jars { apex := l.apexes[i] pairs = append(pairs, apex+":"+jar) } return pairs } // BuildPaths returns a list of build paths based on the given directory prefix. func (l *ConfiguredJarList) BuildPaths(ctx PathContext, dir OutputPath) WritablePaths { paths := make(WritablePaths, l.Len()) for i, jar := range l.jars { paths[i] = dir.Join(ctx, ModuleStem(jar)+".jar") } return paths } // UnmarshalJSON converts JSON configuration from raw bytes into a // ConfiguredJarList structure. func (l *ConfiguredJarList) UnmarshalJSON(b []byte) error { // Try to unmarshal into a []string, each item of which contains an // <apex>:<jar> pair. var list []string err := json.Unmarshal(b, &list) if err != nil { // Did not work, so return the error. return err } apexes, jars, err := splitListOfPairsIntoPairOfLists(list) if err != nil { return err } l.apexes = apexes l.jars = jars return nil } func (l *ConfiguredJarList) MarshalJSON() ([]byte, error) { if len(l.apexes) != len(l.jars) { return nil, fmt.Errorf("inconsistent ConfiguredJarList: apexes: %q, jars: %q", l.apexes, l.jars) } list := make([]string, 0, len(l.apexes)) for i := 0; i < len(l.apexes); i++ { list = append(list, l.apexes[i]+":"+l.jars[i]) } return json.Marshal(list) } // ModuleStem hardcodes the stem of framework-minus-apex to return "framework". // // TODO(b/139391334): hard coded until we find a good way to query the stem of a // module before any other mutators are run. func ModuleStem(module string) string { if module == "framework-minus-apex" { return "framework" } return module } // DevicePaths computes the on-device paths for the list of (apex, jar) pairs, // based on the operating system. 
func (l *ConfiguredJarList) DevicePaths(cfg Config, ostype OsType) []string { paths := make([]string, l.Len()) for i, jar := range l.jars { apex := l.apexes[i] name := ModuleStem(jar) + ".jar" var subdir string if apex == "platform" { subdir = "system/framework" } else if apex == "system_ext" { subdir = "system_ext/framework" } else { subdir = filepath.Join("apex", apex, "javalib") } if ostype.Class == Host { paths[i] = filepath.Join(cfg.Getenv("OUT_DIR"), "host", cfg.PrebuiltOS(), subdir, name) } else { paths[i] = filepath.Join("/", subdir, name) } } return paths } func (l *ConfiguredJarList) String() string { var pairs []string for i := 0; i < l.Len(); i++ { pairs = append(pairs, l.apexes[i]+":"+l.jars[i]) } return strings.Join(pairs, ",") } func splitListOfPairsIntoPairOfLists(list []string) ([]string, []string, error) { // Now we need to populate this list by splitting each item in the slice of // pairs and appending them to the appropriate list of apexes or jars. apexes := make([]string, len(list)) jars := make([]string, len(list)) for i, apexjar := range list { apex, jar, err := splitConfiguredJarPair(apexjar) if err != nil { return nil, nil, err } apexes[i] = apex jars[i] = jar } return apexes, jars, nil } // Expected format for apexJarValue = <apex name>:<jar name> func splitConfiguredJarPair(str string) (string, string, error) { pair := strings.SplitN(str, ":", 2) if len(pair) == 2 { apex := pair[0] jar := pair[1] if apex == "" { return apex, jar, fmt.Errorf("invalid apex '%s' in <apex>:<jar> pair '%s', expected format: <apex>:<jar>", apex, str) } return apex, jar, nil } else { return "error-apex", "error-jar", fmt.Errorf("malformed (apex, jar) pair: '%s', expected format: <apex>:<jar>", str) } } // CreateTestConfiguredJarList is a function to create ConfiguredJarList for tests. func CreateTestConfiguredJarList(list []string) ConfiguredJarList { // Create the ConfiguredJarList in as similar way as it is created at runtime by marshalling to // a json list of strings and then unmarshalling into a ConfiguredJarList instance. b, err := json.Marshal(list) if err != nil { panic(err) } var jarList ConfiguredJarList err = json.Unmarshal(b, &jarList) if err != nil { panic(err) } return jarList } // EmptyConfiguredJarList returns an empty jar list. func EmptyConfiguredJarList() ConfiguredJarList { return ConfiguredJarList{} } var earlyBootJarsKey = NewOnceKey("earlyBootJars") func (c *config) BootJars() []string { return c.Once(earlyBootJarsKey, func() interface{} { list := c.productVariables.BootJars.CopyOfJars() return append(list, c.productVariables.UpdatableBootJars.CopyOfJars()...) }).([]string) } func (c *config) NonUpdatableBootJars() ConfiguredJarList { return c.productVariables.BootJars } func (c *config) UpdatableBootJars() ConfiguredJarList { return c.productVariables.UpdatableBootJars } func (c *config) RBEWrapper() string { return c.GetenvWithDefault("RBE_WRAPPER", remoteexec.DefaultWrapperPath) }
[ "\"PATH\"" ]
[]
[ "PATH" ]
[]
["PATH"]
go
1
0
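The ConfiguredJarList code in the row above round-trips pairs as colon-separated `<apex>:<jar>` strings. A minimal standalone Go sketch of that split convention follows; `splitPair` and the sample strings are illustrative, not Soong's API:

```go
package main

import (
	"fmt"
	"strings"
)

// splitPair mirrors the <apex>:<jar> convention used above:
// split on the first ':' only, and reject an empty apex part.
func splitPair(s string) (apex, jar string, err error) {
	parts := strings.SplitN(s, ":", 2)
	if len(parts) != 2 {
		return "", "", fmt.Errorf("malformed pair %q, expected <apex>:<jar>", s)
	}
	if parts[0] == "" {
		return "", "", fmt.Errorf("empty apex in pair %q", s)
	}
	return parts[0], parts[1], nil
}

func main() {
	for _, s := range []string{"com.android.art:core-oj", "platform:framework", "badpair"} {
		apex, jar, err := splitPair(s)
		if err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Printf("apex=%s jar=%s\n", apex, jar)
	}
}
```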
backend/dan3103_1_1/wsgi.py
""" WSGI config for dan3103_1_1 project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dan3103_1_1.settings') application = get_wsgi_application()
[]
[]
[]
[]
[]
python
0
0
http/http_site/manage.py
#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "http_site.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
[]
[]
[]
[]
[]
python
0
0
eventlistener/lb_test.go
package eventlistener_test import ( "github.com/go-chassis/go-archaius" "github.com/go-chassis/go-archaius/core" "github.com/go-chassis/go-chassis/core/config" "github.com/go-chassis/go-chassis/core/lager" "github.com/go-chassis/go-chassis/core/loadbalancer" "github.com/go-chassis/go-chassis/eventlistener" "github.com/stretchr/testify/assert" "os" "testing" ) func TestLbEventError(t *testing.T) { gopath := os.Getenv("GOPATH") os.Setenv("CHASSIS_HOME", gopath+"/src/github.com/go-chassis/go-chassis/examples/discovery/server/") config.Init() lager.Initialize("", "INFO", "", "size", true, 1, 10, 7) eventlistener.Init() archaius.AddKeyValue("cse.loadbalance.strategy.name", "SessionStickiness") lbEventListener := &eventlistener.LoadbalanceEventListener{} e := &core.Event{EventType: "UPDATE", Key: "cse.loadbalance.strategy.name", Value: "SessionStickiness"} lbEventListener.Event(e) assert.Equal(t, loadbalancer.StrategySessionStickiness, archaius.GetString("cse.loadbalance.strategy.name", "")) assert.Equal(t, loadbalancer.StrategySessionStickiness, config.GetStrategyName("", "")) e2 := &core.Event{EventType: "DELETE", Key: "cse.loadbalance.strategy.name", Value: "RoundRobin"} lbEventListener.Event(e2) archaius.DeleteKeyValue("cse.loadbalance.strategy.name", "SessionStickiness") assert.NotEqual(t, loadbalancer.StrategySessionStickiness, archaius.GetString("cse.loadbalance.strategy.name", "")) } func TestLbEvent(t *testing.T) { gopath := os.Getenv("GOPATH") os.Setenv("CHASSIS_HOME", gopath+"/src/github.com/go-chassis/go-chassis/examples/discovery/server/") config.Init() loadbalancer.Enable() lager.Initialize("", "INFO", "", "size", true, 1, 10, 7) eventlistener.Init() archaius.AddKeyValue("cse.loadbalance.strategy.name", "SessionStickiness") lbEventListener := &eventlistener.LoadbalanceEventListener{} e := &core.Event{EventType: "UPDATE", Key: "cse.loadbalance.strategy.name", Value: "SessionStickiness"} lbEventListener.Event(e) assert.Equal(t, loadbalancer.StrategySessionStickiness, archaius.GetString("cse.loadbalance.strategy.name", "")) assert.Equal(t, loadbalancer.StrategySessionStickiness, config.GetStrategyName("", "")) e2 := &core.Event{EventType: "DELETE", Key: "cse.loadbalance.strategy.name", Value: "RoundRobin"} lbEventListener.Event(e2) archaius.DeleteKeyValue("cse.loadbalance.strategy.name", "SessionStickiness") assert.NotEqual(t, loadbalancer.StrategySessionStickiness, archaius.GetString("cse.loadbalance.strategy.name", "")) }
[ "\"GOPATH\"", "\"GOPATH\"" ]
[]
[ "GOPATH" ]
[]
["GOPATH"]
go
1
0
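The listener under test reacts to archaius UPDATE/DELETE events on the strategy key. A rough standalone sketch of that dispatch shape; the `Event` struct and `onEvent` handler here are simplified stand-ins, not the go-chassis types:

```go
package main

import "fmt"

// Event mirrors the shape used in the tests above: a type plus a key/value pair.
type Event struct {
	EventType string // "UPDATE", "CREATE", or "DELETE"
	Key       string
	Value     string
}

// strategy holds the currently selected load-balance strategy.
var strategy = "RoundRobin"

// onEvent applies or reverts the strategy depending on the event type.
func onEvent(e *Event) {
	if e.Key != "cse.loadbalance.strategy.name" {
		return
	}
	switch e.EventType {
	case "UPDATE", "CREATE":
		strategy = e.Value
	case "DELETE":
		strategy = "RoundRobin" // fall back to the default
	}
}

func main() {
	onEvent(&Event{EventType: "UPDATE", Key: "cse.loadbalance.strategy.name", Value: "SessionStickiness"})
	fmt.Println(strategy) // SessionStickiness
	onEvent(&Event{EventType: "DELETE", Key: "cse.loadbalance.strategy.name"})
	fmt.Println(strategy) // RoundRobin
}
```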
lib/cache.go
package lib import ( "encoding/json" "io" "io/ioutil" "os" "os/exec" "path/filepath" "strings" "github.com/fatih/color" "github.com/syndtr/goleveldb/leveldb" de "github.com/syndtr/goleveldb/leveldb/errors" ) func OpenLocalDB() (*leveldb.DB, error) { dbDir := getDictDBDir() db, err := leveldb.OpenFile(dbDir, nil) if nil != err { return nil, err } return db, nil } func QueryLocalDB(key string, db *leveldb.DB) (*DictResult, error) { data, err := db.Get([]byte(key), nil) if de.ErrNotFound == err { // first query of a word always returns NotFound return nil, nil } if nil != err { return nil, err } ret := DictResult{} if err := json.Unmarshal(data, &ret); nil != err { return nil, err } return &ret, nil } func ScanWords() (map[string]DictResult, error) { db, err := OpenLocalDB() if err != nil { color.Red("OpenLocalDB Fail! Cause: %s", err) return nil, err } defer db.Close() dict := map[string]DictResult{} iter := db.NewIterator(nil, nil) for iter.Next() { ret := DictResult{} if err := json.Unmarshal(iter.Value(), &ret); nil != err { return nil, err } dict[string(iter.Key())] = ret } defer iter.Release() return dict, nil } func DeleteWords(args []string) error { db, err := OpenLocalDB() if err != nil { color.Red("OpenLocalDB Fail! Cause: %s", err) return err } defer db.Close() if err := db.Delete([]byte(strings.Join(args, " ")), nil); err != nil { return err } return nil } func BackupCacheFiles() { dictDir := getDictDir() if _, err := os.Stat(dictDir); os.IsNotExist(err) { color.Red("Cannot find the DB path: %s", err.Error()) return } fileName := GetBakFileName() dstFile := filepath.Join(dictDir, fileName) result := Execute(dictDir, "tar", "-czvf", dstFile, "./db") if result { color.Green("Local DB backup to: %s", dstFile) } } func ClearCacheFiles() { tmpDir := getDictDir() err := os.RemoveAll(tmpDir) if nil != err && !os.IsNotExist(err) { color.Red("ClearCacheFiles Fail! Cause: %s", err.Error()) } color.Green("Clear Success! CacheDir: %s", tmpDir) } func getDictDir() string { dbDir := os.Getenv("YDICT_DB") if dbDir == "" { dbDir = filepath.Join(os.Getenv("HOME"), ".ydict") } return dbDir } func getDictDBDir() string { dbDir := os.Getenv("YDICT_DB") if dbDir == "" { dbDir = filepath.Join(os.Getenv("HOME"), ".ydict") } ydictDir := filepath.Join(dbDir, "db") return ydictDir } func getDictAudioDir() string { tmpDir := getDictDir() ydictDir := filepath.Join(tmpDir, "audio") return ydictDir } func SaveVoiceFile(name string, body io.ReadCloser) (string, error) { ydictDir := getDictAudioDir() tmpfile, err := ioutil.TempFile(ydictDir, name) if err != nil { if !os.IsNotExist(err) { return "", err } err = os.MkdirAll(ydictDir, 0700) if nil != err { return "", err } tmpfile, err = ioutil.TempFile(ydictDir, name) if nil != err { return "", err } } data, err := ioutil.ReadAll(body) if err != nil { return "", err } if _, err := tmpfile.Write(data); err != nil { return "", err } if err := tmpfile.Close(); err != nil { return "", err } aFile := tmpfile.Name() return aFile, err } func DoPlayFile(aFile string) error { cmd := exec.Command("mpg123", aFile) if _, err := exec.LookPath("mpv"); err == nil { // Android Termux only has mpv cmd = exec.Command("mpv", aFile) } if err := cmd.Start(); err != nil { return err } if err := cmd.Wait(); err != nil { return err } return nil }
[ "\"YDICT_DB\"", "\"HOME\"", "\"YDICT_DB\"", "\"HOME\"" ]
[]
[ "HOME", "YDICT_DB" ]
[]
["HOME", "YDICT_DB"]
go
2
0
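getDictDir above resolves the cache root from `YDICT_DB`, falling back to `$HOME/.ydict`. The same lookup order as a self-contained sketch (stdlib only):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cacheDir returns YDICT_DB if set, otherwise $HOME/.ydict,
// matching the lookup order in getDictDir above.
func cacheDir() string {
	if dir := os.Getenv("YDICT_DB"); dir != "" {
		return dir
	}
	return filepath.Join(os.Getenv("HOME"), ".ydict")
}

func main() {
	fmt.Println("db dir:", filepath.Join(cacheDir(), "db"))
	fmt.Println("audio dir:", filepath.Join(cacheDir(), "audio"))
}
```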
web-api/anyberry/asgi.py
""" ASGI config for anyberry project. It exposes the ASGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/ """ import os from django.core.asgi import get_asgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'anyberry.settings') application = get_asgi_application()
[]
[]
[]
[]
[]
python
0
0
testhelpers/registry.go
package testhelpers import ( "bytes" "context" "encoding/base64" "fmt" "io" "io/ioutil" "os" "path/filepath" "testing" "time" "github.com/buildpack/pack/internal/archive" dockertypes "github.com/docker/docker/api/types" dockercontainer "github.com/docker/docker/api/types/container" "github.com/docker/go-connections/nat" ) var registryContainerName = "registry:2" type TestRegistryConfig struct { runRegistryName string RunRegistryPort string DockerConfigDir string username string password string } func RunRegistry(t *testing.T) *TestRegistryConfig { t.Log("run registry") t.Helper() runRegistryName := "test-registry-" + RandString(10) username := RandString(10) password := RandString(10) runRegistryPort := startRegistry(t, runRegistryName, username, password) dockerConfigDir := setupDockerConfigWithAuth(t, username, password, runRegistryPort) registryConfig := &TestRegistryConfig{ runRegistryName: runRegistryName, RunRegistryPort: runRegistryPort, DockerConfigDir: dockerConfigDir, username: username, password: password, } return registryConfig } func (rc *TestRegistryConfig) AuthConfig() dockertypes.AuthConfig { return dockertypes.AuthConfig{ Username: rc.username, Password: rc.password, ServerAddress: fmt.Sprintf("localhost:%s", rc.RunRegistryPort)} } func (rc *TestRegistryConfig) Login(t *testing.T, username string, password string) { Eventually(t, func() bool { _, err := dockerCli(t).RegistryLogin(context.Background(), dockertypes.AuthConfig{ Username: username, Password: password, ServerAddress: fmt.Sprintf("localhost:%s", rc.RunRegistryPort)}) return err == nil }, 100*time.Millisecond, 10*time.Second) } func startRegistry(t *testing.T, runRegistryName, username, password string) string { AssertNil(t, PullImageWithAuth(dockerCli(t), registryContainerName, "")) ctx := context.Background() htpasswdTar := generateHtpasswd(ctx, t, username, password) ctr, err := dockerCli(t).ContainerCreate(ctx, &dockercontainer.Config{ Image: registryContainerName, Labels: map[string]string{"author": "pack"}, Env: []string{ "REGISTRY_AUTH=htpasswd", "REGISTRY_AUTH_HTPASSWD_REALM=Registry Realm", "REGISTRY_AUTH_HTPASSWD_PATH=/registry_test_htpasswd", }, }, &dockercontainer.HostConfig{ AutoRemove: true, PortBindings: nat.PortMap{ "5000/tcp": []nat.PortBinding{{}}, }, }, nil, runRegistryName) AssertNil(t, err) err = dockerCli(t).CopyToContainer(ctx, ctr.ID, "/", htpasswdTar, dockertypes.CopyToContainerOptions{}) AssertNil(t, err) err = dockerCli(t).ContainerStart(ctx, ctr.ID, dockertypes.ContainerStartOptions{}) AssertNil(t, err) inspect, err := dockerCli(t).ContainerInspect(context.TODO(), ctr.ID) AssertNil(t, err) runRegistryPort := inspect.NetworkSettings.Ports["5000/tcp"][0].HostPort if os.Getenv("DOCKER_HOST") != "" { err := proxyDockerHostPort(runRegistryPort) AssertNil(t, err) } return runRegistryPort } func generateHtpasswd(ctx context.Context, t *testing.T, username string, password string) io.Reader { //https://docs.docker.com/registry/deploying/#restricting-access htpasswdCtr, err := dockerCli(t).ContainerCreate(ctx, &dockercontainer.Config{ Image: registryContainerName, Entrypoint: []string{"htpasswd", "-Bbn", username, password}, }, &dockercontainer.HostConfig{ AutoRemove: true, }, nil, "") AssertNil(t, err) var b bytes.Buffer err = RunContainer(ctx, dockerCli(t), htpasswdCtr.ID, &b, &b) AssertNil(t, err) reader, err := archive.CreateSingleFileTarReader("/registry_test_htpasswd", b.String()) AssertNil(t, err) return reader } func setupDockerConfigWithAuth(t *testing.T, username string, password 
string, runRegistryPort string) string { dockerConfigDir, err := ioutil.TempDir("", "pack.test.docker.config.dir") AssertNil(t, err) AssertNil(t, ioutil.WriteFile(filepath.Join(dockerConfigDir, "config.json"), []byte(fmt.Sprintf(`{ "auths": { "localhost:%s": { "auth": "%s" } } } `, runRegistryPort, encodedUserPass(username, password))), 0666)) return dockerConfigDir } func encodedUserPass(username string, password string) string { return base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", username, password))) } func (rc *TestRegistryConfig) StopRegistry(t *testing.T) { t.Log("stop registry") t.Helper() err := dockerCli(t).ContainerKill(context.Background(), rc.runRegistryName, "SIGKILL") AssertNil(t, err) err = os.RemoveAll(rc.DockerConfigDir) AssertNil(t, err) } func (rc *TestRegistryConfig) RepoName(name string) string { return "localhost:" + rc.RunRegistryPort + "/" + name } func (rc *TestRegistryConfig) RegistryAuth() string { return base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf(`{"username":"%s","password":"%s"}`, rc.username, rc.password))) } func (rc *TestRegistryConfig) RegistryCatalog() (string, error) { return HTTPGetE(fmt.Sprintf("http://localhost:%s/v2/_catalog", rc.RunRegistryPort), map[string]string{ "Authorization": "Basic " + encodedUserPass(rc.username, rc.password), }) }
[ "\"DOCKER_HOST\"" ]
[]
[ "DOCKER_HOST" ]
[]
["DOCKER_HOST"]
go
1
0
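setupDockerConfigWithAuth above writes a Docker `config.json` whose `auth` field is the base64 encoding of `user:pass`. A minimal sketch of building that document; the registry address and credentials are placeholders:

```go
package main

import (
	"encoding/base64"
	"encoding/json"
	"fmt"
)

// authEntry builds the {"auths": {...}} document that Docker clients read,
// with the credential encoded as base64("user:pass") as in the helper above.
func authEntry(registry, user, pass string) ([]byte, error) {
	auth := base64.StdEncoding.EncodeToString([]byte(user + ":" + pass))
	doc := map[string]map[string]map[string]string{
		"auths": {registry: {"auth": auth}},
	}
	return json.MarshalIndent(doc, "", "  ")
}

func main() {
	b, err := authEntry("localhost:5000", "testuser", "testpass")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```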
manage.py
#!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): """Run administrative tasks.""" os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'kite_runner.settings.local') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main()
[]
[]
[]
[]
[]
python
0
0
tgik-controller.go
package main import ( "flag" "fmt" "log" "os" "time" "github.com/jbeda/tgik-controller/version" "k8s.io/client-go/informers" "k8s.io/client-go/kubernetes" "k8s.io/client-go/rest" "k8s.io/client-go/tools/clientcmd" ) func main() { log.Printf("tgik-controller version %s", version.VERSION) kubeconfig := "" flag.StringVar(&kubeconfig, "kubeconfig", kubeconfig, "kubeconfig file") flag.Parse() if kubeconfig == "" { kubeconfig = os.Getenv("KUBECONFIG") } var ( config *rest.Config err error ) if kubeconfig != "" { config, err = clientcmd.BuildConfigFromFlags("", kubeconfig) } else { config, err = rest.InClusterConfig() } if err != nil { fmt.Fprintf(os.Stderr, "error creating client: %v", err) os.Exit(1) } client := kubernetes.NewForConfigOrDie(config) sharedInformers := informers.NewSharedInformerFactory(client, 10*time.Minute) tgikController := NewTGIKController(client, sharedInformers.Core().V1().Secrets(), sharedInformers.Core().V1().Namespaces()) sharedInformers.Start(nil) tgikController.Run(nil) }
[ "\"KUBECONFIG\"" ]
[]
[ "KUBECONFIG" ]
[]
["KUBECONFIG"]
go
1
0
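The controller's `main` resolves client configuration in three steps: the `-kubeconfig` flag, then `$KUBECONFIG`, then in-cluster config. A stripped-down sketch of just that ordering, with the in-cluster step stubbed to a print:

```go
package main

import (
	"flag"
	"fmt"
	"os"
)

// resolveKubeconfig reproduces the fallback order used above:
// the explicit flag value wins, then $KUBECONFIG, then "" (in-cluster).
func resolveKubeconfig(flagValue string) string {
	if flagValue != "" {
		return flagValue
	}
	return os.Getenv("KUBECONFIG")
}

func main() {
	kubeconfig := flag.String("kubeconfig", "", "kubeconfig file")
	flag.Parse()
	if path := resolveKubeconfig(*kubeconfig); path != "" {
		fmt.Println("building config from file:", path)
	} else {
		fmt.Println("no path given; would fall back to in-cluster config")
	}
}
```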
core/polyaxon/connections/aws/base.py
#!/usr/bin/python # # Copyright 2018-2021 Polyaxon, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from typing import List, Optional, Union import boto3 from polyaxon.connections.base import BaseService from polyaxon.connections.reader import get_connection_context_path, read_keys from polyaxon.exceptions import PolyaxonConnectionError def get_aws_access_key_id( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_ACCESS_KEY_ID"] return read_keys(context_path=context_path, keys=keys) def get_aws_secret_access_key( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_SECRET_ACCESS_KEY"] return read_keys(context_path=context_path, keys=keys) def get_aws_security_token( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_SECURITY_TOKEN"] return read_keys(context_path=context_path, keys=keys) def get_region( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_REGION"] return read_keys(context_path=context_path, keys=keys) def get_endpoint_url( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_ENDPOINT_URL"] return read_keys(context_path=context_path, keys=keys) def get_aws_use_ssl( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_USE_SSL"] return read_keys(context_path=context_path, keys=keys) def get_aws_verify_ssl( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_VERIFY_SSL"] return read_keys(context_path=context_path, keys=keys) def get_aws_legacy_api( keys: Optional[Union[str, List[str]]] = None, context_path: Optional[str] = None ): keys = keys or ["AWS_LEGACY_API"] return read_keys(context_path=context_path, keys=keys) def get_legacy_api(legacy_api=False): legacy_api = legacy_api or get_aws_legacy_api() return legacy_api def get_aws_session( aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, context_path=None, ): aws_access_key_id = aws_access_key_id or get_aws_access_key_id( context_path=context_path ) aws_secret_access_key = aws_secret_access_key or get_aws_secret_access_key( context_path=context_path ) aws_session_token = aws_session_token or get_aws_security_token( context_path=context_path ) region_name = region_name or get_region(context_path=context_path) return boto3.session.Session( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, ) def get_aws_client( client_type, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, aws_use_ssl=True, aws_verify_ssl=None, context_path=None, ): session = get_aws_session( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, 
aws_session_token=aws_session_token, region_name=region_name, context_path=context_path, ) endpoint_url = endpoint_url or get_endpoint_url(context_path=context_path) aws_use_ssl = aws_use_ssl or get_aws_use_ssl(context_path=context_path) if aws_verify_ssl is None: aws_verify_ssl = get_aws_verify_ssl(context_path=context_path) else: aws_verify_ssl = aws_verify_ssl return session.client( client_type, endpoint_url=endpoint_url, use_ssl=aws_use_ssl, verify=aws_verify_ssl, ) def get_aws_resource( resource_type, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, aws_use_ssl=True, aws_verify_ssl=None, context_path=None, ): session = get_aws_session( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, context_path=context_path, ) endpoint_url = endpoint_url or get_endpoint_url(context_path=context_path) aws_use_ssl = aws_use_ssl or get_aws_use_ssl(context_path=context_path) if aws_verify_ssl is None: aws_verify_ssl = get_aws_verify_ssl(context_path=context_path) else: aws_verify_ssl = aws_verify_ssl return session.resource( resource_type, endpoint_url=endpoint_url, use_ssl=aws_use_ssl, verify=aws_verify_ssl, ) class AWSService(BaseService): ENCRYPTION = "AES256" RESOURCE_TYPE = "" def __init__(self, connection=None, resource=None, **kwargs): super().__init__(connection=connection, **kwargs) if not self.RESOURCE_TYPE: raise PolyaxonConnectionError("Aws connection requires a RESOURCE_TYPE") self._resource = resource self._encoding = kwargs.get("encoding", "utf-8") self._endpoint_url = ( kwargs.get("endpoint_url") or kwargs.get("aws_endpoint_url") or kwargs.get("AWS_ENDPOINT_URL") ) self._aws_access_key_id = ( kwargs.get("access_key_id") or kwargs.get("aws_access_key_id") or kwargs.get("AWS_ACCESS_KEY_ID") ) self._aws_secret_access_key = ( kwargs.get("secret_access_key") or kwargs.get("aws_secret_access_key") or kwargs.get("AWS_SECRET_ACCESS_KEY") ) self._aws_session_token = ( kwargs.get("session_token") or kwargs.get("aws_session_token") or kwargs.get("AWS_SECURITY_TOKEN") ) self._region_name = ( kwargs.get("region") or kwargs.get("aws_region") or kwargs.get("AWS_REGION") ) self._aws_verify_ssl = kwargs.get( "verify_ssl", kwargs.get("aws_verify_ssl", kwargs.get("AWS_VERIFY_SSL", None)), ) self._aws_use_ssl = ( kwargs.get("use_ssl") or kwargs.get("aws_use_ssl") or kwargs.get("AWS_USE_SSL") ) self._aws_legacy_api = ( kwargs.get("legacy_api") or kwargs.get("aws_legacy_api") or kwargs.get("AWS_LEGACY_API") ) def set_connection( self, connection=None, connection_type=None, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, aws_use_ssl=True, aws_verify_ssl=None, ): """ Sets a new connection. Args: endpoint_url: `str`. The complete URL to use for the constructed client. aws_access_key_id: `str`. The access key to use when creating the client. aws_secret_access_key: `str`. The secret key to use when creating the client. aws_session_token: `str`. The session token to use when creating the client. region_name: `str`. The name of the region associated with the client. A client is associated with a single region. 
Returns: Service client instance """ if connection: self._connection = connection return connection_type = connection_type or self._connection_type connection_name = connection_type.name if connection_type else None context_path = get_connection_context_path(name=connection_name) self._connection = get_aws_client( self.RESOURCE_TYPE or self.resource, endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, aws_use_ssl=aws_use_ssl, aws_verify_ssl=aws_verify_ssl, context_path=context_path, ) def set_env_vars(self): if self._endpoint_url: os.environ["AWS_ENDPOINT_URL"] = self._endpoint_url if self._aws_access_key_id: os.environ["AWS_ACCESS_KEY_ID"] = self._aws_access_key_id if self._aws_secret_access_key: os.environ["AWS_SECRET_ACCESS_KEY"] = self._aws_secret_access_key if self._aws_session_token: os.environ["AWS_SECURITY_TOKEN"] = self._aws_session_token if self._region_name: os.environ["AWS_REGION"] = self._region_name if self._aws_use_ssl is not None: os.environ["AWS_USE_SSL"] = self._aws_use_ssl if self._aws_verify_ssl is not None: os.environ["AWS_VERIFY_SSL"] = self._aws_verify_ssl if self._aws_legacy_api: os.environ["AWS_LEGACY_API"] = self._aws_legacy_api @property def resource(self): if self._resource is None: self.set_resource( endpoint_url=self._endpoint_url, aws_access_key_id=self._aws_access_key_id, aws_secret_access_key=self._aws_secret_access_key, aws_session_token=self._aws_session_token, region_name=self._region_name, ) return self._resource def set_resource( self, connection_type=None, endpoint_url=None, aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None, region_name=None, ): """ Sets a new resource. Args: connection_type: V1ConnectionType. The connection name to resolve. endpoint_url: `str`. The complete URL to use for the constructed client. aws_access_key_id: `str`. The access key to use when creating the client. aws_secret_access_key: `str`. The secret key to use when creating the client. aws_session_token: `str`. The session token to use when creating the client. region_name: `str`. The name of the region associated with the client. A client is associated with a single region. Returns: Service resource instance """ connection_type = connection_type or self._connection_type connection_name = connection_type.name if connection_type else None context_path = get_connection_context_path(name=connection_name) self._resource = get_aws_resource( self.RESOURCE_TYPE or self.resource, endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region_name, context_path=context_path, )
[]
[]
[ "AWS_USE_SSL", "AWS_SECRET_ACCESS_KEY", "AWS_REGION", "AWS_VERIFY_SSL", "AWS_LEGACY_API", "AWS_ENDPOINT_URL", "AWS_ACCESS_KEY_ID", "AWS_SECURITY_TOKEN" ]
[]
["AWS_USE_SSL", "AWS_SECRET_ACCESS_KEY", "AWS_REGION", "AWS_VERIFY_SSL", "AWS_LEGACY_API", "AWS_ENDPOINT_URL", "AWS_ACCESS_KEY_ID", "AWS_SECURITY_TOKEN"]
python
8
0
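Each getter in the Polyaxon module above prefers an explicit argument and only then falls back to configured keys. A Go sketch of the same explicit-arg-then-fallback layering, using environment variables as the fallback store; `resolve` is an illustrative helper, not Polyaxon's API:

```go
package main

import (
	"fmt"
	"os"
)

// resolve returns explicit if non-empty, otherwise the first
// non-empty value among the named env keys — the same layering
// the AWS getters above apply per setting.
func resolve(explicit string, keys ...string) string {
	if explicit != "" {
		return explicit
	}
	for _, k := range keys {
		if v := os.Getenv(k); v != "" {
			return v
		}
	}
	return ""
}

func main() {
	os.Setenv("AWS_REGION", "us-east-1")
	fmt.Println(resolve("", "AWS_REGION"))          // falls back to env
	fmt.Println(resolve("eu-west-1", "AWS_REGION")) // explicit wins
}
```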
test/testhelper/settings/settings.go
// Copyright © 2021 sealos. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package settings import ( "os" "time" ) // init test params and settings func init() { defaultWaiteTime := os.Getenv("DEFAULT_WAITE_TIME") if defaultWaiteTime == "" { DefaultWaiteTime = 300 * time.Second } else { DefaultWaiteTime, _ = time.ParseDuration(defaultWaiteTime) } maxWaiteTime := os.Getenv("MAX_WAITE_TIME") if maxWaiteTime == "" { MaxWaiteTime = 2400 * time.Second } else { MaxWaiteTime, _ = time.ParseDuration(maxWaiteTime) } pollingInterval := os.Getenv("DEFAULT_POLLING_INTERVAL") if pollingInterval == "" { DefaultPollingInterval = 10 } else { DefaultPollingInterval, _ = time.ParseDuration(pollingInterval) } }
[ "\"DEFAULT_WAITE_TIME\"", "\"MAX_WAITE_TIME\"", "\"DEFAULT_POLLING_INTERVAL\"" ]
[]
[ "DEFAULT_POLLING_INTERVAL", "MAX_WAITE_TIME", "DEFAULT_WAITE_TIME" ]
[]
["DEFAULT_POLLING_INTERVAL", "MAX_WAITE_TIME", "DEFAULT_WAITE_TIME"]
go
3
0
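The `init` above repeats the same read-then-parse-or-default step per variable. A small helper that captures the pattern once; `durationFromEnv` is an illustrative name, not part of the row's package:

```go
package main

import (
	"fmt"
	"os"
	"time"
)

// durationFromEnv reads the named variable and parses it as a
// time.Duration, returning def when it is unset or unparsable.
func durationFromEnv(name string, def time.Duration) time.Duration {
	raw := os.Getenv(name)
	if raw == "" {
		return def
	}
	d, err := time.ParseDuration(raw)
	if err != nil {
		return def
	}
	return d
}

func main() {
	fmt.Println(durationFromEnv("DEFAULT_WAITE_TIME", 300*time.Second))
	fmt.Println(durationFromEnv("MAX_WAITE_TIME", 2400*time.Second))
}
```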
wienerschnitzelgemeinschaft/src/Christof/models/GAPNet/9_crop/1/train1_lm.py
import os, sys #os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" os.environ["CUDA_VISIBLE_DEVICES"]="0" import numpy as np import pandas as pd import matplotlib.pyplot as plt import skimage.io from skimage.transform import resize from imgaug import augmenters as iaa from tqdm import tqdm import PIL from PIL import Image import cv2 from sklearn.utils import class_weight, shuffle from ml_stratifiers import MultilabelStratifiedKFold import warnings warnings.filterwarnings("ignore") from classification_models.resnet.models import ResNet18 import albumentations as A MODEL_PATH = 'Christof/models/GAPNet/9_crop/' # a) added batchnorm and cut out one Dense 256 layer # b) a) + added 16 size layer to GAP exp_suffix = '_lm' SIZE = 256 # Load dataset info path_to_train = 'Christof/assets/train_rgb_512/' data = pd.read_csv('Christof/assets/train.csv') normal_aug = A.Compose([#A.Rotate((0,30),p=0.75), A.RandomRotate90(p=1), A.HorizontalFlip(p=0.5), #A.RandomBrightness(0.05), #A.RandomContrast(0.05), A.IAAAffine(translate_percent=10,rotate=45,shear=10, scale=(0.9,1.1)), #A.RandomAffine(degrees=45, translate=(0.1,0.1), shear=10, scale=(0.9,1.1)) A.Normalize(mean=(0.08069, 0.05258, 0.05487), std=(0.1300, 0.0879, 0.1386), max_pixel_value=255.) ]) normal_aug_ext = A.Compose([#A.Rotate((0,30),p=0.75), A.RandomRotate90(p=1), A.HorizontalFlip(p=0.5), #A.RandomBrightness(0.05), #A.RandomContrast(0.05), A.IAAAffine(translate_percent=10,rotate=45,shear=10, scale=(0.9,1.1)), #A.RandomAffine(degrees=45, translate=(0.1,0.1), shear=10, scale=(0.9,1.1)) A.Normalize(mean=(0.1174382, 0.06798691, 0.06592218), std=(0.16392466 ,0.10036821, 0.16703453), max_pixel_value=255.) ]) val_aug = A.Compose([A.HorizontalFlip(p=0.5), A.Normalize(mean=(0.08069, 0.05258, 0.05487), std=(0.1300, 0.0879, 0.1386), max_pixel_value=255.)]) from torchvision import transforms eps = 0.004 desired = { 0: 0.36239782, 1: 0.043841336, 2: 0.075268817, 3: 0.059322034, 4: 0.075268817, 5: 0.075268817, 6: 0.043841336, 7: 0.075268817, 8: eps, 9: eps, 10: eps, 11: 0.043841336, 12: 0.043841336, 13: 0.014198783, 14: 0.043841336, 15: eps, 16: 0.028806584, 17: 0.014198783, 18: 0.028806584, 19: 0.059322034, 20: eps, 21: 0.126126126, 22: 0.028806584, 23: 0.075268817, 24: eps, 25: 0.222493888, 26: 0.028806584, 27: eps } sampling_weights = [ 2.6473, 35.0588 , 8.2069 , 19.3439 , 16.0145 , 13.3245 , 32.8644, 10.607 , 551.3 , 501.1818 , 787.5714 , 25.8523 , 39.0301, 51.644, 30.0846 ,1470.1333 , 62.8262, 190.1034 , 39.3084 , 23.2126 , 170.9457 , 8.2592, 33.2609 , 9.6889 , 92.2678 , 4.19 , 99.3333 ,3150.2857] sample_weights_ext = [ 2.6728, 41.1617 , 10.3068 , 42.4172 , 22.9729 , 21.9808 , 26.8267 , 11.5358 , 474.8659 , 486.7375 , 492.8987 , 66.963 , 50.2763 , 82.7609, 45.0683, 1854.2381, 100.3582 , 319.1721 , 76.5762 , 33.424 , 272.3007, 7.3664 , 39.4319 , 10.239 , 734.6981 , 2.548 , 196.6616 , 638.3443] train_dataset_info = [] for name, labels in zip(data['Id'], data['Target'].str.split(' ')): path = os.path.join(path_to_train, name) labs = np.array([int(label) for label in labels]) bucket_ind = np.argmin([desired[l] for l in labs]) bucket = labs[bucket_ind] weight = sampling_weights[bucket] train_dataset_info.append({ 'path': path, 'labels': labs, 'weight':weight}) train_dataset_info = np.array(train_dataset_info) data_ext1 = pd.read_csv('Christof/assets/train_ext1.csv') path_to_train_ext1 = 'Christof/assets/ext_tomomi/' train_dataset_info_ext1 = [] for name, labels in zip(data_ext1['Id'], data_ext1['Target'].str.split(' ')): path = os.path.join(path_to_train_ext1, 
name[:-5]) labs = np.array([int(label) for label in labels]) bucket_ind = np.argmin([desired[l] for l in labs]) bucket = labs[bucket_ind] weight = sample_weights_ext[bucket] train_dataset_info_ext1.append({ 'path':path, 'labels': labs, 'weight':weight}) train_dataset_info_ext1 = np.array(train_dataset_info_ext1) counts = np.zeros(28) for item in train_dataset_info: for l in item['labels']: counts[l] = counts[l] + 1 counts = counts / len(train_dataset_info) rare_classes = np.where(counts < 0.005) #rare_dataset_info = np.array([item for item in train_dataset_info if np.isin(item['labels'], rare_classes).any()]) #train_dataset_info = rare_dataset_info from torch.utils.data.sampler import WeightedRandomSampler from classification_models.resnet import preprocess_input class data_generator: @staticmethod def create_train(dataset_info, batch_size, shape, augument=None, weighted_sample = True): assert shape[2] == 3 if weighted_sample: p = np.array([item['weight'] for item in dataset_info]) p = p/np.sum(p) else: p = None while True: #dataset_info = shuffle(dataset_info) for start in range(0, len(dataset_info), batch_size): #end = min(start + batch_size, len(dataset_info)) batch_images = [] X_train_batch = np.random.choice(dataset_info,batch_size,p=p) batch_labels = np.zeros((len(X_train_batch), 28)) for i in range(len(X_train_batch)): image = data_generator.load_image(X_train_batch[i]['path'], shape) #image = preprocess_input(image) #rare = np.isin(X_train_batch[i]['labels'], rare_classes).any() if augument: image = data_generator.augment(augument,image) batch_images.append(image) batch_labels[i][X_train_batch[i]['labels']] = 1 yield np.array(batch_images, np.float32), batch_labels @staticmethod def load_image(path, shape): image = cv2.imread(path + '.png', cv2.IMREAD_UNCHANGED) image = image[128:384,:256,:] return image @staticmethod def augment(aug,image): image_aug = aug(image=image)['image'] return image_aug from keras.preprocessing.image import ImageDataGenerator from keras.models import Sequential, load_model from keras.layers import Activation, Dropout, Flatten, Dense, GlobalAveragePooling2D, Concatenate, Input, Conv2D from keras.applications.inception_v3 import InceptionV3 from keras.callbacks import ModelCheckpoint from keras import metrics from keras.optimizers import Adam from keras import backend as K import keras from keras.models import Model from keras.layers import Layer, InputSpec from keras import initializers from keras.constraints import Constraint import keras.backend as K from keras.layers import Reshape, Permute, multiply def squeeze_excite_block(input, ratio=16): init = input channel_axis = 1 if K.image_data_format() == "channels_first" else -1 filters = init._keras_shape[channel_axis] se_shape = (1, 1, filters) se = GlobalAveragePooling2D()(init) se = Reshape(se_shape)(se) se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se) se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se) if K.image_data_format() == 'channels_first': se = Permute((3, 1, 2))(se) x = multiply([init, se]) return x def encoder(backbone): c0 = backbone.get_layer('relu0').output c1 = backbone.get_layer('stage2_unit1_relu1').get_output_at(0) # 128 c2 = backbone.get_layer('stage3_unit1_relu1').output # 63 c3 = backbone.get_layer('stage4_unit1_relu1').output # 32 enc_out = backbone.get_layer('relu1').output # 16 #enc_out = backbone.output # 8 short_cuts = [c0,c1,c2,c3] return enc_out, short_cuts from keras.layers import 
BatchNormalization def create_model(input_shape, n_out): input_tensor = Input(shape=(SIZE, SIZE, 3)) #bn = BatchNormalization()(input_tensor) #conv = Conv2D(3,(3,3),padding='same',activation='relu')(bn) base_model = ResNet18(include_top=False, weights='imagenet', input_shape=(SIZE, SIZE, 3),input_tensor=input_tensor) enc_out, short_cuts = encoder(base_model) x0 = GlobalAveragePooling2D()(squeeze_excite_block(enc_out)) x1 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[0])) x2 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[1])) x3 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[2])) x4 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[3])) x = Concatenate()([x0,x1,x2,x3,x4]) x = BatchNormalization()(x) x = Dropout(0.5)(x) x = Dense(256, activation='relu')(x) #x = BatchNormalization()(x) #x = Dropout(0.5)(x) #x = Dense(256, activation='relu')(x) x = BatchNormalization()(x) x = Dropout(0.5)(x) output = Dense(n_out, activation='sigmoid')(x) model = Model(input_tensor, output) # transfer imagenet weights #res_img = ResNet34(include_top=False, weights='imagenet', input_shape=(SIZE, SIZE, 3)) #offset = 2 #for i, l in enumerate(base_model.layers[offset+1:]): # l.set_weights(res_img.layers[i + 1].get_weights()) return model # create callbacks list from keras.callbacks import ModelCheckpoint, LearningRateScheduler, EarlyStopping, ReduceLROnPlateau, TensorBoard from keras_callbacks import F1Metric #from keras_metrics import f1, f1_02 #from keras_losses import f1_loss epochs = [20,150] batch_size = 32 # split data into train, valid mskf = MultilabelStratifiedKFold(n_splits=5,shuffle=True,random_state=18) y = np.zeros((len(train_dataset_info), 28)) for i in range(len(train_dataset_info)): y[i][train_dataset_info[i]['labels']] = 1 mskf.get_n_splits(train_dataset_info, y) kf = mskf.split(train_dataset_info, y) fold_id = 1 for f in range(fold_id): train_indexes, valid_indexes = next(kf) train_indexes, valid_indexes = next(kf) train_generator_orig = data_generator.create_train(train_dataset_info[train_indexes], batch_size, (SIZE, SIZE, 3), augument=normal_aug) train_generator_ext1 = data_generator.create_train(train_dataset_info_ext1, batch_size, (SIZE, SIZE, 3), augument=normal_aug_ext) import random def gen(): while True: x = random.random() if x > 0.5: batch = next(train_generator_orig) else: batch = next(train_generator_ext1) yield batch train_generator = gen() validation_generator = data_generator.create_train(train_dataset_info[valid_indexes], batch_size, (SIZE, SIZE, 3), augument=val_aug, weighted_sample=False) checkpoint = ModelCheckpoint(MODEL_PATH + 'model_loss{}.h5'.format(exp_suffix), monitor='val_loss', verbose=1, save_best_only=True, mode='min', save_weights_only=True) tensorboard = TensorBoard(MODEL_PATH + 'logs{}'.format(fold_id) + '{}'.format(exp_suffix) + '/') # reduceLROnPlat = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3, # verbose=1, mode='auto', epsilon=0.0001) # early = EarlyStopping(monitor="val_loss", # mode="min", # patience=6) #f1_metric = F1Metric(validation_generator2,2*len(valid_indexes)//batch_size,batch_size,28) #2 times val because of val_aug nb_epochs = epochs[0] nb_cycles = 1 init_lr = 0.0005 def _cosine_anneal_schedule(t): cos_inner = np.pi * (t % (nb_epochs // nb_cycles)) cos_inner /= nb_epochs// nb_cycles cos_out = np.cos(cos_inner) + 1 return float(init_lr / 2 * cos_out) lr_schedule = LearningRateScheduler(_cosine_anneal_schedule,verbose=True) callbacks_list = [lr_schedule, tensorboard] # warm up model model = 
create_model( input_shape=(SIZE, SIZE, 3), n_out=28) POS_WEIGHT = 10 # multiplier for positive targets, needs to be tuned import tensorflow as tf import keras.backend.tensorflow_backend as tfb def weighted_binary_crossentropy(target, output): """ Weighted binary crossentropy between an output tensor and a target tensor. POS_WEIGHT is used as a multiplier for the positive targets. Combination of the following functions: * keras.losses.binary_crossentropy * keras.backend.tensorflow_backend.binary_crossentropy * tf.nn.weighted_cross_entropy_with_logits """ # transform back to logits _epsilon = tfb._to_tensor(tfb.epsilon(), output.dtype.base_dtype) #_epsilon = K.epsilon() output = tf.clip_by_value(output, _epsilon, 1 - _epsilon) output = tf.log(output / (1 - output)) # compute weighted loss loss = tf.nn.weighted_cross_entropy_with_logits(targets=target, logits=output, pos_weight=POS_WEIGHT) return tf.reduce_mean(loss, axis=-1) import tensorflow as tf from tensorflow.python.framework import ops from functools import reduce def binaryRound(x): """ Rounds a tensor whose values are in [0,1] to a tensor with values in {0, 1}, using the straight through estimator for the gradient. """ g = tf.get_default_graph() with ops.name_scope("BinaryRound") as name: with g.gradient_override_map({"Round": "Identity"}): return tf.round(x, name=name) # For Tensorflow v0.11 and below use: #with g.gradient_override_map({"Floor": "Identity"}): # return tf.round(x, name=name) def brian_f1(y_true, y_pred): y_pred = binaryRound(y_pred) tp = K.sum(K.cast(y_true*y_pred, 'float'), axis=0) tn = K.sum(K.cast((1-y_true)*(1-y_pred), 'float'), axis=0) fp = K.sum(K.cast((1-y_true)*y_pred, 'float'), axis=0) fn = K.sum(K.cast(y_true*(1-y_pred), 'float'), axis=0) p = tp / (tp + fp + K.epsilon()) r = tp / (tp + fn + K.epsilon()) f1 = 2*p*r / (p+r+K.epsilon()) f1 = tf.where(tf.is_nan(f1), tf.zeros_like(f1), f1) return K.mean(f1) def brian_f1_loss(y_true, y_pred): return 1- brian_f1(y_true, y_pred) def custom_loss(y_true, y_pred): return 4*weighted_binary_crossentropy(y_true,y_pred) - K.log(brian_f1(y_true,y_pred)) # train all layers from keras.metrics import binary_accuracy # model.compile(loss=custom_loss, # optimizer=Adam(lr=5e-4), # metrics=[binary_accuracy,brian_f1]) # model.fit_generator( # train_generator, # steps_per_epoch=np.ceil(float(2*len(train_indexes)) / float(batch_size)), # #validation_data=validation_generator, # #validation_steps=2*np.ceil(float(len(valid_indexes)) / float(batch_size)), # epochs=epochs[0], # verbose=1, # callbacks=callbacks_list) # model.save_weights(MODEL_PATH + 'model_loss{}.h5'.format(exp_suffix)) model.load_weights(MODEL_PATH + 'model_loss{}.h5'.format(exp_suffix)) submit = pd.read_csv('Christof/assets/sample_submission.csv') tta = 8 draw_predict = np.zeros((len(submit['Id']), 28)) for i, name in tqdm(enumerate(submit['Id'])): path = os.path.join('Christof/assets/test_rgb_512/', name) image = data_generator.load_image(path, (SIZE, SIZE, 3)) images = [data_generator.augment(normal_aug, image) for _ in range(tta)] tta_predicts = model.predict(np.array(images)) draw_predict[i] = np.median(tta_predicts,axis = 0) np.save(MODEL_PATH + f'pred{fold_id}{exp_suffix}.npy',draw_predict) # custom thresholds to match lb proportions thresholds = np.linspace(0.95, 0.05, 101) pred = draw_predict.copy() for j in tqdm(range(pred.shape[1])): for t in thresholds: pred[:, j] = (draw_predict[:, j] > t).astype(int) prop = np.mean(pred[:, j]) if prop >= desired[j]: break print(j, '%3.2f' % t, '%6.4f' % desired[j], 
'%6.4f' % prop, j, ) print(pred[:5].astype(int)) label_predict = [np.arange(28)[score_predict == 1] for score_predict in pred] str_predict_label = [' '.join(str(l) for l in lp) for lp in label_predict] submit['Predicted'] = str_predict_label # np.save('draw_predict_InceptionV3.npy', score_predict) submit.to_csv(MODEL_PATH + 'submission_loss{}_lb_dist_adjusted_8tta.csv'.format(exp_suffix), index=False) from Christof.utils import f1_sub best_sub = pd.read_csv('ens18.csv') f1_sub(best_sub,submit) best_sub = pd.read_csv('ens56d.csv') f1_sub(best_sub,submit) # submit2 = pd.read_csv('Christof/models/GAPNet/11/submission_loss_0_lb_dist_adjusted_8tta.csv') # f1_sub(best_sub,submit2) # # submit2 = pd.read_csv('Christof/models/GAPNet/11_tests_on_clr/submission_loss_1in20_0005_2c_lb_dist_adjusted_8tta.csv') # f1_sub(best_sub,submit2) # # submit2 = pd.read_csv('Christof/models/GAPNet/11_tests_on_clr/submission_loss_1in20_0005_lb_dist_adjusted_8tta.csv') # f1_sub(best_sub,submit2)
[]
[]
[ "CUDA_DEVICE_ORDER", "CUDA_VISIBLE_DEVICES" ]
[]
["CUDA_DEVICE_ORDER", "CUDA_VISIBLE_DEVICES"]
python
2
0
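`_cosine_anneal_schedule` in the training script above maps epoch `t` to a learning rate on a half-cosine, decaying from `init_lr` toward 0 within each cycle. The same arithmetic as a standalone Go function, with integer division mirroring Python's `//`:

```go
package main

import (
	"fmt"
	"math"
)

// cosineAnneal mirrors _cosine_anneal_schedule above: within each
// cycle of length epochs/cycles, the rate follows initLR/2 * (cos(pi*t') + 1),
// where t' is the position within the cycle normalized to [0, 1).
func cosineAnneal(t, epochs, cycles int, initLR float64) float64 {
	cycleLen := epochs / cycles
	inner := math.Pi * float64(t%cycleLen) / float64(cycleLen)
	return initLR / 2 * (math.Cos(inner) + 1)
}

func main() {
	// Same settings as the script's warm-up phase: 20 epochs, 1 cycle, lr 0.0005.
	for t := 0; t < 20; t += 5 {
		fmt.Printf("epoch %2d lr %.6f\n", t, cosineAnneal(t, 20, 1, 0.0005))
	}
}
```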
bin/basicswap_prepare.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2019-2020 tecnovert # Distributed under the MIT software license, see the accompanying # file LICENSE or http://www.opensource.org/licenses/mit-license.php. import os import sys import json import mmap import stat import gnupg import signal import hashlib import tarfile import zipfile import logging import platform import urllib.parse from urllib.request import urlretrieve import basicswap.config as cfg from basicswap.rpc import ( callrpc_cli, waitForRPC, ) from basicswap.basicswap import BasicSwap from basicswap.chainparams import Coins from bin.basicswap_run import startDaemon, startXmrWalletDaemon if platform.system() == 'Darwin': BIN_ARCH = 'osx64.tar.gz' elif platform.system() == 'Windows': BIN_ARCH = 'win64.zip' else: BIN_ARCH = 'x86_64-linux-gnu.tar.gz' known_coins = { 'particl': '0.19.1.2', 'litecoin': '0.18.1', 'bitcoin': '0.20.1', 'namecoin': '0.18.0', 'monero': '0.17.1.5', } logger = logging.getLogger() logger.level = logging.DEBUG if not len(logger.handlers): logger.addHandler(logging.StreamHandler(sys.stdout)) XMR_RPC_HOST = os.getenv('XMR_RPC_HOST', 'localhost') BASE_XMR_RPC_PORT = int(os.getenv('BASE_XMR_RPC_PORT', 29798)) BASE_XMR_ZMQ_PORT = int(os.getenv('BASE_XMR_ZMQ_PORT', 30898)) BASE_XMR_WALLET_PORT = int(os.getenv('BASE_XMR_WALLET_PORT', 29998)) XMR_WALLET_RPC_USER = os.getenv('XMR_WALLET_RPC_USER', 'xmr_wallet_user') XMR_WALLET_RPC_PWD = os.getenv('XMR_WALLET_RPC_PWD', 'xmr_wallet_pwd') DEFAULT_XMR_RESTORE_HEIGHT = 2245107 def make_reporthook(): read = 0 # Number of bytes read so far last_percent_str = '' def reporthook(blocknum, blocksize, totalsize): nonlocal read nonlocal last_percent_str read += blocksize if totalsize > 0: percent_str = '%5.1f%%' % (read * 1e2 / totalsize) if percent_str != last_percent_str: logger.info(percent_str) last_percent_str = percent_str else: logger.info('read %d' % (read,)) return reporthook def downloadFile(url, path): logger.info('Downloading file %s', url) opener = urllib.request.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0')] urllib.request.install_opener(opener) urlretrieve(url, path, make_reporthook()) def extractCore(coin, version, settings, bin_dir, release_path): logger.info('extractCore %s v%s', coin, version) bins = [coin + 'd', coin + '-cli', coin + '-tx'] if coin == 'monero': with tarfile.open(release_path) as ft: for member in ft.getmembers(): if member.isdir(): continue out_path = os.path.join(bin_dir, os.path.basename(member.name)) fi = ft.extractfile(member) with open(out_path, 'wb') as fout: fout.write(fi.read()) fi.close() os.chmod(out_path, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH) return bins = [coin + 'd', coin + '-cli', coin + '-tx'] versions = version.split('.') if coin == 'particl' and int(versions[1]) >= 19: bins.append(coin + '-wallet') if 'win32' in BIN_ARCH or 'win64' in BIN_ARCH: with zipfile.ZipFile(release_path) as fz: for b in bins: b += '.exe' out_path = os.path.join(bin_dir, b) with open(out_path, 'wb') as fout: fout.write(fz.read('{}-{}/bin/{}'.format(coin, version, b))) os.chmod(out_path, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH) else: with tarfile.open(release_path) as ft: for b in bins: out_path = os.path.join(bin_dir, b) fi = ft.extractfile('{}-{}/bin/{}'.format(coin, version, b)) with open(out_path, 'wb') as fout: fout.write(fi.read()) fi.close() os.chmod(out_path, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH) def prepareCore(coin, version, settings, data_dir): logger.info('prepareCore %s v%s', coin, version) 
bin_dir = os.path.expanduser(settings['chainclients'][coin]['bindir']) if not os.path.exists(bin_dir): os.makedirs(bin_dir) if 'osx' in BIN_ARCH: os_dir_name = 'osx-unsigned' os_name = 'osx' elif 'win32' in BIN_ARCH or 'win64' in BIN_ARCH: os_dir_name = 'win-unsigned' os_name = 'win' else: os_dir_name = 'linux' os_name = 'linux' release_filename = '{}-{}-{}'.format(coin, version, BIN_ARCH) if coin == 'monero': release_url = 'https://downloads.getmonero.org/cli/monero-linux-x64-v{}.tar.bz2'.format(version) release_path = os.path.join(bin_dir, release_filename) if not os.path.exists(release_path): downloadFile(release_url, release_path) # TODO: How to get version specific hashes assert_filename = 'monero-{}-hashes.txt'.format(version) assert_url = 'https://www.getmonero.org/downloads/hashes.txt' assert_path = os.path.join(bin_dir, assert_filename) if not os.path.exists(assert_path): downloadFile(assert_url, assert_path) else: release_filename = '{}-{}-{}'.format(coin, version, BIN_ARCH) if coin == 'particl': signing_key_name = 'tecnovert' release_url = 'https://github.com/tecnovert/particl-core/releases/download/v{}/{}'.format(version, release_filename) assert_filename = '{}-{}-{}-build.assert'.format(coin, os_name, version) assert_url = 'https://raw.githubusercontent.com/tecnovert/gitian.sigs/master/%s-%s/%s/%s' % (version, os_dir_name, signing_key_name, assert_filename) elif coin == 'litecoin': signing_key_name = 'thrasher' release_url = 'https://download.litecoin.org/litecoin-{}/{}/{}'.format(version, os_name, release_filename) assert_filename = '{}-{}-{}-build.assert'.format(coin, os_name, version.rsplit('.', 1)[0]) assert_url = 'https://raw.githubusercontent.com/litecoin-project/gitian.sigs.ltc/master/%s-%s/%s/%s' % (version, os_dir_name, signing_key_name, assert_filename) elif coin == 'bitcoin': signing_key_name = 'laanwj' release_url = 'https://bitcoincore.org/bin/bitcoin-core-{}/{}'.format(version, release_filename) assert_filename = '{}-core-{}-{}-build.assert'.format(coin, os_name, '.'.join(version.split('.')[:2])) assert_url = 'https://raw.githubusercontent.com/bitcoin-core/gitian.sigs/master/%s-%s/%s/%s' % (version, os_dir_name, signing_key_name, assert_filename) elif coin == 'namecoin': signing_key_name = 'JeremyRand' release_url = 'https://beta.namecoin.org/files/namecoin-core/namecoin-core-{}/{}'.format(version, release_filename) assert_filename = '{}-{}-{}-build.assert'.format(coin, os_name, version.rsplit('.', 1)[0]) assert_url = 'https://raw.githubusercontent.com/namecoin/gitian.sigs/master/%s-%s/%s/%s' % (version, os_dir_name, signing_key_name, assert_filename) else: raise ValueError('Unknown coin') assert_sig_filename = assert_filename + '.sig' assert_sig_url = assert_url + '.sig' release_path = os.path.join(bin_dir, release_filename) if not os.path.exists(release_path): downloadFile(release_url, release_path) # Rename assert files with full version assert_filename = '{}-{}-{}-build.assert'.format(coin, os_name, version) assert_path = os.path.join(bin_dir, assert_filename) if not os.path.exists(assert_path): downloadFile(assert_url, assert_path) assert_sig_filename = '{}-{}-{}-build.assert.sig'.format(coin, os_name, version) assert_sig_path = os.path.join(bin_dir, assert_sig_filename) if not os.path.exists(assert_sig_path): downloadFile(assert_sig_url, assert_sig_path) hasher = hashlib.sha256() with open(release_path, 'rb') as fp: hasher.update(fp.read()) release_hash = hasher.digest() logger.info('%s hash: %s', release_filename, release_hash.hex()) with open(assert_path, 
    """
    gnupghome = os.path.join(data_dir, 'gpg')
    if not os.path.exists(gnupghome):
        os.makedirs(gnupghome)
    """
    gpg = gnupg.GPG()

    if coin == 'monero':
        with open(assert_path, 'rb') as fp:
            verified = gpg.verify_file(fp)
        if verified.username is None:
            logger.warning('Signature not verified.')

            pubkeyurl = 'https://raw.githubusercontent.com/monero-project/monero/master/utils/gpg_keys/binaryfate.asc'
            logger.info('Importing public key from url: ' + pubkeyurl)
            rv = gpg.import_keys(urllib.request.urlopen(pubkeyurl).read())
            print('import_keys', rv)
            assert 'F0AF4D462A0BDF92' in rv.fingerprints[0]

            with open(assert_path, 'rb') as fp:
                verified = gpg.verify_file(fp)
    else:
        with open(assert_sig_path, 'rb') as fp:
            verified = gpg.verify_file(fp, assert_path)
        if verified.username is None:
            logger.warning('Signature not verified.')

            pubkeyurl = 'https://raw.githubusercontent.com/tecnovert/basicswap/master/gitianpubkeys/{}_{}.pgp'.format(coin, signing_key_name)
            logger.info('Importing public key from url: ' + pubkeyurl)
            gpg.import_keys(urllib.request.urlopen(pubkeyurl).read())

            with open(assert_sig_path, 'rb') as fp:
                verified = gpg.verify_file(fp, assert_path)

    if verified.valid is False \
       and not (verified.status == 'signature valid' and verified.key_status == 'signing key has expired'):
        raise ValueError('Signature verification failed.')

    extractCore(coin, version, settings, bin_dir, release_path)


def prepareDataDir(coin, settings, data_dir, chain, particl_mnemonic):
    core_settings = settings['chainclients'][coin]
    data_dir = core_settings['datadir']

    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    if coin == 'monero':
        core_conf_path = os.path.join(data_dir, coin + 'd.conf')
        if os.path.exists(core_conf_path):
            exitWithError('{} exists'.format(core_conf_path))
        with open(core_conf_path, 'w') as fp:
            if chain == 'regtest':
                fp.write('regtest=1\n')
                fp.write('keep-fakechain=1\n')
                fp.write('fixed-difficulty=1\n')
            elif chain == 'testnet':
                fp.write('testnet=1\n')
            fp.write('data-dir={}\n'.format(data_dir))
            fp.write('rpc-bind-port={}\n'.format(core_settings['rpcport']))
            fp.write('rpc-bind-ip=127.0.0.1\n')
            fp.write('zmq-rpc-bind-port={}\n'.format(core_settings['zmqport']))
            fp.write('zmq-rpc-bind-ip=127.0.0.1\n')

        wallet_conf_path = os.path.join(data_dir, coin + '_wallet.conf')
        if os.path.exists(wallet_conf_path):
            exitWithError('{} exists'.format(wallet_conf_path))
        with open(wallet_conf_path, 'w') as fp:
            fp.write('daemon-address={}:{}\n'.format(core_settings['rpchost'], core_settings['rpcport']))
            fp.write('no-dns=1\n')
            fp.write('rpc-bind-port={}\n'.format(core_settings['walletrpcport']))
            fp.write('wallet-dir={}\n'.format(os.path.join(data_dir, 'wallets')))
            fp.write('log-file={}\n'.format(os.path.join(data_dir, 'wallet.log')))
            fp.write('shared-ringdb-dir={}\n'.format(os.path.join(data_dir, 'shared-ringdb')))
            fp.write('rpc-login={}:{}\n'.format(core_settings['walletrpcuser'], core_settings['walletrpcpassword']))
        return

    core_conf_path = os.path.join(data_dir, coin + '.conf')
    if os.path.exists(core_conf_path):
        exitWithError('{} exists'.format(core_conf_path))
    with open(core_conf_path, 'w') as fp:
        if chain != 'mainnet':
            fp.write(chain + '=1\n')
            if chain == 'testnet':
                fp.write('[test]\n\n')
            elif chain == 'regtest':  # was a plain 'if', which made testnet fall through to the warning below
                fp.write('[regtest]\n\n')
            else:
                logger.warning('Unknown chain %s', chain)
        fp.write('rpcport={}\n'.format(core_settings['rpcport']))
        fp.write('printtoconsole=0\n')
        fp.write('daemon=0\n')

        if coin == 'particl':
            fp.write('debugexclude=libevent\n')
            fp.write('zmqpubsmsg=tcp://127.0.0.1:{}\n'.format(settings['zmqport']))
            fp.write('spentindex=1\n')
            fp.write('txindex=1\n')
            fp.write('staking=0\n')
            if particl_mnemonic == 'none':
                fp.write('createdefaultmasterkey=1')
        elif coin == 'litecoin':
            fp.write('prune=1000\n')
        elif coin == 'bitcoin':
            fp.write('prune=1000\n')
            fp.write('fallbackfee=0.0002\n')
        elif coin == 'namecoin':
            fp.write('prune=1000\n')
        else:
            logger.warning('Unknown coin %s', coin)
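# Illustrative example (assumed values, not emitted verbatim by this script):
# for coin='bitcoin' on chain='testnet' with rpcport=19796, prepareDataDir()
# writes a bitcoin.conf along these lines:
#
#   testnet=1
#   [test]
#
#   rpcport=19796
#   printtoconsole=0
#   daemon=0
#   prune=1000
#   fallbackfee=0.0002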
def printVersion():
    from basicswap import __version__
    # Fixed: the message needs a %s placeholder for the lazily formatted argument
    logger.info('Basicswap version: %s', __version__)


def printHelp():
    logger.info('Usage: basicswap-prepare ')
    logger.info('\n--help, -h             Print help.')
    logger.info('--version, -v          Print version.')
    logger.info('--datadir=PATH         Path to basicswap data directory, default:{}.'.format(cfg.DEFAULT_DATADIR))
    logger.info('--bindir=PATH          Path to cores directory, default:datadir/bin.')
    logger.info('--mainnet              Run in mainnet mode.')
    logger.info('--testnet              Run in testnet mode.')
    logger.info('--regtest              Run in regtest mode.')
    logger.info('--particl_mnemonic=    Recovery phrase to use for the Particl wallet, default is randomly generated,\n'
                + '                       "none" to set autogenerate account mode.')
    logger.info('--withcoin=            Prepare system to run daemon for coin.')
    logger.info('--withoutcoin=         Do not prepare system to run daemon for coin.')
    logger.info('--addcoin=             Add coin to existing setup.')
    logger.info('--disablecoin=         Make coin inactive.')
    logger.info('--preparebinonly       Don\'t prepare settings or datadirs.')
    logger.info('--portoffset=n         Raise all ports by n.')
    logger.info('--htmlhost=            Interface to host on, default:localhost.')
    logger.info('--xmrrestoreheight=n   Block height to restore Monero wallet from, default:{}.'.format(DEFAULT_XMR_RESTORE_HEIGHT))
    logger.info('\n' + 'Known coins: %s', ', '.join(known_coins.keys()))


def make_rpc_func(bin_dir, data_dir, chain):
    bin_dir = bin_dir
    data_dir = data_dir
    chain = chain

    def rpc_func(cmd):
        nonlocal bin_dir
        nonlocal data_dir
        nonlocal chain
        return callrpc_cli(bin_dir, data_dir, chain, cmd, cfg.PARTICL_CLI)
    return rpc_func


def exitWithError(error_msg):
    sys.stderr.write('Error: {}, exiting.\n'.format(error_msg))
    sys.exit(1)


def main():
    data_dir = None
    bin_dir = None
    port_offset = None
    chain = 'mainnet'
    particl_wallet_mnemonic = None
    prepare_bin_only = False
    with_coins = {'particl', 'litecoin'}
    add_coin = ''
    disable_coin = ''
    htmlhost = 'localhost'
    xmr_restore_height = DEFAULT_XMR_RESTORE_HEIGHT

    for v in sys.argv[1:]:
        if len(v) < 2 or v[0] != '-':
            exitWithError('Unknown argument {}'.format(v))

        s = v.split('=')
        name = s[0].strip()

        for i in range(2):
            if name[0] == '-':
                name = name[1:]

        if name == 'v' or name == 'version':
            printVersion()
            return 0
        if name == 'h' or name == 'help':
            printHelp()
            return 0
        if name == 'mainnet':
            continue
        if name == 'testnet':
            chain = 'testnet'
            continue
        if name == 'regtest':
            chain = 'regtest'
            continue
        if name == 'preparebinonly':
            prepare_bin_only = True
            continue
        if len(s) == 2:
            if name == 'datadir':
                data_dir = os.path.expanduser(s[1].strip('"'))
                continue
            if name == 'bindir':
                bin_dir = os.path.expanduser(s[1].strip('"'))
                continue
            if name == 'portoffset':
                port_offset = int(s[1])
                continue
            if name == 'particl_mnemonic':
                particl_wallet_mnemonic = s[1].strip('"')
                continue
            if name == 'withcoin' or name == 'withcoins':
                coins = s[1].split(',')
                for coin in coins:
                    if coin not in known_coins:
                        exitWithError('Unknown coin {}'.format(coin))
                    with_coins.add(coin)
                continue
            if name == 'withoutcoin' or name == 'withoutcoins':
                coins = s[1].split(',')
                for coin in coins:
                    if coin not in known_coins:
                        exitWithError('Unknown coin {}'.format(coin))
                    with_coins.discard(coin)
                continue
            if name == 'addcoin':
                if s[1] not in known_coins:
                    exitWithError('Unknown coin {}'.format(s[1]))
                add_coin = s[1]
                with_coins = {add_coin}  # was a list literal; kept as a set to match the add/discard calls above
                continue
            if name == 'disablecoin':
                if s[1] not in known_coins:
                    exitWithError('Unknown coin {}'.format(s[1]))
                disable_coin = s[1]
                continue
            if name == 'htmlhost':
                htmlhost = s[1].strip('"')
                continue
            if name == 'xmrrestoreheight':
                xmr_restore_height = int(s[1])
                continue

        exitWithError('Unknown argument {}'.format(v))
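    # Example invocations accepted by the loop above (illustrative):
    #   basicswap-prepare --datadir=~/.basicswap --testnet --withcoins=bitcoin,monero
    #   basicswap-prepare --addcoin=namecoin
    #   basicswap-prepare --preparebinonly --withoutcoin=litecoin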
    if data_dir is None:
        data_dir = os.path.join(os.path.expanduser(cfg.DEFAULT_DATADIR))
    if bin_dir is None:
        bin_dir = os.path.join(data_dir, 'bin')

    logger.info('Using datadir: %s', data_dir)
    logger.info('Chain: %s', chain)

    if port_offset is None:
        port_offset = 300 if chain == 'testnet' else 0

    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    config_path = os.path.join(data_dir, cfg.CONFIG_FILENAME)

    withchainclients = {}
    chainclients = {
        'particl': {
            'connection_type': 'rpc',
            'manage_daemon': True,
            'rpcport': 19792 + port_offset,
            'datadir': os.path.join(data_dir, 'particl'),
            'bindir': os.path.join(bin_dir, 'particl'),
            'blocks_confirmed': 2,
            'override_feerate': 0.002,
            'conf_target': 2,
            'core_version_group': 18,
            'chain_lookups': 'local',
        },
        'litecoin': {
            'connection_type': 'rpc' if 'litecoin' in with_coins else 'none',
            'manage_daemon': True if 'litecoin' in with_coins else False,
            'rpcport': 19795 + port_offset,
            'datadir': os.path.join(data_dir, 'litecoin'),
            'bindir': os.path.join(bin_dir, 'litecoin'),
            'use_segwit': True,
            'blocks_confirmed': 2,
            'conf_target': 2,
            'core_version_group': 18,
            'chain_lookups': 'local',
        },
        'bitcoin': {
            'connection_type': 'rpc' if 'bitcoin' in with_coins else 'none',
            'manage_daemon': True if 'bitcoin' in with_coins else False,
            'rpcport': 19796 + port_offset,
            'datadir': os.path.join(data_dir, 'bitcoin'),
            'bindir': os.path.join(bin_dir, 'bitcoin'),
            'use_segwit': True,
            'blocks_confirmed': 1,
            'conf_target': 2,
            'core_version_group': 18,
            'chain_lookups': 'local',
        },
        'namecoin': {
            'connection_type': 'rpc' if 'namecoin' in with_coins else 'none',
            'manage_daemon': True if 'namecoin' in with_coins else False,
            'rpcport': 19798 + port_offset,
            'datadir': os.path.join(data_dir, 'namecoin'),
            'bindir': os.path.join(bin_dir, 'namecoin'),
            'use_segwit': False,
            'use_csv': False,
            'blocks_confirmed': 1,
            'conf_target': 2,
            'core_version_group': 18,
            'chain_lookups': 'local',
        },
        'monero': {
            'connection_type': 'rpc' if 'monero' in with_coins else 'none',
            'manage_daemon': True if ('monero' in with_coins and XMR_RPC_HOST == 'localhost') else False,
            'manage_wallet_daemon': True if 'monero' in with_coins else False,
            'rpcport': BASE_XMR_RPC_PORT + port_offset,
            'zmqport': BASE_XMR_ZMQ_PORT + port_offset,
            'walletrpcport': BASE_XMR_WALLET_PORT + port_offset,
            'rpchost': XMR_RPC_HOST,
            'walletrpcuser': XMR_WALLET_RPC_USER,
            'walletrpcpassword': XMR_WALLET_RPC_PWD,
            'walletfile': 'swap_wallet',
            'datadir': os.path.join(data_dir, 'monero'),
            'bindir': os.path.join(bin_dir, 'monero'),
            'restore_height': xmr_restore_height,
            'blocks_confirmed': 7,  # TODO: 10?
        }
    }
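    # Note (illustrative arithmetic): with the testnet default port_offset=300,
    # the effective ports become e.g. 19792+300=20092 for particl and
    # 19795+300=20095 for litecoin; the Monero ports are offset from the
    # BASE_XMR_* constants in the same way.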
    if disable_coin != '':
        logger.info('Disabling coin: %s', disable_coin)
        if not os.path.exists(config_path):
            exitWithError('{} does not exist'.format(config_path))
        with open(config_path) as fs:
            settings = json.load(fs)

        if disable_coin not in settings['chainclients']:
            exitWithError('{} has not been prepared'.format(disable_coin))
        settings['chainclients'][disable_coin]['connection_type'] = 'none'
        settings['chainclients'][disable_coin]['manage_daemon'] = False

        with open(config_path, 'w') as fp:
            json.dump(settings, fp, indent=4)

        logger.info('Done.')
        return 0

    if add_coin != '':
        logger.info('Adding coin: %s', add_coin)
        if not os.path.exists(config_path):
            exitWithError('{} does not exist'.format(config_path))
        with open(config_path) as fs:
            settings = json.load(fs)

        if add_coin in settings['chainclients']:
            coin_settings = settings['chainclients'][add_coin]
            if coin_settings['connection_type'] == 'none' and coin_settings['manage_daemon'] is False:
                logger.info('Enabling coin: %s', add_coin)
                coin_settings['connection_type'] = 'rpc'
                coin_settings['manage_daemon'] = True
                with open(config_path, 'w') as fp:
                    json.dump(settings, fp, indent=4)
                logger.info('Done.')
                return 0
            exitWithError('{} is already in the settings file'.format(add_coin))

        settings['chainclients'][add_coin] = chainclients[add_coin]

        prepareCore(add_coin, known_coins[add_coin], settings, data_dir)

        if not prepare_bin_only:
            prepareDataDir(add_coin, settings, data_dir, chain, particl_wallet_mnemonic)

        with open(config_path, 'w') as fp:
            json.dump(settings, fp, indent=4)

        logger.info('Done.')
        return 0

    logger.info('With coins: %s', ', '.join(with_coins))

    if os.path.exists(config_path):
        if not prepare_bin_only:
            exitWithError('{} exists'.format(config_path))
        else:
            with open(config_path) as fs:
                settings = json.load(fs)
    else:
        for c in with_coins:
            withchainclients[c] = chainclients[c]

        settings = {
            'debug': True,
            'zmqhost': 'tcp://127.0.0.1',
            'zmqport': 20792 + port_offset,
            'htmlhost': htmlhost,
            'htmlport': 12700 + port_offset,
            'network_key': '7sW2UEcHXvuqEjkpE5mD584zRaQYs6WXYohue4jLFZPTvMSxwvgs',
            'network_pubkey': '035758c4a22d7dd59165db02a56156e790224361eb3191f02197addcb3bde903d2',
            'chainclients': withchainclients,
            'min_delay_event': 5,  # Min delay in seconds before reacting to an event
            'max_delay_event': 50,  # Max delay in seconds before reacting to an event
            'check_progress_seconds': 60,
            'check_watched_seconds': 60,
            'check_expired_seconds': 60
        }

    for c in with_coins:
        prepareCore(c, known_coins[c], settings, data_dir)

    if prepare_bin_only:
        logger.info('Done.')
        return 0

    for c in with_coins:
        prepareDataDir(c, settings, data_dir, chain, particl_wallet_mnemonic)

    with open(config_path, 'w') as fp:
        json.dump(settings, fp, indent=4)

    if particl_wallet_mnemonic == 'none':
        logger.info('Done.')
        return 0

    logger.info('Loading Particl mnemonic')

    particl_settings = settings['chainclients']['particl']
    partRpc = make_rpc_func(particl_settings['bindir'], particl_settings['datadir'], chain)

    daemons = []
    daemons.append(startDaemon(particl_settings['datadir'], particl_settings['bindir'], cfg.PARTICLD, ['-noconnect', '-nofindpeers', '-nostaking', '-nodnsseed', '-nolisten']))
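    # The particl daemon is deliberately started isolated (-noconnect,
    # -nofindpeers, -nostaking, -nodnsseed, -nolisten) so the wallet can be
    # created and the master key imported before the node ever touches the
    # network.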
    try:
        waitForRPC(partRpc)

        if particl_wallet_mnemonic is None:
            particl_wallet_mnemonic = partRpc('mnemonic new')['mnemonic']

        partRpc('extkeyimportmaster "{}"'.format(particl_wallet_mnemonic))

        # Initialise wallets
        with open(os.path.join(data_dir, 'basicswap.log'), 'a') as fp:
            swap_client = BasicSwap(fp, data_dir, settings, chain)

            swap_client.setCoinConnectParams(Coins.PART)
            swap_client.setDaemonPID(Coins.PART, daemons[-1].pid)
            swap_client.setCoinRunParams(Coins.PART)
            swap_client.createCoinInterface(Coins.PART)

            for coin_name in with_coins:
                coin_settings = settings['chainclients'][coin_name]
                c = swap_client.getCoinIdFromName(coin_name)
                if c == Coins.PART:
                    continue
                swap_client.setCoinConnectParams(c)
                if c == Coins.XMR:
                    if not coin_settings['manage_wallet_daemon']:
                        continue
                    daemons.append(startXmrWalletDaemon(coin_settings['datadir'], coin_settings['bindir'], 'monero-wallet-rpc'))
                else:
                    if not coin_settings['manage_daemon']:
                        continue
                    filename = coin_name + 'd' + ('.exe' if os.name == 'nt' else '')
                    daemons.append(startDaemon(coin_settings['datadir'], coin_settings['bindir'], filename, ['-noconnect', '-nodnsseed', '-nolisten']))
                swap_client.setDaemonPID(c, daemons[-1].pid)
                swap_client.setCoinRunParams(c)
                swap_client.createCoinInterface(c)
                swap_client.waitForDaemonRPC(c)
                swap_client.initialiseWallet(c)
    finally:
        for d in daemons:
            logging.info('Interrupting {}'.format(d.pid))
            d.send_signal(signal.SIGINT)
            d.wait(timeout=120)
            for fp in (d.stdout, d.stderr, d.stdin):
                if fp:
                    fp.close()

    logger.info('IMPORTANT - Save your particl wallet recovery phrase:\n{}\n'.format(particl_wallet_mnemonic))
    logger.info('Done.')


if __name__ == '__main__':
    main()
[]
[]
[ "XMR_RPC_HOST", "BASE_XMR_RPC_PORT", "XMR_WALLET_RPC_USER", "XMR_WALLET_RPC_PWD", "BASE_XMR_WALLET_PORT", "BASE_XMR_ZMQ_PORT" ]
[]
["XMR_RPC_HOST", "BASE_XMR_RPC_PORT", "XMR_WALLET_RPC_USER", "XMR_WALLET_RPC_PWD", "BASE_XMR_WALLET_PORT", "BASE_XMR_ZMQ_PORT"]
python
6
0
src/core/common/commons/src/main/java/eu/interiot/gateway/commons/api/command/CommandLine.java
/* * Copyright 2016-2018 Universitat Politècnica de València * Copyright 2016-2018 Università della Calabria * Copyright 2016-2018 Prodevelop, SL * Copyright 2016-2018 Technische Universiteit Eindhoven * Copyright 2016-2018 Fundación de la Comunidad Valenciana para la * Investigación, Promoción y Estudios Comerciales de Valenciaport * Copyright 2016-2018 Rinicom Ltd * Copyright 2016-2018 Association pour le développement de la formation * professionnelle dans le transport * Copyright 2016-2018 Noatum Ports Valenciana, S.A.U. * Copyright 2016-2018 XLAB razvoj programske opreme in svetovanje d.o.o. * Copyright 2016-2018 Systems Research Institute Polish Academy of Sciences * Copyright 2016-2018 Azienda Sanitaria Locale TO5 * Copyright 2016-2018 Alessandro Bassi Consulting SARL * Copyright 2016-2018 Neways Technologies B.V. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* Copyright 2017 Remko Popma Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ package eu.interiot.gateway.commons.api.command; import static eu.interiot.gateway.commons.api.command.CommandLine.ArgSpecBuilder.abbreviate; import static eu.interiot.gateway.commons.api.command.CommandLine.Help.Column.Overflow.SPAN; import static eu.interiot.gateway.commons.api.command.CommandLine.Help.Column.Overflow.TRUNCATE; import static eu.interiot.gateway.commons.api.command.CommandLine.Help.Column.Overflow.WRAP; import static java.util.Locale.ENGLISH; import java.io.File; import java.io.FileReader; import java.io.LineNumberReader; import java.io.PrintStream; import java.io.PrintWriter; import java.io.StreamTokenizer; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.reflect.Array; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.math.BigDecimal; import java.math.BigInteger; import java.net.InetAddress; import java.net.MalformedURLException; import java.net.NetworkInterface; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.nio.ByteOrder; import java.nio.charset.Charset; import java.sql.Connection; import java.sql.Driver; import java.sql.DriverManager; import java.sql.Time; import java.sql.Timestamp; import java.text.BreakIterator; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Currency; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.Set; import java.util.SortedSet; import java.util.Stack; import java.util.TimeZone; import java.util.TreeSet; import java.util.UUID; import java.util.concurrent.Callable; import java.util.regex.Pattern; import eu.interiot.gateway.commons.api.command.CommandLine.Help.Ansi.IStyle; import eu.interiot.gateway.commons.api.command.CommandLine.Help.Ansi.Style; import eu.interiot.gateway.commons.api.command.CommandLine.Help.Ansi.Text; /** * <p> * CommandLine interpreter that uses reflection to initialize an annotated domain object with values obtained from the * command line arguments. * </p><h2>Example</h2> * <pre>import static picocli.CommandLine.*; * * &#064;Command(header = "Encrypt FILE(s), or standard input, to standard output or to the output file.", * version = "v1.2.3") * public class Encrypt { * * &#064;Parameters(type = File.class, description = "Any number of input files") * private List&lt;File&gt; files = new ArrayList&lt;File&gt;(); * * &#064;Option(names = { "-o", "--out" }, description = "Output file (default: print to console)") * private File outputFile; * * &#064;Option(names = { "-v", "--verbose"}, description = "Verbose mode. Helpful for troubleshooting. 
Multiple -v options increase the verbosity.") * private boolean[] verbose; * * &#064;Option(names = { "-h", "--help", "-?", "-help"}, usageHelp = true, description = "Display this help and exit") * private boolean help; * * &#064;Option(names = { "-V", "--version"}, versionHelp = true, description = "Display version info and exit") * private boolean versionHelp; * } * </pre> * <p> * Use {@code CommandLine} to initialize a domain object as follows: * </p><pre> * public static void main(String... args) { * Encrypt encrypt = new Encrypt(); * try { * List&lt;CommandLine&gt; parsedCommands = new CommandLine(encrypt).parse(args); * if (!CommandLine.printHelpIfRequested(parsedCommands, System.err, Help.Ansi.AUTO)) { * runProgram(encrypt); * } * } catch (ParameterException ex) { // command line arguments could not be parsed * System.err.println(ex.getMessage()); * ex.getCommandLine().usage(System.err); * } * } * </pre><p> * Invoke the above program with some command line arguments. The below are all equivalent: * </p> * <pre> * --verbose --out=outfile in1 in2 * --verbose --out outfile in1 in2 * -v --out=outfile in1 in2 * -v -o outfile in1 in2 * -v -o=outfile in1 in2 * -vo outfile in1 in2 * -vo=outfile in1 in2 * -v -ooutfile in1 in2 * -vooutfile in1 in2 * </pre> */ public class CommandLine { /** This is picocli version {@value}. */ public static final String VERSION = "3.0.0-alpha-1-SNAPSHOT"; private final Tracer tracer = new Tracer(); private final CommandSpec commandSpec; private final Interpreter interpreter; private final IFactory factory; private boolean overwrittenOptionsAllowed = false; private boolean unmatchedArgumentsAllowed = false; private boolean expandAtFiles = true; private List<String> unmatchedArguments = new ArrayList<String>(); private boolean usageHelpRequested; private boolean versionHelpRequested; /** * Constructs a new {@code CommandLine} interpreter with the specified object and a default subcommand factory. * <p>The specified object may be a {@link CommandSpec CommandSpec} object, or it may be a {@code @Command}-annotated * user object with {@code @Option} and {@code @Parameters}-annotated fields, in which case picocli automatically * constructs a {@code CommandSpec} from this user object. * </p><p> * When the {@link #parse(String...)} method is called, the {@link CommandSpec CommandSpec} object will be * initialized based on command line arguments. If the commandSpec is created from an annotated user object, this * user object will be initialized based on the command line arguments.</p> * @param command an annotated user object or a {@code CommandSpec} object to initialize from the command line arguments * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation */ public CommandLine(Object command) { this(command, new DefaultFactory()); } /** * Constructs a new {@code CommandLine} interpreter with the specified object and object factory. * <p>The specified object may be a {@link CommandSpec CommandSpec} object, or it may be a {@code @Command}-annotated * user object with {@code @Option} and {@code @Parameters}-annotated fields, in which case picocli automatically * constructs a {@code CommandSpec} from this user object. * </p><p> * When the {@link #parse(String...)} method is called, the {@link CommandSpec CommandSpec} object will be * initialized based on command line arguments. 
If the commandSpec is created from an annotated user object, this * user object will be initialized based on the command line arguments.</p> * @param command an annotated user object or a {@code CommandSpec} object to initialize from the command line arguments * @param factory the factory used to create instances of {@linkplain Command#subcommands() subcommands}, {@linkplain Option#converter() converters}, etc., that are registered declaratively with annotation attributes * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation * @since 2.2 */ public CommandLine(Object command, IFactory factory) { this.factory = Assert.notNull(factory, "factory"); interpreter = new Interpreter(); commandSpec = CommandSpecBuilder.build(command, factory); commandSpec.commandLine(this); commandSpec.validate(); } /** * Returns the {@code CommandSpec} model that this {@code CommandLine} was constructed with. * @return the {@code CommandSpec} model * @since 3.0 */ public CommandSpec getCommandSpec() { return commandSpec; } /** * Adds the options and positional parameters in the specified mixin to this command. * <p>The specified object may be a {@link CommandSpec CommandSpec} object, or it may be a user object with * {@code @Option} and {@code @Parameters}-annotated fields, in which case picocli automatically * constructs a {@code CommandSpec} from this user object. * </p> * @param name the name by which the mixin object may later be retrieved * @param mixin an annotated user object or a {@link CommandSpec CommandSpec} object whose options and positional parameters to add to this command * @return this CommandLine object, to allow method chaining * @since 3.0 */ public CommandLine addMixin(String name, Object mixin) { getCommandSpec().addMixin(name, CommandSpecBuilder.build(mixin, factory)); return this; } /** * Returns a map of user objects whose options and positional parameters were added to ("mixed in" with) this command. * @return a new Map containing the user objects mixed in with this command. If {@code CommandSpec} objects without * user objects were programmatically added, use the {@link CommandSpec#mixins() underlying model} directly. * @since 3.0 */ public Map<String, Object> getMixins() { Map<String, CommandSpec> mixins = getCommandSpec().mixins(); Map<String, Object> result = new LinkedHashMap<String, Object>(); for (String name : mixins.keySet()) { result.put(name, mixins.get(name).userObject); } return result; } /** Registers a subcommand with the specified name. For example: * <pre> * CommandLine commandLine = new CommandLine(new Git()) * .addSubcommand("status", new GitStatus()) * .addSubcommand("commit", new GitCommit(); * .addSubcommand("add", new GitAdd()) * .addSubcommand("branch", new GitBranch()) * .addSubcommand("checkout", new GitCheckout()) * //... * ; * </pre> * * <p>The specified object can be an annotated object or a * {@code CommandLine} instance with its own nested subcommands. 
For example:</p> * <pre> * CommandLine commandLine = new CommandLine(new MainCommand()) * .addSubcommand("cmd1", new ChildCommand1()) // subcommand * .addSubcommand("cmd2", new ChildCommand2()) * .addSubcommand("cmd3", new CommandLine(new ChildCommand3()) // subcommand with nested sub-subcommands * .addSubcommand("cmd3sub1", new GrandChild3Command1()) * .addSubcommand("cmd3sub2", new GrandChild3Command2()) * .addSubcommand("cmd3sub3", new CommandLine(new GrandChild3Command3()) // deeper nesting * .addSubcommand("cmd3sub3sub1", new GreatGrandChild3Command3_1()) * .addSubcommand("cmd3sub3sub2", new GreatGrandChild3Command3_2()) * ) * ); * </pre> * <p>The default type converters are available on all subcommands and nested sub-subcommands, but custom type * converters are registered only with the subcommand hierarchy as it existed when the custom type was registered. * To ensure a custom type converter is available to all subcommands, register the type converter last, after * adding subcommands.</p> * <p>See also the {@link Command#subcommands()} annotation to register subcommands declaratively.</p> * * @param name the string to recognize on the command line as a subcommand * @param command the object to initialize with command line arguments following the subcommand name. * This may be a {@code CommandLine} instance with its own (nested) subcommands * @return this CommandLine object, to allow method chaining * @see #registerConverter(Class, ITypeConverter) * @since 0.9.7 * @see Command#subcommands() */ public CommandLine addSubcommand(String name, Object command) { CommandLine subcommandLine = toCommandLine(command, factory); getCommandSpec().addSubcommand(name, subcommandLine); CommandSpecBuilder.initParentCommand(subcommandLine.getCommandSpec().userObject(), getCommandSpec().userObject()); return this; } /** Returns a map with the subcommands {@linkplain #addSubcommand(String, Object) registered} on this instance. * @return a map with the registered subcommands * @since 0.9.7 */ public Map<String, CommandLine> getSubcommands() { return new LinkedHashMap<String, CommandLine>(getCommandSpec().subcommands()); } /** * Returns the command that this is a subcommand of, or {@code null} if this is a top-level command. * @return the command that this is a subcommand of, or {@code null} if this is a top-level command * @see #addSubcommand(String, Object) * @see Command#subcommands() * @since 0.9.8 */ public CommandLine getParent() { CommandSpec parent = getCommandSpec().parent(); return parent == null ? null : parent.commandLine(); } /** Returns the annotated user object that this {@code CommandLine} instance was constructed with. * @param <T> the type of the variable that the return value is being assigned to * @return the annotated object that this {@code CommandLine} instance was constructed with * @since 0.9.7 */ @SuppressWarnings("unchecked") public <T> T getCommand() { return (T) getCommandSpec().userObject(); } /** Returns {@code true} if an option annotated with {@link Option#usageHelp()} was specified on the command line. * @return whether the parser encountered an option annotated with {@link Option#usageHelp()}. * @since 0.9.8 */ public boolean isUsageHelpRequested() { return usageHelpRequested; } /** Returns {@code true} if an option annotated with {@link Option#versionHelp()} was specified on the command line. * @return whether the parser encountered an option annotated with {@link Option#versionHelp()}. 
* @since 0.9.8 */ public boolean isVersionHelpRequested() { return versionHelpRequested; } /** Returns whether options for single-value fields can be specified multiple times on the command line. * The default is {@code false} and a {@link OverwrittenOptionException} is thrown if this happens. * When {@code true}, the last specified value is retained. * @return {@code true} if options for single-value fields can be specified multiple times on the command line, {@code false} otherwise * @since 0.9.7 */ public boolean isOverwrittenOptionsAllowed() { return overwrittenOptionsAllowed; } /** Sets whether options for single-value fields can be specified multiple times on the command line without a {@link OverwrittenOptionException} being thrown. * <p>The specified setting will be registered with this {@code CommandLine} and the full hierarchy of its * subcommands and nested sub-subcommands <em>at the moment this method is called</em>. Subcommands added * later will have the default setting. To ensure a setting is applied to all * subcommands, call the setter last, after adding subcommands.</p> * @param newValue the new setting * @return this {@code CommandLine} object, to allow method chaining * @since 0.9.7 */ public CommandLine setOverwrittenOptionsAllowed(boolean newValue) { this.overwrittenOptionsAllowed = newValue; for (CommandLine command : getCommandSpec().subcommands().values()) { command.setOverwrittenOptionsAllowed(newValue); } return this; } /** Returns whether the end user may specify arguments on the command line that are not matched to any option or parameter fields. * The default is {@code false} and a {@link UnmatchedArgumentException} is thrown if this happens. * When {@code true}, the last unmatched arguments are available via the {@link #getUnmatchedArguments()} method. * @return {@code true} if the end use may specify unmatched arguments on the command line, {@code false} otherwise * @see #getUnmatchedArguments() * @since 0.9.7 */ public boolean isUnmatchedArgumentsAllowed() { return unmatchedArgumentsAllowed; } /** Sets whether the end user may specify unmatched arguments on the command line without a {@link UnmatchedArgumentException} being thrown. * <p>The specified setting will be registered with this {@code CommandLine} and the full hierarchy of its * subcommands and nested sub-subcommands <em>at the moment this method is called</em>. Subcommands added * later will have the default setting. To ensure a setting is applied to all * subcommands, call the setter last, after adding subcommands.</p> * @param newValue the new setting. When {@code true}, the last unmatched arguments are available via the {@link #getUnmatchedArguments()} method. * @return this {@code CommandLine} object, to allow method chaining * @since 0.9.7 * @see #getUnmatchedArguments() */ public CommandLine setUnmatchedArgumentsAllowed(boolean newValue) { this.unmatchedArgumentsAllowed = newValue; for (CommandLine command : getCommandSpec().subcommands().values()) { command.setUnmatchedArgumentsAllowed(newValue); } return this; } /** Returns the list of unmatched command line arguments, if any. * @return the list of unmatched command line arguments or an empty list * @see #isUnmatchedArgumentsAllowed() * @since 0.9.7 */ public List<String> getUnmatchedArguments() { return unmatchedArguments; } /** * <p> * Convenience method that initializes the specified annotated object from the specified command line arguments. 
* </p><p> * This is equivalent to * </p><pre> * CommandLine cli = new CommandLine(command); * cli.parse(args); * return command; * </pre> * * @param command the object to initialize. This object contains fields annotated with * {@code @Option} or {@code @Parameters}. * @param args the command line arguments to parse * @param <T> the type of the annotated object * @return the specified annotated object * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation * @throws ParameterException if the specified command line arguments are invalid * @since 0.9.7 */ public static <T> T populateCommand(T command, String... args) { CommandLine cli = toCommandLine(command, new DefaultFactory()); cli.parse(args); return command; } /** Parses the specified command line arguments and returns a list of {@code CommandLine} objects representing the * top-level command and any subcommands (if any) that were recognized and initialized during the parsing process. * <p> * If parsing succeeds, the first element in the returned list is always {@code this CommandLine} object. The * returned list may contain more elements if subcommands were {@linkplain #addSubcommand(String, Object) registered} * and these subcommands were initialized by matching command line arguments. If parsing fails, a * {@link ParameterException} is thrown. * </p> * * @param args the command line arguments to parse * @return a list with the top-level command and any subcommands initialized by this method * @throws ParameterException if the specified command line arguments are invalid; use * {@link ParameterException#getCommandLine()} to get the command or subcommand whose user input was invalid */ public List<CommandLine> parse(String... args) { return interpreter.parse(args); } /** * Represents a function that can process a List of {@code CommandLine} objects resulting from successfully * {@linkplain #parse(String...) parsing} the command line arguments. This is a * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/package-summary.html">functional interface</a> * whose functional method is {@link #handleParseResult(List, PrintWriter, CommandLine.Help.Ansi)}. * <p> * Implementations of this functions can be passed to the {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...) CommandLine::parseWithHandler} * methods to take some next step after the command line was successfully parsed. * </p> * @see RunFirst * @see RunLast * @see RunAll * @since 2.0 */ public static interface IParseResultHandler { /** Processes a List of {@code CommandLine} objects resulting from successfully * {@linkplain #parse(String...) parsing} the command line arguments and optionally returns a list of results. 
* @param parsedCommands the {@code CommandLine} objects that resulted from successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @return a list of results, or an empty list if there are no results * @throws ExecutionException if a problem occurred while processing the parse results; use * {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed */ List<Object> handleParseResult(List<CommandLine> parsedCommands, PrintWriter out, Help.Ansi ansi) throws ExecutionException; } /** * Represents a function that can handle a {@code ParameterException} that occurred while * {@linkplain #parse(String...) parsing} the command line arguments. This is a * <a href="https://docs.oracle.com/javase/8/docs/api/java/util/function/package-summary.html">functional interface</a> * whose functional method is {@link #handleException(CommandLine.ParameterException, PrintWriter, CommandLine.Help.Ansi, String...)}. * <p> * Implementations of this functions can be passed to the {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...) CommandLine::parseWithHandler} * methods to handle situations when the command line could not be parsed. * </p> * @see DefaultExceptionHandler * @since 2.0 */ public static interface IExceptionHandler { /** Handles a {@code ParameterException} that occurred while {@linkplain #parse(String...) parsing} the command * line arguments and optionally returns a list of results. * @param ex the ParameterException describing the problem that occurred while parsing the command line arguments, * and the CommandLine representing the command or subcommand whose input was invalid * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @param args the command line arguments that could not be parsed * @return a list of results, or an empty list if there are no results */ List<Object> handleException(ParameterException ex, PrintWriter out, Help.Ansi ansi, String... args); } /** * Default exception handler that prints the exception message to the specified {@code PrintWriter}, followed by the * usage message for the command or subcommand whose input was invalid. * <p>Implementation roughly looks like this:</p> * <pre> * System.err.println(paramException.getMessage()); * paramException.getCommandLine().usage(System.err); * </pre> * @since 2.0 */ public static class DefaultExceptionHandler implements IExceptionHandler { public List<Object> handleException(ParameterException ex, PrintWriter out, Help.Ansi ansi, String... args) { out.println(ex.getMessage()); ex.getCommandLine().usage(out, ansi); return Collections.emptyList(); } } /** * Helper method that may be useful when processing the list of {@code CommandLine} objects that result from successfully * {@linkplain #parse(String...) parsing} command line arguments. This method prints out * {@linkplain #usage(PrintWriter, Help.Ansi) usage help} if {@linkplain #isUsageHelpRequested() requested} * or {@linkplain #printVersionHelp(PrintWriter, Help.Ansi) version help} if {@linkplain #isVersionHelpRequested() requested} * and returns {@code true}. Otherwise, if none of the specified {@code CommandLine} objects have help requested, * this method returns {@code false}. 
* <p> * Note that this method <em>only</em> looks at the {@link Option#usageHelp() usageHelp} and * {@link Option#versionHelp() versionHelp} attributes. The {@link Option#help() help} attribute is ignored. * </p> * @param parsedCommands the list of {@code CommandLine} objects to check if help was requested * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @return {@code true} if help was printed, {@code false} otherwise * @since 2.0 */ public static boolean printHelpIfRequested(List<CommandLine> parsedCommands, PrintWriter out, Help.Ansi ansi) { for (int i = 0; i < parsedCommands.size(); i++) { CommandLine parsed = parsedCommands.get(i); if (parsed.isUsageHelpRequested()) { parsed.usage(out, ansi); return true; } else if (parsed.isVersionHelpRequested()) { parsed.printVersionHelp(out, ansi); return true; } else if (i > 0 && parsed.getCommand() instanceof AutoHelpMixin.HelpCommand) { CommandLine main = parsedCommands.get(i - 1); AutoHelpMixin.HelpCommand helpCommand = parsed.getCommand(); CommandLine subcommand = null; if (helpCommand.commands.length > 0) { subcommand = main.getSubcommands().get(helpCommand.commands[0]); if (subcommand != null) { subcommand.usage(out, ansi); } else { out.println("Unknown subcommand '" + helpCommand.commands[0] + "'."); main.usage(out, ansi); } } else { main.usage(out, ansi); } return true; } } return false; } private static Object execute(CommandLine parsed) { Object command = parsed.getCommand(); if (command instanceof Runnable) { try { ((Runnable) command).run(); return null; } catch (Exception ex) { throw new ExecutionException(parsed, "Error while running command (" + command + "): " + ex, ex); } } else if (command instanceof Callable) { try { @SuppressWarnings("unchecked") Callable<Object> callable = (Callable<Object>) command; return callable.call(); } catch (Exception ex) { throw new ExecutionException(parsed, "Error while calling command (" + command + "): " + ex, ex); } } throw new ExecutionException(parsed, "Parsed command (" + command + ") is not Runnable or Callable"); } /** * Command line parse result handler that prints help if requested, and otherwise executes the top-level * {@code Runnable} or {@code Callable} command. * For use in the {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...) parseWithHandler} methods. * <p> * From picocli v2.0, {@code RunFirst} is used to implement the {@link #run(Runnable, PrintWriter, Help.Ansi, String...) run} * and {@link #call(Callable, PrintWriter, Help.Ansi, String...) call} convenience methods. * </p> * @since 2.0 */ public static class RunFirst implements IParseResultHandler { /** Prints help if requested, and otherwise executes the top-level {@code Runnable} or {@code Callable} command. * If the top-level command does not implement either {@code Runnable} or {@code Callable}, a {@code ExecutionException} * is thrown detailing the problem and capturing the offending {@code CommandLine} object. 
* * @param parsedCommands the {@code CommandLine} objects that resulted from successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @return an empty list if help was requested, or a list containing a single element: the result of calling the * {@code Callable}, or a {@code null} element if the top-level command was a {@code Runnable} * @throws ExecutionException if a problem occurred while processing the parse results; use * {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed */ public List<Object> handleParseResult(List<CommandLine> parsedCommands, PrintWriter out, Help.Ansi ansi) { if (printHelpIfRequested(parsedCommands, out, ansi)) { return Collections.emptyList(); } return Arrays.asList(execute(parsedCommands.get(0))); } } /** * Command line parse result handler that prints help if requested, and otherwise executes the most specific * {@code Runnable} or {@code Callable} subcommand. * For use in the {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...) parseWithHandler} methods. * <p> * Something like this:</p> * <pre> * // RunLast implementation: print help if requested, otherwise execute the most specific subcommand * if (CommandLine.printHelpIfRequested(parsedCommands, System.err, Help.Ansi.AUTO)) { * return emptyList(); * } * CommandLine last = parsedCommands.get(parsedCommands.size() - 1); * Object command = last.getCommand(); * if (command instanceof Runnable) { * try { * ((Runnable) command).run(); * } catch (Exception ex) { * throw new ExecutionException(last, "Error in runnable " + command, ex); * } * } else if (command instanceof Callable) { * Object result; * try { * result = ((Callable) command).call(); * } catch (Exception ex) { * throw new ExecutionException(last, "Error in callable " + command, ex); * } * // ...do something with result * } else { * throw new ExecutionException(last, "Parsed command (" + command + ") is not Runnable or Callable"); * } * </pre> * @since 2.0 */ public static class RunLast implements IParseResultHandler { /** Prints help if requested, and otherwise executes the most specific {@code Runnable} or {@code Callable} subcommand. * If the last (sub)command does not implement either {@code Runnable} or {@code Callable}, a {@code ExecutionException} * is thrown detailing the problem and capturing the offending {@code CommandLine} object. 
* * @param parsedCommands the {@code CommandLine} objects that resulted from successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @return an empty list if help was requested, or a list containing a single element: the result of calling the * {@code Callable}, or a {@code null} element if the last (sub)command was a {@code Runnable} * @throws ExecutionException if a problem occurred while processing the parse results; use * {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed */ public List<Object> handleParseResult(List<CommandLine> parsedCommands, PrintWriter out, Help.Ansi ansi) { if (printHelpIfRequested(parsedCommands, out, ansi)) { return Collections.emptyList(); } CommandLine last = parsedCommands.get(parsedCommands.size() - 1); return Arrays.asList(execute(last)); } } /** * Command line parse result handler that prints help if requested, and otherwise executes the top-level command and * all subcommands as {@code Runnable} or {@code Callable}. * For use in the {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...) parseWithHandler} methods. * @since 2.0 */ public static class RunAll implements IParseResultHandler { /** Prints help if requested, and otherwise executes the top-level command and all subcommands as {@code Runnable} * or {@code Callable}. If any of the {@code CommandLine} commands does not implement either * {@code Runnable} or {@code Callable}, a {@code ExecutionException} * is thrown detailing the problem and capturing the offending {@code CommandLine} object. * * @param parsedCommands the {@code CommandLine} objects that resulted from successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @return an empty list if help was requested, or a list containing the result of executing all commands: * the return values from calling the {@code Callable} commands, {@code null} elements for commands that implement {@code Runnable} * @throws ExecutionException if a problem occurred while processing the parse results; use * {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed */ public List<Object> handleParseResult(List<CommandLine> parsedCommands, PrintWriter out, Help.Ansi ansi) { if (printHelpIfRequested(parsedCommands, out, ansi)) { return Collections.emptyList(); } List<Object> result = new ArrayList<Object>(); for (CommandLine parsed : parsedCommands) { result.add(execute(parsed)); } return result; } } /** * Returns the result of calling {@link #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...)} * with {@code Help.Ansi.AUTO} and a new {@link DefaultExceptionHandler} in addition to the specified parse result handler, * {@code PrintWriter}, and the specified command line arguments. * <p> * This is a convenience method intended to offer the same ease of use as the {@link #run(Runnable, PrintWriter, Help.Ansi, String...) run} * and {@link #call(Callable, PrintWriter, Help.Ansi, String...) call} methods, but with more flexibility and better * support for nested subcommands. 
* </p> * <p>Calling this method roughly expands to:</p> * <pre> * try { * List&lt;CommandLine&gt; parsedCommands = parse(args); * return parseResultsHandler.handleParseResult(parsedCommands, out, Help.Ansi.AUTO); * } catch (ParameterException ex) { * return new DefaultExceptionHandler().handleException(ex, out, ansi, args); * } * </pre> * <p> * Picocli provides some default handlers that allow you to accomplish some common tasks with very little code. * The following handlers are available:</p> * <ul> * <li>{@link RunLast} handler prints help if requested, and otherwise gets the last specified command or subcommand * and tries to execute it as a {@code Runnable} or {@code Callable}.</li> * <li>{@link RunFirst} handler prints help if requested, and otherwise executes the top-level command as a {@code Runnable} or {@code Callable}.</li> * <li>{@link RunAll} handler prints help if requested, and otherwise executes all recognized commands and subcommands as {@code Runnable} or {@code Callable} tasks.</li> * <li>{@link DefaultExceptionHandler} prints the error message followed by usage help</li> * </ul> * @param handler the function that will process the result of successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param args the command line arguments * @return a list of results, or an empty list if there are no results * @throws ExecutionException if the command line arguments were parsed successfully but a problem occurred while processing the * parse results; use {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed * @see RunLast * @see RunAll * @since 2.0 */ public List<Object> parseWithHandler(IParseResultHandler handler, PrintWriter out, String... args) { return parseWithHandlers(handler, out, Help.Ansi.AUTO, new DefaultExceptionHandler(), args); } /** * Tries to {@linkplain #parse(String...) parse} the specified command line arguments, and if successful, delegates * the processing of the resulting list of {@code CommandLine} objects to the specified {@linkplain IParseResultHandler handler}. * If the command line arguments were invalid, the {@code ParameterException} thrown from the {@code parse} method * is caught and passed to the specified {@link IExceptionHandler}. * <p> * This is a convenience method intended to offer the same ease of use as the {@link #run(Runnable, PrintWriter, Help.Ansi, String...) run} * and {@link #call(Callable, PrintWriter, Help.Ansi, String...) call} methods, but with more flexibility and better * support for nested subcommands. * </p> * <p>Calling this method roughly expands to:</p> * <pre> * try { * List&lt;CommandLine&gt; parsedCommands = parse(args); * return parseResultsHandler.handleParseResult(parsedCommands, out, ansi); * } catch (ParameterException ex) { * return new exceptionHandler.handleException(ex, out, ansi, args); * } * </pre> * <p> * Picocli provides some default handlers that allow you to accomplish some common tasks with very little code. 
* The following handlers are available:</p> * <ul> * <li>{@link RunLast} handler prints help if requested, and otherwise gets the last specified command or subcommand * and tries to execute it as a {@code Runnable} or {@code Callable}.</li> * <li>{@link RunFirst} handler prints help if requested, and otherwise executes the top-level command as a {@code Runnable} or {@code Callable}.</li> * <li>{@link RunAll} handler prints help if requested, and otherwise executes all recognized commands and subcommands as {@code Runnable} or {@code Callable} tasks.</li> * <li>{@link DefaultExceptionHandler} prints the error message followed by usage help</li> * </ul> * * @param handler the function that will process the result of successfully parsing the command line arguments * @param out the {@code PrintWriter} to print help to if requested * @param ansi for printing help messages using ANSI styles and colors * @param exceptionHandler the function that can handle the {@code ParameterException} thrown when the command line arguments are invalid * @param args the command line arguments * @return a list of results produced by the {@code IParseResultHandler} or the {@code IExceptionHandler}, or an empty list if there are no results * @throws ExecutionException if the command line arguments were parsed successfully but a problem occurred while processing the parse * result {@code CommandLine} objects; use {@link ExecutionException#getCommandLine()} to get the command or subcommand where processing failed * @see RunLast * @see RunAll * @see DefaultExceptionHandler * @since 2.0 */ public List<Object> parseWithHandlers(IParseResultHandler handler, PrintWriter out, Help.Ansi ansi, IExceptionHandler exceptionHandler, String... args) { try { List<CommandLine> result = parse(args); return handler.handleParseResult(result, out, ansi); } catch (ParameterException ex) { return exceptionHandler.handleException(ex, out, ansi, args); } } /** * Equivalent to {@code new CommandLine(command).usage(out)}. See {@link #usage(PrintWriter)} for details. * @param command the object annotated with {@link Command}, {@link Option} and {@link Parameters} * @param out the print stream to print the help message to * @throws IllegalArgumentException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation */ public static void usage(Object command, PrintWriter out) { toCommandLine(command, new DefaultFactory()).usage(out); } /** * Equivalent to {@code new CommandLine(command).usage(out, ansi)}. * See {@link #usage(PrintWriter, Help.Ansi)} for details. * @param command the object annotated with {@link Command}, {@link Option} and {@link Parameters} * @param out the print stream to print the help message to * @param ansi whether the usage message should contain ANSI escape codes or not * @throws IllegalArgumentException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation */ public static void usage(Object command, PrintWriter out, Help.Ansi ansi) { toCommandLine(command, new DefaultFactory()).usage(out, ansi); } /** * Equivalent to {@code new CommandLine(command).usage(out, colorScheme)}. * See {@link #usage(PrintWriter, Help.ColorScheme)} for details. 
* @param command the object annotated with {@link Command}, {@link Option} and {@link Parameters} * @param out the print stream to print the help message to * @param colorScheme the {@code ColorScheme} defining the styles for options, parameters and commands when ANSI is enabled * @throws IllegalArgumentException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation */ public static void usage(Object command, PrintWriter out, Help.ColorScheme colorScheme) { toCommandLine(command, new DefaultFactory()).usage(out, colorScheme); } /** * Delegates to {@link #usage(PrintWriter, Help.Ansi)} with the {@linkplain Help.Ansi#AUTO platform default}. * @param out the printStream to print to * @see #usage(PrintWriter, Help.ColorScheme) */ public void usage(PrintWriter out) { usage(out, Help.Ansi.AUTO); } /** * Delegates to {@link #usage(PrintWriter, Help.ColorScheme)} with the {@linkplain Help#defaultColorScheme(CommandLine.Help.Ansi) default color scheme}. * @param out the printStream to print to * @param ansi whether the usage message should include ANSI escape codes or not * @see #usage(PrintWriter, Help.ColorScheme) */ public void usage(PrintWriter out, Help.Ansi ansi) { usage(out, Help.defaultColorScheme(ansi)); } /** * Prints a usage help message for the annotated command class to the specified {@code PrintWriter}. * Delegates construction of the usage help message to the {@link Help} inner class and is equivalent to: * <pre> * Help help = new Help(command).addAllSubcommands(getSubcommands()); * StringBuilder sb = new StringBuilder() * .append(help.headerHeading()) * .append(help.header()) * .append(help.synopsisHeading()) //e.g. Usage: * .append(help.synopsis()) //e.g. &lt;main class&gt; [OPTIONS] &lt;command&gt; [COMMAND-OPTIONS] [ARGUMENTS] * .append(help.descriptionHeading()) //e.g. %nDescription:%n%n * .append(help.description()) //e.g. {"Converts foos to bars.", "Use options to control conversion mode."} * .append(help.parameterListHeading()) //e.g. %nPositional parameters:%n%n * .append(help.parameterList()) //e.g. [FILE...] the files to convert * .append(help.optionListHeading()) //e.g. %nOptions:%n%n * .append(help.optionList()) //e.g. -h, --help displays this help and exits * .append(help.commandListHeading()) //e.g. %nCommands:%n%n * .append(help.commandList()) //e.g. add adds the frup to the frooble * .append(help.footerHeading()) * .append(help.footer()); * out.print(sb); * </pre> * <p>Annotate your class with {@link Command} to control many aspects of the usage help message, including * the program name, text of section headings and section contents, and some aspects of the auto-generated sections * of the usage help message. 
     * <p>To customize the auto-generated sections of the usage help message, like how option details are displayed,
     * instantiate a {@link Help} object and use a {@link Help.TextTable} with more or fewer columns, a custom
     * {@linkplain Help.Layout layout}, and/or a custom option {@linkplain Help.IOptionRenderer renderer}
     * for ultimate control over which aspects of an Option or Field are displayed where.</p>
     * @param out the {@code PrintWriter} to print the usage help message to
     * @param colorScheme the {@code ColorScheme} defining the styles for options, parameters and commands when ANSI is enabled
     */
    public void usage(PrintWriter out, Help.ColorScheme colorScheme) {
        Help help = new Help(getCommandSpec(), colorScheme);
        StringBuilder sb = new StringBuilder()
                .append(help.headerHeading())
                .append(help.header())
                .append(help.synopsisHeading())      //e.g. Usage:
                .append(help.synopsis(help.synopsisHeadingLength())) //e.g. &lt;main class&gt; [OPTIONS] &lt;command&gt; [COMMAND-OPTIONS] [ARGUMENTS]
                .append(help.descriptionHeading())   //e.g. %nDescription:%n%n
                .append(help.description())          //e.g. {"Converts foos to bars.", "Use options to control conversion mode."}
                .append(help.parameterListHeading()) //e.g. %nPositional parameters:%n%n
                .append(help.parameterList())        //e.g. [FILE...] the files to convert
                .append(help.optionListHeading())    //e.g. %nOptions:%n%n
                .append(help.optionList())           //e.g. -h, --help displays this help and exits
                .append(help.commandListHeading())   //e.g. %nCommands:%n%n
                .append(help.commandList())          //e.g. add adds the frup to the frooble
                .append(help.footerHeading())
                .append(help.footer());
        out.print(sb);
    }
    /**
     * Delegates to {@link #printVersionHelp(PrintWriter, Help.Ansi)} with the {@linkplain Help.Ansi#AUTO platform default}.
     * @param out the {@code PrintWriter} to print to
     * @see #printVersionHelp(PrintWriter, Help.Ansi)
     * @since 0.9.8
     */
    public void printVersionHelp(PrintWriter out) { printVersionHelp(out, Help.Ansi.AUTO); }
    /**
     * Prints version information from the {@link Command#version()} annotation to the specified {@code PrintWriter}.
     * Each element of the array of version strings is printed on a separate line. Version strings may contain
     * <a href="http://picocli.info/#_usage_help_with_styles_and_colors">markup for colors and style</a>.
     * @param out the {@code PrintWriter} to print to
     * @param ansi whether the usage message should include ANSI escape codes or not
     * @see Command#version()
     * @see Option#versionHelp()
     * @see #isVersionHelpRequested()
     * @since 0.9.8
     */
    public void printVersionHelp(PrintWriter out, Help.Ansi ansi) {
        for (String versionInfo : getCommandSpec().version()) {
            out.println(ansi.new Text(versionInfo));
        }
    }
    /**
     * Prints version information from the {@link Command#version()} annotation to the specified {@code PrintWriter}.
     * Each element of the array of version strings is {@linkplain String#format(String, Object...) formatted} with the
     * specified parameters, and printed on a separate line. Both version strings and parameters may contain
     * <a href="http://picocli.info/#_usage_help_with_styles_and_colors">markup for colors and style</a>.
     * @param out the {@code PrintWriter} to print to
     * @param ansi whether the usage message should include ANSI escape codes or not
     * @param params Arguments referenced by the format specifiers in the version strings
     * @see Command#version()
     * @see Option#versionHelp()
     * @see #isVersionHelpRequested()
     * @since 1.0.0
     */
    public void printVersionHelp(PrintWriter out, Help.Ansi ansi, Object...
 params) {
        for (String versionInfo : getCommandSpec().version()) {
            out.println(ansi.new Text(String.format(versionInfo, params)));
        }
    }
    /**
     * Delegates to {@link #call(Callable, PrintWriter, Help.Ansi, String...)} with {@link Help.Ansi#AUTO}.
     * <p>
     * From picocli v2.0, this method prints usage help or version help if {@linkplain #printHelpIfRequested(List, PrintWriter, Help.Ansi) requested},
     * and any exceptions thrown by the {@code Callable} are caught and rethrown wrapped in an {@code ExecutionException}.
     * </p>
     * @param callable the command to call when {@linkplain #parse(String...) parsing} succeeds.
     * @param out the {@code PrintWriter} to print to
     * @param args the command line arguments to parse
     * @param <C> the annotated object must implement Callable
     * @param <T> the return type of the most specific command (must implement {@code Callable})
     * @see #call(Callable, PrintWriter, Help.Ansi, String...)
     * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation
     * @throws ExecutionException if the Callable throws an exception
     * @return {@code null} if an error occurred while parsing the command line options, otherwise returns the result of calling the Callable
     * @see #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...)
     * @see RunFirst
     */
    public static <C extends Callable<T>, T> T call(C callable, PrintWriter out, String... args) {
        return call(callable, out, Help.Ansi.AUTO, args);
    }
    /**
     * Convenience method to allow command line application authors to avoid some boilerplate code in their application.
     * The annotated object needs to implement {@link Callable}. Calling this method is equivalent to:
     * <pre>
     * CommandLine cmd = new CommandLine(callable);
     * List&lt;CommandLine&gt; parsedCommands;
     * try {
     *     parsedCommands = cmd.parse(args);
     * } catch (ParameterException ex) {
     *     out.println(ex.getMessage());
     *     cmd.usage(out, ansi);
     *     return null;
     * }
     * if (CommandLine.printHelpIfRequested(parsedCommands, out, ansi)) {
     *     return null;
     * }
     * CommandLine last = parsedCommands.get(parsedCommands.size() - 1);
     * try {
     *     Callable&lt;Object&gt; subcommand = last.getCommand();
     *     return subcommand.call();
     * } catch (Exception ex) {
     *     throw new ExecutionException(last, "Error calling " + last.getCommand(), ex);
     * }
     * </pre>
     * <p>
     * If the specified Callable command has subcommands, the {@linkplain RunLast last} subcommand specified on the
     * command line is executed.
     * Commands with subcommands may be interested in calling the {@link #parseWithHandler(IParseResultHandler, PrintWriter, String...) parseWithHandler}
     * method with a {@link RunAll} handler or a custom handler.
     * </p><p>
     * From picocli v2.0, this method prints usage help or version help if {@linkplain #printHelpIfRequested(List, PrintWriter, Help.Ansi) requested},
     * and any exceptions thrown by the {@code Callable} are caught and rethrown wrapped in an {@code ExecutionException}.
     * </p>
     * @param callable the command to call when {@linkplain #parse(String...) parsing} succeeds.
     * @param out the {@code PrintWriter} to print to
     * @param ansi whether the usage message should include ANSI escape codes or not
     * @param args the command line arguments to parse
     * @param <C> the annotated object must implement Callable
     * @param <T> the return type of the specified {@code Callable}
     * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation
     * @throws ExecutionException if the Callable throws an exception
     * @return {@code null} if an error occurred while parsing the command line options, or if help was requested and printed. Otherwise returns the result of calling the Callable
     * @see #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...)
     * @see RunLast
     */
    public static <C extends Callable<T>, T> T call(C callable, PrintWriter out, Help.Ansi ansi, String... args) {
        return call(new CommandLine(callable), out, ansi, args);
    }
    /**
     * Variant of {@link #call(Callable, PrintWriter, Help.Ansi, String...)} that accepts a pre-configured
     * {@code CommandLine} (for example, one with registered subcommands or custom type converters).
     * Parses the specified arguments with a {@link RunLast} handler and a {@link DefaultExceptionHandler}.
     * @param cmd the pre-configured {@code CommandLine} whose command to call when parsing succeeds
     * @param out the {@code PrintWriter} to print to
     * @param ansi whether messages should include ANSI escape codes or not
     * @param args the command line arguments to parse
     * @param <C> the annotated object must implement Callable
     * @param <T> the return type of the {@code Callable}
     * @return the result of calling the last executed {@code Callable}, or {@code null} if an error occurred
     *      while parsing the command line options, or if help was requested and printed
     */
    public static <C extends Callable<T>, T> T call(CommandLine cmd, PrintWriter out, Help.Ansi ansi, String... args) {
        List<Object> results = cmd.parseWithHandlers(new RunLast(), out, ansi, new DefaultExceptionHandler(), args);
        @SuppressWarnings("unchecked") T result = results == null || results.isEmpty() ? null : (T) results.get(0);
        return result;
    }
    /**
     * Delegates to {@link #run(Runnable, PrintWriter, Help.Ansi, String...)} with {@link Help.Ansi#AUTO}.
     * <p>
     * From picocli v2.0, this method prints usage help or version help if {@linkplain #printHelpIfRequested(List, PrintWriter, Help.Ansi) requested},
     * and any exceptions thrown by the {@code Runnable} are caught and rethrown wrapped in an {@code ExecutionException}.
     * </p>
     * @param runnable the command to run when {@linkplain #parse(String...) parsing} succeeds.
     * @param out the {@code PrintWriter} to print to
     * @param args the command line arguments to parse
     * @param <R> the annotated object must implement Runnable
     * @see #run(Runnable, PrintWriter, Help.Ansi, String...)
     * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation
     * @throws ExecutionException if the Runnable throws an exception
     * @see #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...)
     * @see RunFirst
     */
    public static <R extends Runnable> void run(R runnable, PrintWriter out, String... args) {
        run(runnable, out, Help.Ansi.AUTO, args);
    }
    /**
     * Convenience method to allow command line application authors to avoid some boilerplate code in their application.
     * The annotated object needs to implement {@link Runnable}. Calling this method is equivalent to:
     * <pre>
     * CommandLine cmd = new CommandLine(runnable);
     * List&lt;CommandLine&gt; parsedCommands;
     * try {
     *     parsedCommands = cmd.parse(args);
     * } catch (ParameterException ex) {
     *     out.println(ex.getMessage());
     *     cmd.usage(out, ansi);
     *     return null;
     * }
     * if (CommandLine.printHelpIfRequested(parsedCommands, out, ansi)) {
     *     return null;
     * }
     * CommandLine last = parsedCommands.get(parsedCommands.size() - 1);
     * try {
     *     Runnable subcommand = last.getCommand();
     *     subcommand.run();
     * } catch (Exception ex) {
     *     throw new ExecutionException(last, "Error running " + last.getCommand(), ex);
     * }
     * </pre>
     * <p>
     * If the specified Runnable command has subcommands, the {@linkplain RunLast last} subcommand specified on the
     * command line is executed.
     * Commands with subcommands may be interested in calling the {@link #parseWithHandler(IParseResultHandler, PrintWriter, String...) parseWithHandler}
     * method with a {@link RunAll} handler or a custom handler.
     * </p><p>
     * From picocli v2.0, this method prints usage help or version help if {@linkplain #printHelpIfRequested(List, PrintWriter, Help.Ansi) requested},
     * and any exceptions thrown by the {@code Runnable} are caught and rethrown wrapped in an {@code ExecutionException}.
     * </p>
     * @param runnable the command to run when {@linkplain #parse(String...) parsing} succeeds.
     * @param out the {@code PrintWriter} to print to
     * @param ansi whether the usage message should include ANSI escape codes or not
     * @param args the command line arguments to parse
     * @param <R> the annotated object must implement Runnable
     * @throws InitializationException if the specified command object does not have a {@link Command}, {@link Option} or {@link Parameters} annotation
     * @throws ExecutionException if the Runnable throws an exception
     * @see #parseWithHandlers(IParseResultHandler, PrintWriter, Help.Ansi, IExceptionHandler, String...)
     * @see RunLast
     */
    public static <R extends Runnable> void run(R runnable, PrintWriter out, Help.Ansi ansi, String... args) {
        CommandLine cmd = new CommandLine(runnable); // validate command outside of try-catch
        cmd.parseWithHandlers(new RunLast(), out, ansi, new DefaultExceptionHandler(), args);
    }
    /**
     * Registers the specified type converter for the specified class. When initializing fields annotated with
     * {@link Option}, the field's type is used as a lookup key to find the associated type converter, and this
     * type converter converts the original command line argument string value to the correct type.
     * <p>
     * Java 8 lambdas make it easy to register custom type converters:
     * </p>
     * <pre>
     * commandLine.registerConverter(java.nio.file.Path.class, s -&gt; java.nio.file.Paths.get(s));
     * commandLine.registerConverter(java.time.Duration.class, s -&gt; java.time.Duration.parse(s));</pre>
     * <p>
     * Built-in type converters are pre-registered for the following java 1.5 types:
     * </p>
     * <ul>
     *   <li>all primitive types</li>
     *   <li>all primitive wrapper types: Boolean, Byte, Character, Double, Float, Integer, Long, Short</li>
     *   <li>any enum</li>
     *   <li>java.io.File</li>
     *   <li>java.math.BigDecimal</li>
     *   <li>java.math.BigInteger</li>
     *   <li>java.net.InetAddress</li>
     *   <li>java.net.URI</li>
     *   <li>java.net.URL</li>
     *   <li>java.nio.charset.Charset</li>
     *   <li>java.sql.Time</li>
     *   <li>java.util.Date</li>
     *   <li>java.util.UUID</li>
     *   <li>java.util.regex.Pattern</li>
     *   <li>StringBuilder</li>
     *   <li>CharSequence</li>
     *   <li>String</li>
     * </ul>
     * <p>The specified converter will be registered with this {@code CommandLine} and the full hierarchy of its
     * subcommands and nested sub-subcommands <em>at the moment the converter is registered</em>. Subcommands added
     * later will not have this converter added automatically.
To ensure a custom type converter is available to all * subcommands, register the type converter last, after adding subcommands.</p> * * @param cls the target class to convert parameter string values to * @param converter the class capable of converting string values to the specified target type * @param <K> the target type * @return this CommandLine object, to allow method chaining * @see #addSubcommand(String, Object) */ public <K> CommandLine registerConverter(Class<K> cls, ITypeConverter<K> converter) { interpreter.converterRegistry.put(Assert.notNull(cls, "class"), Assert.notNull(converter, "converter")); for (CommandLine command : getCommandSpec().commands.values()) { command.registerConverter(cls, converter); } return this; } /** Returns the String that separates option names from option values when parsing command line options. * @return the String the parser uses to separate option names from option values * @see CommandSpec#separator() */ public String getSeparator() { return getCommandSpec().separator(); } /** Sets the String the parser uses to separate option names from option values to the specified value. * The separator may also be set declaratively with the {@link CommandLine.Command#separator()} annotation attribute. * @param separator the String that separates option names from option values * @see CommandSpec#separator(String) * @return this {@code CommandLine} object, to allow method chaining */ public CommandLine setSeparator(String separator) { getCommandSpec().separator(Assert.notNull(separator, "separator")); return this; } /** Returns the command name (also called program name) displayed in the usage help synopsis. * @return the command name (also called program name) displayed in the usage * @see CommandSpec#name() * @since 2.0 */ public String getCommandName() { return getCommandSpec().name(); } /** Sets the command name (also called program name) displayed in the usage help synopsis to the specified value. * Note that this method only modifies the usage help message, it does not impact parsing behaviour. * The command name may also be set declaratively with the {@link CommandLine.Command#name()} annotation attribute. * @param commandName command name (also called program name) displayed in the usage help synopsis * @return this {@code CommandLine} object, to allow method chaining * @see CommandSpec#name(String) * @since 2.0 */ public CommandLine setCommandName(String commandName) { getCommandSpec().name(Assert.notNull(commandName, "commandName")); return this; } /** Returns whether arguments starting with {@code '@'} should be treated as the path to an argument file and its * contents should be expanded into separate arguments for each line in the specified file. * This property is {@code true} by default. * @return whether "argument files" or {@code @files} should be expanded into their content * @since 2.1 */ public boolean isExpandAtFiles() { return expandAtFiles; } /** Sets whether arguments starting with {@code '@'} should be treated as the path to an argument file and its * contents should be expanded into separate arguments for each line in the specified file. ({@code true} by default.) 
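     * <p>A sketch of the expected effect (assuming a hypothetical {@code MyApp} command and an argument file
     * {@code args.txt} whose two lines are {@code --verbose} and {@code input.txt}):</p>
     * <pre>
     * CommandLine cmd = new CommandLine(new MyApp());
     * cmd.setExpandAtFiles(true);   // the default
     * cmd.parse("@args.txt");       // equivalent to: cmd.parse("--verbose", "input.txt");
     * </pre>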
     * @param expandAtFiles whether "argument files" or {@code @files} should be expanded into their content
     * @return this {@code CommandLine} object, to allow method chaining
     * @since 2.1 */
    public CommandLine setExpandAtFiles(boolean expandAtFiles) {
        this.expandAtFiles = expandAtFiles;
        return this;
    }
    private static boolean empty(String str) { return str == null || str.trim().length() == 0; }
    private static boolean empty(Object[] array) { return array == null || array.length == 0; }
    private static boolean empty(Text txt) { return txt == null || txt.plain.toString().trim().length() == 0; }
    private static String str(String[] arr, int i) { return (arr == null || arr.length == 0) ? "" : arr[i]; }
    private static boolean isBoolean(Class<?> type) { return type == Boolean.class || type == Boolean.TYPE; }
    private static CommandLine toCommandLine(Object obj, IFactory factory) { return obj instanceof CommandLine ? (CommandLine) obj : new CommandLine(obj, factory);}
    private static boolean isMultiValue(Field field) { return isMultiValue(field.getType()); }
    private static boolean isMultiValue(Class<?> cls) { return cls.isArray() || Collection.class.isAssignableFrom(cls) || Map.class.isAssignableFrom(cls); }
    /**
     * <p>
     * Annotate fields in your class with {@code @Option} and picocli will initialize these fields when matching
     * arguments are specified on the command line.
     * </p><p>
     * For example:
     * </p>
     * <pre>
     * import static picocli.CommandLine.*;
     *
     * public class MyClass {
     *     &#064;Parameters(description = "Any number of input files")
     *     private List&lt;File&gt; files = new ArrayList&lt;File&gt;();
     *
     *     &#064;Option(names = { "-o", "--out" }, description = "Output file (default: print to console)")
     *     private File outputFile;
     *
     *     &#064;Option(names = { "-v", "--verbose"}, description = "Verbose mode. Helpful for troubleshooting. Multiple -v options increase the verbosity.")
     *     private boolean[] verbose;
     *
     *     &#064;Option(names = { "-h", "--help", "-?", "-help"}, usageHelp = true, description = "Display this help and exit")
     *     private boolean help;
     * }
     * </pre>
     * <p>
     * A field cannot be annotated with both {@code @Parameters} and {@code @Option} or a
     * {@code ParameterException} is thrown.
     * </p>
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface Option {
        /**
         * One or more option names. At least one option name is required.
         * <p>
         * Different environments have different conventions for naming options, but usually options have a prefix
         * that sets them apart from parameters.
         * Picocli supports all of the below styles. The default separator is {@code '='}, but this can be configured.
         * </p><p>
         * <b>*nix</b>
         * </p><p>
         * In Unix and Linux, options have a short (single-character) name, a long name or both.
         * Short options
         * (<a href="http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html#tag_12_02">POSIX
         * style</a>) are single-character and are preceded by the {@code '-'} character, e.g., {@code `-v'}.
         * <a href="https://www.gnu.org/software/tar/manual/html_node/Long-Options.html">GNU-style</a> long
         * (or <em>mnemonic</em>) options start with two dashes in a row, e.g., {@code `--file'}.
         * </p><p>Picocli supports the POSIX convention that short options can be grouped, with the last option
         * optionally taking a parameter, which may be attached to the option name or separated by a space or
         * a {@code '='} character.
The below examples are all equivalent: * </p><pre> * -xvfFILE * -xvf FILE * -xvf=FILE * -xv --file FILE * -xv --file=FILE * -x -v --file FILE * -x -v --file=FILE * </pre><p> * <b>DOS</b> * </p><p> * DOS options mostly have upper case single-character names and start with a single slash {@code '/'} character. * Option parameters are separated by a {@code ':'} character. Options cannot be grouped together but * must be specified separately. For example: * </p><pre> * DIR /S /A:D /T:C * </pre><p> * <b>PowerShell</b> * </p><p> * Windows PowerShell options generally are a word preceded by a single {@code '-'} character, e.g., {@code `-Help'}. * Option parameters are separated by a space or by a {@code ':'} character. * </p> * @return one or more option names */ String[] names(); /** * Indicates whether this option is required. By default this is false. * If an option is required, but a user invokes the program without specifying the required option, * a {@link MissingParameterException} is thrown from the {@link #parse(String...)} method. * @return whether this option is required */ boolean required() default false; /** * Set {@code help=true} if this option should disable validation of the remaining arguments: * If the {@code help} option is specified, no error message is generated for missing required options. * <p> * This attribute is useful for special options like help ({@code -h} and {@code --help} on unix, * {@code -?} and {@code -Help} on Windows) or version ({@code -V} and {@code --version} on unix, * {@code -Version} on Windows). * </p> * <p> * Note that the {@link #parse(String...)} method will not print help documentation. It will only set * the value of the annotated field. It is the responsibility of the caller to inspect the annotated fields * and take the appropriate action. * </p> * @return whether this option disables validation of the other arguments * @deprecated Use {@link #usageHelp()} and {@link #versionHelp()} instead. See {@link #printHelpIfRequested(List, PrintWriter, CommandLine.Help.Ansi)} */ @Deprecated boolean help() default false; /** * Set {@code usageHelp=true} if this option allows the user to request usage help. If this option is * specified on the command line, picocli will not validate the remaining arguments (so no "missing required * option" errors) and the {@link CommandLine#isUsageHelpRequested()} method will return {@code true}. * <p> * This attribute is useful for special options like help ({@code -h} and {@code --help} on unix, * {@code -?} and {@code -Help} on Windows). * </p> * <p> * Note that the {@link #parse(String...)} method will not print usage help documentation. It will only set * the value of the annotated field. It is the responsibility of the caller to inspect the annotated fields * and take the appropriate action. * </p> * @return whether this option allows the user to request usage help * @since 0.9.8 */ boolean usageHelp() default false; /** * Set {@code versionHelp=true} if this option allows the user to request version information. If this option is * specified on the command line, picocli will not validate the remaining arguments (so no "missing required * option" errors) and the {@link CommandLine#isVersionHelpRequested()} method will return {@code true}. * <p> * This attribute is useful for special options like version ({@code -V} and {@code --version} on unix, * {@code -Version} on Windows). * </p> * <p> * Note that the {@link #parse(String...)} method will not print version information. 
It will only set * the value of the annotated field. It is the responsibility of the caller to inspect the annotated fields * and take the appropriate action. * </p> * @return whether this option allows the user to request version information * @since 0.9.8 */ boolean versionHelp() default false; /** * Description of this option, used when generating the usage documentation. * @return the description of this option */ String[] description() default {}; /** * Specifies the minimum number of required parameters and the maximum number of accepted parameters. * If an option declares a positive arity, and the user specifies an insufficient number of parameters on the * command line, a {@link MissingParameterException} is thrown by the {@link #parse(String...)} method. * <p> * In many cases picocli can deduce the number of required parameters from the field's type. * By default, flags (boolean options) have arity zero, * and single-valued type fields (String, int, Integer, double, Double, File, Date, etc) have arity one. * Generally, fields with types that cannot hold multiple values can omit the {@code arity} attribute. * </p><p> * Fields used to capture options with arity two or higher should have a type that can hold multiple values, * like arrays or Collections. See {@link #type()} for strongly-typed Collection fields. * </p><p> * For example, if an option has 2 required parameters and any number of optional parameters, * specify {@code @Option(names = "-example", arity = "2..*")}. * </p> * <b>A note on boolean options</b> * <p> * By default picocli does not expect boolean options (also called "flags" or "switches") to have a parameter. * You can make a boolean option take a required parameter by annotating your field with {@code arity="1"}. * For example: </p> * <pre>&#064;Option(names = "-v", arity = "1") boolean verbose;</pre> * <p> * Because this boolean field is defined with arity 1, the user must specify either {@code <program> -v false} * or {@code <program> -v true} * on the command line, or a {@link MissingParameterException} is thrown by the {@link #parse(String...)} * method. * </p><p> * To make the boolean parameter possible but optional, define the field with {@code arity = "0..1"}. * For example: </p> * <pre>&#064;Option(names="-v", arity="0..1") boolean verbose;</pre> * <p>This will accept any of the below without throwing an exception:</p> * <pre> * -v * -v true * -v false * </pre> * @return how many arguments this option requires */ String arity() default ""; /** * Specify a {@code paramLabel} for the option parameter to be used in the usage help message. If omitted, * picocli uses the field name in fish brackets ({@code '<'} and {@code '>'}) by default. Example: * <pre>class Example { * &#064;Option(names = {"-o", "--output"}, paramLabel="FILE", description="path of the output file") * private File out; * &#064;Option(names = {"-j", "--jobs"}, arity="0..1", description="Allow N jobs at once; infinite jobs with no arg.") * private int maxJobs = -1; * }</pre> * <p>By default, the above gives a usage help message like the following:</p><pre> * Usage: &lt;main class&gt; [OPTIONS] * -o, --output FILE path of the output file * -j, --jobs [&lt;maxJobs&gt;] Allow N jobs at once; infinite jobs with no arg. * </pre> * @return name of the option parameter used in the usage help message */ String paramLabel() default ""; /** <p> * Optionally specify a {@code type} to control exactly what Class the option parameter should be converted * to. 
This may be useful when the field type is an interface or an abstract class. For example, a field can * be declared to have type {@code java.lang.Number}, and annotating {@code @Option(type=Short.class)} * ensures that the option parameter value is converted to a {@code Short} before setting the field value. * </p><p> * For array fields whose <em>component</em> type is an interface or abstract class, specify the concrete <em>component</em> type. * For example, a field with type {@code Number[]} may be annotated with {@code @Option(type=Short.class)} * to ensure that option parameter values are converted to {@code Short} before adding an element to the array. * </p><p> * Picocli will use the {@link ITypeConverter} that is * {@linkplain #registerConverter(Class, ITypeConverter) registered} for the specified type to convert * the raw String values before modifying the field value. * </p><p> * Prior to 2.0, the {@code type} attribute was necessary for {@code Collection} and {@code Map} fields, * but starting from 2.0 picocli will infer the component type from the generic type's type arguments. * For example, for a field of type {@code Map<TimeUnit, Long>} picocli will know the option parameter * should be split up in key=value pairs, where the key should be converted to a {@code java.util.concurrent.TimeUnit} * enum value, and the value should be converted to a {@code Long}. No {@code @Option(type=...)} type attribute * is required for this. For generic types with wildcards, picocli will take the specified upper or lower bound * as the Class to convert to, unless the {@code @Option} annotation specifies an explicit {@code type} attribute. * </p><p> * If the field type is a raw collection or a raw map, and you want it to contain other values than Strings, * or if the generic type's type arguments are interfaces or abstract classes, you may * specify a {@code type} attribute to control the Class that the option parameter should be converted to. * @return the type(s) to convert the raw String values */ Class<?>[] type() default {}; /** * Optionally specify one or more {@link ITypeConverter} classes to use to convert the command line argument into * a strongly typed value (or key-value pair for map fields). This is useful when a particular field should * use a custom conversion that is different from the normal conversion for the field's type. * <p>For example, for a specific field you may want to use a converter that maps the constant names defined * in {@link java.sql.Types java.sql.Types} to the {@code int} value of these constants, but any other {@code int} fields should * not be affected by this and should continue to use the standard int converter that parses numeric values.</p> * @return the type converter(s) to use to convert String values to strongly typed values for this field * @see CommandLine#registerConverter(Class, ITypeConverter) */ Class<? extends ITypeConverter<?>>[] converter() default {}; /** * Specify a regular expression to use to split option parameter values before applying them to the field. * All elements resulting from the split are added to the array or Collection. Ignored for single-value fields. * @return a regular expression to split option parameter values or {@code ""} if the value should not be split * @see String#split(String) */ String split() default ""; /** * Set {@code hidden=true} if this option should not be included in the usage documentation. 
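         * <p>For example (a sketch of a hypothetical internal option):</p>
         * <pre>&#064;Option(names = "--debug", hidden = true, description = "Internal use only.") boolean debug;</pre>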
         * @return whether this option should be excluded from the usage message
         */
        boolean hidden() default false;

        /** Use this attribute to control for a specific option whether its default value should be shown in the usage
         * help message. If not specified, the default value is only shown when the {@link Command#showDefaultValues()}
         * is set {@code true} on the command. Use this attribute to specify whether the default value
         * for this specific option should always be shown or never be shown, regardless of the command setting.
         * @return whether this option's default value should be shown in the usage help message
         */
        Help.Visibility showDefaultValue() default Help.Visibility.ON_DEMAND;
    }
    /**
     * <p>
     * Fields annotated with {@code @Parameters} will be initialized with positional parameters. By specifying the
     * {@link #index()} attribute you can pick the exact position or a range of positional parameters to apply. If no
     * index is specified, the field will get all positional parameters (and so it should be an array or a collection).
     * </p><p>
     * For example:
     * </p>
     * <pre>
     * import static picocli.CommandLine.*;
     *
     * public class MyCalcParameters {
     *     &#064;Parameters(description = "Any number of input numbers")
     *     private List&lt;BigDecimal&gt; numbers = new ArrayList&lt;BigDecimal&gt;();
     *
     *     &#064;Option(names = { "-h", "--help" }, usageHelp = true, description = "Display this help and exit")
     *     private boolean help;
     * }
     * </pre><p>
     * A field cannot be annotated with both {@code @Parameters} and {@code @Option} or a {@code ParameterException}
     * is thrown.</p>
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface Parameters {
        /** Specify an index ("0", or "1", etc.) to pick which of the command line arguments should be assigned to this
         * field. For array or Collection fields, you can also specify an index range ("0..3", or "2..*", etc.) to assign
         * a subset of the command line arguments to this field. The default is "*", meaning all command line arguments.
         * @return an index or range specifying which of the command line arguments should be assigned to this field
         */
        String index() default "*";

        /** Description of the parameter(s), used when generating the usage documentation.
         * @return the description of the parameter(s)
         */
        String[] description() default {};

        /**
         * Specifies the minimum number of required parameters and the maximum number of accepted parameters. If a
         * positive arity is declared, and the user specifies an insufficient number of parameters on the command line,
         * {@link MissingParameterException} is thrown by the {@link #parse(String...)} method.
         * <p>The default depends on the type of the parameter: booleans require no parameters, arrays and Collections
         * accept zero to any number of parameters, and any other type accepts one parameter.</p>
         * @return the range of minimum and maximum parameters accepted by this command
         */
        String arity() default "";

        /**
         * Specify a {@code paramLabel} for the parameter to be used in the usage help message. If omitted,
         * picocli uses the field name in fish brackets ({@code '<'} and {@code '>'}) by default. Example:
         * <pre>class Example {
         *     &#064;Parameters(paramLabel="FILE", description="path of the input FILE(s)")
         *     private File[] inputFiles;
         * }</pre>
         * <p>By default, the above gives a usage help message like the following:</p><pre>
         * Usage: &lt;main class&gt; [FILE...]
         * [FILE...]
path of the input FILE(s) * </pre> * @return name of the positional parameter used in the usage help message */ String paramLabel() default ""; /** * <p> * Optionally specify a {@code type} to control exactly what Class the positional parameter should be converted * to. This may be useful when the field type is an interface or an abstract class. For example, a field can * be declared to have type {@code java.lang.Number}, and annotating {@code @Parameters(type=Short.class)} * ensures that the positional parameter value is converted to a {@code Short} before setting the field value. * </p><p> * For array fields whose <em>component</em> type is an interface or abstract class, specify the concrete <em>component</em> type. * For example, a field with type {@code Number[]} may be annotated with {@code @Parameters(type=Short.class)} * to ensure that positional parameter values are converted to {@code Short} before adding an element to the array. * </p><p> * Picocli will use the {@link ITypeConverter} that is * {@linkplain #registerConverter(Class, ITypeConverter) registered} for the specified type to convert * the raw String values before modifying the field value. * </p><p> * Prior to 2.0, the {@code type} attribute was necessary for {@code Collection} and {@code Map} fields, * but starting from 2.0 picocli will infer the component type from the generic type's type arguments. * For example, for a field of type {@code Map<TimeUnit, Long>} picocli will know the positional parameter * should be split up in key=value pairs, where the key should be converted to a {@code java.util.concurrent.TimeUnit} * enum value, and the value should be converted to a {@code Long}. No {@code @Parameters(type=...)} type attribute * is required for this. For generic types with wildcards, picocli will take the specified upper or lower bound * as the Class to convert to, unless the {@code @Parameters} annotation specifies an explicit {@code type} attribute. * </p><p> * If the field type is a raw collection or a raw map, and you want it to contain other values than Strings, * or if the generic type's type arguments are interfaces or abstract classes, you may * specify a {@code type} attribute to control the Class that the positional parameter should be converted to. * @return the type(s) to convert the raw String values */ Class<?>[] type() default {}; /** * Optionally specify one or more {@link ITypeConverter} classes to use to convert the command line argument into * a strongly typed value (or key-value pair for map fields). This is useful when a particular field should * use a custom conversion that is different from the normal conversion for the field's type. * <p>For example, for a specific field you may want to use a converter that maps the constant names defined * in {@link java.sql.Types java.sql.Types} to the {@code int} value of these constants, but any other {@code int} fields should * not be affected by this and should continue to use the standard int converter that parses numeric values.</p> * @return the type converter(s) to use to convert String values to strongly typed values for this field * @see CommandLine#registerConverter(Class, ITypeConverter) */ Class<? extends ITypeConverter<?>>[] converter() default {}; /** * Specify a regular expression to use to split positional parameter values before applying them to the field. * All elements resulting from the split are added to the array or Collection. Ignored for single-value fields. 
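         * <p>For example (a sketch): a field declared as</p>
         * <pre>&#064;Parameters(split = ",") List&lt;String&gt; values;</pre>
         * <p>collects three elements, {@code a}, {@code b} and {@code c}, from the single command line argument {@code a,b,c}.</p>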
         * @return a regular expression to split operand values or {@code ""} if the value should not be split
         * @see String#split(String)
         */
        String split() default "";

        /**
         * Set {@code hidden=true} if this parameter should not be included in the usage message.
         * @return whether this parameter should be excluded from the usage message
         */
        boolean hidden() default false;

        /** Use this attribute to control for a specific positional parameter whether its default value should be shown in the usage
         * help message. If not specified, the default value is only shown when the {@link Command#showDefaultValues()}
         * is set {@code true} on the command. Use this attribute to specify whether the default value
         * for this specific positional parameter should always be shown or never be shown, regardless of the command setting.
         * @return whether this positional parameter's default value should be shown in the usage help message
         */
        Help.Visibility showDefaultValue() default Help.Visibility.ON_DEMAND;
    }

    /**
     * <p>
     * Fields annotated with {@code @ParentCommand} will be initialized with the parent command of the current subcommand.
     * If the current command does not have a parent command, this annotation has no effect.
     * </p><p>
     * Parent commands often define options that apply to all the subcommands.
     * This annotation offers a convenient way to inject a reference to the parent command into a subcommand, so the
     * subcommand can access its parent options. For example:
     * </p><pre>
     * &#064;Command(name = "top", subcommands = Sub.class)
     * class Top implements Runnable {
     *
     *     &#064;Option(names = {"-d", "--directory"}, description = "this option applies to all subcommands")
     *     File baseDirectory;
     *
     *     public void run() { System.out.println("Hello from top"); }
     * }
     *
     * &#064;Command(name = "sub")
     * class Sub implements Runnable {
     *
     *     &#064;ParentCommand
     *     private Top parent;
     *
     *     public void run() {
     *         System.out.println("Subcommand: parent command 'directory' is " + parent.baseDirectory);
     *     }
     * }
     * </pre>
     * @since 2.2
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface ParentCommand { }

    /**
     * <p>
     * Fields annotated with {@code @Mixin} are "expanded" into the current command: {@link Option @Option} and
     * {@link Parameters @Parameters} in the mixin class are added to the options and positional parameters of this command.
     * A {@link DuplicateOptionAnnotationsException} is thrown if any of the options in the mixin has the same name as
     * an option in this command.
     * </p><p>
     * The {@code Mixin} annotation provides a way to reuse common options and parameters without subclassing. For example:
     * </p><pre>
     * class HelloWorld implements Runnable {
     *
     *     // adds the --help and --version options to this command
     *     &#064;Mixin
     *     private HelpOptions helpOptions = new HelpOptions();
     *
     *     &#064;Option(names = {"-u", "--userName"}, required = true, description = "The user name")
     *     String userName;
     *
     *     public void run() { System.out.println("Hello, " + userName); }
     * }
     *
     * // Common reusable help options.
     * class HelpOptions {
     *
     *     &#064;Option(names = { "-h", "--help"}, usageHelp = true, description = "Display this help and exit")
     *     private boolean help;
     *
     *     &#064;Option(names = { "-V", "--version"}, versionHelp = true, description = "Display version info and exit")
     *     private boolean versionHelp;
     * }
     * </pre>
     * @since 3.0
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.FIELD)
    public @interface Mixin {
        /** Optionally specify a name that the mixin object can be retrieved with from the {@code CommandSpec}.
* If not specified the name of the annotated field is used. * @return a String to register the mixin object with, or an empty String if the name of the annotated field should be used */ String name() default ""; } /** * <p>Annotate your class with {@code @Command} when you want more control over the format of the generated help * message. * </p><pre> * &#064;Command(name = "Encrypt", * description = "Encrypt FILE(s), or standard input, to standard output or to the output file.", * version = "Encrypt version 1.0", * footer = "Copyright (c) 2017") * public class Encrypt { * &#064;Parameters(paramLabel = "FILE", description = "Any number of input files") * private List&lt;File&gt; files = new ArrayList&lt;File&gt;(); * * &#064;Option(names = { "-o", "--out" }, description = "Output file (default: print to console)") * private File outputFile; * * &#064;Option(names = { "-v", "--verbose"}, description = "Verbose mode. Helpful for troubleshooting. Multiple -v options increase the verbosity.") * private boolean[] verbose; * * &#064;Option(names = { "-h", "--help" }, usageHelp = true, description = "Display this help and exit") * private boolean help; * * &#064;Option(names = { "-V", "--version"}, versionHelp = true, description = "Display version information and exit") * private boolean version; * }</pre> * <p> * The structure of a help message looks like this: * </p><ul> * <li>[header]</li> * <li>[synopsis]: {@code Usage: <commandName> [OPTIONS] [FILE...]}</li> * <li>[description]</li> * <li>[parameter list]: {@code [FILE...] Any number of input files}</li> * <li>[option list]: {@code -h, --help prints this help message and exits}</li> * <li>[footer]</li> * </ul> */ @Retention(RetentionPolicy.RUNTIME) @Target({ElementType.TYPE, ElementType.LOCAL_VARIABLE, ElementType.PACKAGE}) public @interface Command { /** Program name to show in the synopsis. If omitted, {@code "<main class>"} is used. * For {@linkplain #subcommands() declaratively added} subcommands, this attribute is also used * by the parser to recognize subcommands in the command line arguments. * @return the program name to show in the synopsis * @see CommandSpec#name() * @see Help#commandName() */ String name() default "<main class>"; /** A list of classes to instantiate and register as subcommands. When registering subcommands declaratively * like this, you don't need to call the {@link CommandLine#addSubcommand(String, Object)} method. For example, this: * <pre> * &#064;Command(subcommands = { * GitStatus.class, * GitCommit.class, * GitBranch.class }) * public class Git { ... } * * CommandLine commandLine = new CommandLine(new Git()); * </pre> is equivalent to this: * <pre> * // alternative: programmatically add subcommands. * // NOTE: in this case there should be no `subcommands` attribute on the @Command annotation. * &#064;Command public class Git { ... } * * CommandLine commandLine = new CommandLine(new Git()) * .addSubcommand("status", new GitStatus()) * .addSubcommand("commit", new GitCommit()) * .addSubcommand("branch", new GitBranch()); * </pre> * @return the declaratively registered subcommands of this command, or an empty array if none * @see CommandLine#addSubcommand(String, Object) * @since 0.9.8 */ Class<?>[] subcommands() default {}; /** String that separates options from option parameters. Default is {@code "="}. Spaces are also accepted. 
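         * <p>For example (a sketch, with a hypothetical {@code --count} option):</p>
         * <pre>
         * &#064;Command(separator = ":")   // users write --count:3 instead of the default --count=3
         * class MyApp { &#064;Option(names = "--count") int count; }
         * </pre>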
* @return the string that separates options from option parameters, used both when parsing and when generating usage help * @see CommandLine#setSeparator(String) */ String separator() default "="; /** Version information for this command, to print to the console when the user specifies an * {@linkplain Option#versionHelp() option} to request version help. This is not part of the usage help message. * * @return a string or an array of strings with version information about this command (each string in the array is displayed on a separate line). * @since 0.9.8 * @see CommandLine#printVersionHelp(PrintWriter) */ String[] version() default {}; /** Class that can provide version information dynamically at runtime. An implementation may return version * information obtained from the JAR manifest, a properties file or some other source. * @return a Class that can provide version information dynamically at runtime * @since 2.2 */ Class<? extends IVersionProvider> versionProvider() default NoVersionProvider.class; /** * Add the auto-help mixin to this command, which adds {@code -h} and {@code --help} {@linkplain Option#usageHelp() usageHelp} * options and {@code -V} and {@code --version} {@linkplain Option#versionHelp() versionHelp} options to the options * of this command, as well as a {@code help} subcommand (more information below). * <p> * Note that if no {@link #version()} or {@link #versionProvider()} is specified, the {@code --version} option will not print anything. * </p><p> * Auto-help also registers a {@code help} subcommand that will print help for the subcommand following it, or * for this command in case no subcommand is specified. For example: * </p><pre> * # two ways to get usage help for a subcommand: * maincommand help subcommand * maincommand subcommand --help * * # two ways to get usage help for the main command: * maincommand help * maincommand --help * </pre> * @return whether the auto-help mixin should be added to this command * @since 3.0 */ boolean autoHelp() default false; /** Set the heading preceding the header section. May contain embedded {@linkplain java.util.Formatter format specifiers}. * @return the heading preceding the header section * @see CommandSpec#headerHeading() * @see Help#headerHeading(Object...) */ String headerHeading() default ""; /** Optional summary description of the command, shown before the synopsis. * @return summary description of the command * @see CommandSpec#header() * @see Help#header(Object...) */ String[] header() default {}; /** Set the heading preceding the synopsis text. May contain embedded * {@linkplain java.util.Formatter format specifiers}. The default heading is {@code "Usage: "} (without a line * break between the heading and the synopsis text). * @return the heading preceding the synopsis text * @see Help#synopsisHeading(Object...) */ String synopsisHeading() default "Usage: "; /** Specify {@code true} to generate an abbreviated synopsis like {@code "<main> [OPTIONS] [PARAMETERS...]"}. * By default, a detailed synopsis with individual option names and parameters is generated. * @return whether the synopsis should be abbreviated * @see Help#abbreviatedSynopsis() * @see Help#detailedSynopsis(Comparator, boolean) */ boolean abbreviateSynopsis() default false; /** Specify one or more custom synopsis lines to display instead of an auto-generated synopsis. * @return custom synopsis text to replace the auto-generated synopsis * @see Help#customSynopsis(Object...) 
*/ String[] customSynopsis() default {}; /** Set the heading preceding the description section. May contain embedded {@linkplain java.util.Formatter format specifiers}. * @return the heading preceding the description section * @see Help#descriptionHeading(Object...) */ String descriptionHeading() default ""; /** Optional text to display between the synopsis line(s) and the list of options. * @return description of this command * @see Help#description(Object...) */ String[] description() default {}; /** Set the heading preceding the parameters list. May contain embedded {@linkplain java.util.Formatter format specifiers}. * @return the heading preceding the parameters list * @see Help#parameterListHeading(Object...) */ String parameterListHeading() default ""; /** Set the heading preceding the options list. May contain embedded {@linkplain java.util.Formatter format specifiers}. * @return the heading preceding the options list * @see Help#optionListHeading(Object...) */ String optionListHeading() default ""; /** Specify {@code false} to show Options in declaration order. The default is to sort alphabetically. * @return whether options should be shown in alphabetic order. */ boolean sortOptions() default true; /** Prefix required options with this character in the options list. The default is no marker: the synopsis * indicates which options and parameters are required. * @return the character to show in the options list to mark required options */ char requiredOptionMarker() default ' '; /** Specify {@code true} to show default values in the description column of the options list (except for * boolean options). False by default. * @return whether the default values for options and parameters should be shown in the description column */ boolean showDefaultValues() default false; /** Set the heading preceding the subcommands list. May contain embedded {@linkplain java.util.Formatter format specifiers}. * The default heading is {@code "Commands:%n"} (with a line break at the end). * @return the heading preceding the subcommands list * @see Help#commandListHeading(Object...) */ String commandListHeading() default "Commands:%n"; /** Set the heading preceding the footer section. May contain embedded {@linkplain java.util.Formatter format specifiers}. * @return the heading preceding the footer section * @see Help#footerHeading(Object...) */ String footerHeading() default ""; /** Optional text to display after the list of options. * @return text to display after the list of options * @see Help#footer(Object...) */ String[] footer() default {}; } /** * <p> * When parsing command line arguments and initializing * fields annotated with {@link Option @Option} or {@link Parameters @Parameters}, * String values can be converted to any type for which a {@code ITypeConverter} is registered. * </p><p> * This interface defines the contract for classes that know how to convert a String into some domain object. * Custom converters can be registered with the {@link #registerConverter(Class, ITypeConverter)} method. 
* </p><p> * Java 8 lambdas make it easy to register custom type converters: * </p> * <pre> * commandLine.registerConverter(java.nio.file.Path.class, s -&gt; java.nio.file.Paths.get(s)); * commandLine.registerConverter(java.time.Duration.class, s -&gt; java.time.Duration.parse(s));</pre> * <p> * Built-in type converters are pre-registered for the following java 1.5 types: * </p> * <ul> * <li>all primitive types</li> * <li>all primitive wrapper types: Boolean, Byte, Character, Double, Float, Integer, Long, Short</li> * <li>any enum</li> * <li>java.io.File</li> * <li>java.math.BigDecimal</li> * <li>java.math.BigInteger</li> * <li>java.net.InetAddress</li> * <li>java.net.URI</li> * <li>java.net.URL</li> * <li>java.nio.charset.Charset</li> * <li>java.sql.Time</li> * <li>java.util.Date</li> * <li>java.util.UUID</li> * <li>java.util.regex.Pattern</li> * <li>StringBuilder</li> * <li>CharSequence</li> * <li>String</li> * </ul> * @param <K> the type of the object that is the result of the conversion */ public interface ITypeConverter<K> { /** * Converts the specified command line argument value to some domain object. * @param value the command line argument String value * @return the resulting domain object * @throws Exception an exception detailing what went wrong during the conversion */ K convert(String value) throws Exception; } /** * Provides version information for a command. Commands may configure a provider with the * {@link Command#versionProvider()} annotation attribute. * @since 2.2 */ public interface IVersionProvider { /** * Returns version information for a command. * @return version information (each string in the array is displayed on a separate line) * @throws Exception an exception detailing what went wrong when obtaining version information */ String[] getVersion() throws Exception; } private static class NoVersionProvider implements IVersionProvider { public String[] getVersion() throws Exception { throw new UnsupportedOperationException(); } } /** * Factory for instantiating classes that are registered declaratively with annotation attributes, like * {@link Command#subcommands()}, {@link Option#converter()}, {@link Parameters#converter()} and {@link Command#versionProvider()}. * @since 2.2 */ public interface IFactory { /** * Creates and returns an instance of the specified class. * @param cls the class to instantiate * @param <K> the type to instantiate * @return the new instance * @throws Exception an exception detailing what went wrong when creating the instance */ <K> K create(Class<K> cls) throws Exception; } /** Returns a default {@link IFactory} implementation. Package-protected for testing purposes. */ static IFactory defaultFactory() { return new DefaultFactory(); } private static class DefaultFactory implements IFactory { public <T> T create(Class<T> cls) throws Exception { try { return cls.newInstance(); } catch (Exception ex) { Constructor<T> constructor = cls.getDeclaredConstructor(); constructor.setAccessible(true); return constructor.newInstance(); } } private static ITypeConverter<?>[] createConverter(IFactory factory, Class<? extends ITypeConverter<?>>[] classes) { ITypeConverter<?>[] result = new ITypeConverter<?>[classes.length]; for (int i = 0; i < classes.length; i++) { try { result[i] = factory.create(classes[i]); } catch (Exception ex) { throw new InitializationException("Could not instantiate " + classes[i] + ": " + ex, ex); } } return result; } public static IVersionProvider createVersionProvider(IFactory factory, Class<? 
extends IVersionProvider> cls) {
            try {
                return factory.create(cls);
            } catch (Exception ex) {
                throw new InitializationException("Could not instantiate " + cls + ": " + ex, ex);
            }
        }
    }
    /** Describes the number of parameters required and accepted by an option or a positional parameter.
     * @since 0.9.7
     */
    public static class Range implements Comparable<Range> {
        /** Required number of parameters for an option or positional parameter. */
        public final int min;
        /** Maximum accepted number of parameters for an option or positional parameter. */
        public final int max;
        /** {@code true} if a variable number of parameters is accepted (the maximum is unbounded). */
        public final boolean isVariable;
        private final boolean isUnspecified;
        private final String originalValue;

        /** Constructs a new Range object with the specified parameters.
         * @param min minimum number of required parameters
         * @param max maximum number of allowed parameters (or Integer.MAX_VALUE if variable)
         * @param variable {@code true} if any number of parameters is allowed, {@code false} otherwise
         * @param unspecified {@code true} if no arity was specified on the option/parameter (value is based on type)
         * @param originalValue the original value that was specified on the option or parameter
         */
        public Range(int min, int max, boolean variable, boolean unspecified, String originalValue) {
            this.min = min;
            this.max = max;
            this.isVariable = variable;
            this.isUnspecified = unspecified;
            this.originalValue = originalValue;
        }
        /** Returns a new {@code Range} based on the {@link Option#arity()} annotation on the specified field,
         * or the field type's default arity if no arity was specified.
         * @param field the field whose Option annotation to inspect
         * @return a new {@code Range} based on the Option arity annotation on the specified field
         */
        public static Range optionArity(Field field) {
            return field.isAnnotationPresent(Option.class)
                    ? adjustForType(Range.valueOf(field.getAnnotation(Option.class).arity()), field)
                    : new Range(0, 0, false, true, "0");
        }
        /** Returns a new {@code Range} based on the {@link Parameters#arity()} annotation on the specified field,
         * or the field type's default arity if no arity was specified.
         * @param field the field whose Parameters annotation to inspect
         * @return a new {@code Range} based on the Parameters arity annotation on the specified field
         */
        public static Range parameterArity(Field field) {
            return field.isAnnotationPresent(Parameters.class)
                    ? adjustForType(Range.valueOf(field.getAnnotation(Parameters.class).arity()), field)
                    : new Range(0, 0, false, true, "0");
        }
        /** Returns a new {@code Range} based on the {@link Parameters#index()} annotation on the specified field.
         * @param field the field whose Parameters annotation to inspect
         * @return a new {@code Range} based on the Parameters index annotation on the specified field
         */
        public static Range parameterIndex(Field field) {
            return field.isAnnotationPresent(Parameters.class)
                    ? Range.valueOf(field.getAnnotation(Parameters.class).index())
                    : new Range(0, 0, false, true, "0");
        }
        static Range adjustForType(Range result, Field field) {
            return result.isUnspecified ? defaultArity(field) : result;
        }
        /** Returns the default arity {@code Range}: for {@link Option options} this is 0 for booleans and 1 for
         * other types, for {@link Parameters parameters} booleans have arity 0, arrays or Collections have
         * arity "0..*", and other types have arity 1.
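         * <p>A sketch of the defaults for options (hypothetical field declarations):</p>
         * <pre>
         * &#064;Option(names = "-v") boolean verbose;   // boolean option: default arity 0
         * &#064;Option(names = "-o") File output;       // single-value option: default arity 1
         * </pre>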
* @param field the field whose default arity to return * @return a new {@code Range} indicating the default arity of the specified field * @since 2.0 */ public static Range defaultArity(Field field) { Class<?> type = field.getType(); if (field.isAnnotationPresent(Option.class)) { Class<?>[] typeAttribute = ArgSpecBuilder.inferTypes(type, field.getAnnotation(Option.class).type(), field.getGenericType()); boolean zeroArgs = isBoolean(type) || (isMultiValue(type) && isBoolean(typeAttribute[0])); return zeroArgs ? Range.valueOf("0") : Range.valueOf("1"); } if (isMultiValue(type)) { return Range.valueOf("0..1"); } return Range.valueOf("1");// for single-valued fields (incl. boolean positional parameters) } /** Returns the default arity {@code Range} for {@link Option options}: booleans have arity 0, other types have arity 1. * @param type the type whose default arity to return * @return a new {@code Range} indicating the default arity of the specified type * @deprecated use {@link #defaultArity(Field)} instead */ @Deprecated public static Range defaultArity(Class<?> type) { return isBoolean(type) ? Range.valueOf("0") : Range.valueOf("1"); } private int size() { return 1 + max - min; } static Range parameterCapacity(Field field) { Range arity = parameterArity(field); if (!isMultiValue(field)) { return arity; } Range index = parameterIndex(field); return parameterCapacity(arity, index); } private static Range parameterCapacity(Range arity, Range index) { if (arity.max == 0) { return arity; } if (index.size() == 1) { return arity; } if (index.isVariable) { return Range.valueOf(arity.min + "..*"); } if (arity.size() == 1) { return Range.valueOf(arity.min * index.size() + ""); } if (arity.isVariable) { return Range.valueOf(arity.min * index.size() + "..*"); } return Range.valueOf(arity.min * index.size() + ".." + arity.max * index.size()); } /** Leniently parses the specified String as a {@code Range} value and returns the result. A range string can * be a fixed integer value or a range of the form {@code MIN_VALUE + ".." + MAX_VALUE}. If the * {@code MIN_VALUE} string is not numeric, the minimum is zero. If the {@code MAX_VALUE} is not numeric, the * range is taken to be variable and the maximum is {@code Integer.MAX_VALUE}. * @param range the value range string to parse * @return a new {@code Range} value */ public static Range valueOf(String range) { range = range.trim(); boolean unspecified = range.length() == 0 || range.startsWith(".."); // || range.endsWith(".."); int min = -1, max = -1; boolean variable = false; int dots = -1; if ((dots = range.indexOf("..")) >= 0) { min = parseInt(range.substring(0, dots), 0); max = parseInt(range.substring(dots + 2), Integer.MAX_VALUE); variable = max == Integer.MAX_VALUE; } else { max = parseInt(range, Integer.MAX_VALUE); variable = max == Integer.MAX_VALUE; min = variable ? 0 : max; } Range result = new Range(min, max, variable, unspecified, range); return result; } private static int parseInt(String str, int defaultValue) { try { return Integer.parseInt(str); } catch (Exception ex) { return defaultValue; } } /** Returns a new Range object with the {@code min} value replaced by the specified value. * The {@code max} of the returned Range is guaranteed not to be less than the new {@code min} value. 
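* <p>A small illustration (a sketch, assuming the {@link #valueOf(String)} parsing described above):</p>
* <pre>
* Range.valueOf("1..3").min(2);  // yields min=2, max=3
* Range.valueOf("1..3").min(5);  // yields min=5, max=5 (max is raised to the new min)
* </pre>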
* @param newMin the {@code min} value of the returned Range object * @return a new Range object with the specified {@code min} value */ public Range min(int newMin) { return new Range(newMin, Math.max(newMin, max), isVariable, isUnspecified, originalValue); } /** Returns a new Range object with the {@code max} value replaced by the specified value. * The {@code min} of the returned Range is guaranteed not to be greater than the new {@code max} value. * @param newMax the {@code max} value of the returned Range object * @return a new Range object with the specified {@code max} value */ public Range max(int newMax) { return new Range(Math.min(min, newMax), newMax, isVariable, isUnspecified, originalValue); } /** * Returns {@code true} if this Range includes the specified value, {@code false} otherwise. * @param value the value to check * @return {@code true} if the specified value is not less than the minimum and not greater than the maximum of this Range */ public boolean contains(int value) { return min <= value && max >= value; } public boolean equals(Object object) { if (!(object instanceof Range)) { return false; } Range other = (Range) object; return other.max == this.max && other.min == this.min && other.isVariable == this.isVariable; } public int hashCode() { return ((17 * 37 + max) * 37 + min) * 37 + (isVariable ? 1 : 0); } public String toString() { return min == max ? String.valueOf(min) : min + ".." + (isVariable ? "*" : max); } public int compareTo(Range other) { int result = min - other.min; return (result == 0) ? max - other.max : result; } } private static void validatePositionalParameters(List<PositionalParamSpec> positionalParametersFields) { int min = 0; for (PositionalParamSpec positional : positionalParametersFields) { Range index = positional.index(); if (index.min > min) { throw new ParameterIndexGapException("Missing positional parameter with index=" + min + ". Nearest positional parameter '" + positional.paramLabel() + "' has index=" + index.min); } min = Math.max(min, index.max); min = min == Integer.MAX_VALUE ? 
min : min + 1; } } @SuppressWarnings("unchecked") private static Stack<String> copy(Stack<String> stack) { return (Stack<String>) stack.clone(); } private static <T> Stack<T> reverse(Stack<T> stack) { Collections.reverse(stack); return stack; } private static <T> List<T> reverseList(List<T> list) { Collections.reverse(list); return list; } private static <T> T[] copy(T[] array, Class<T> cls) { try { @SuppressWarnings("unchecked") T[] result = (T[]) Array.newInstance(cls, array.length); System.arraycopy(array, 0, result, 0, result.length); return result; } catch (Exception ex) { throw new InitializationException("Could not copy array :" + ex, ex); } } private static class CommandSpecBuilder { static CommandSpec build(Object command, IFactory factory) { if (command instanceof CommandSpec) { return (CommandSpec) command; } CommandSpec result = new CommandSpec(Assert.notNull(command, "command")); Class<?> cls = command.getClass(); boolean hasCommandAnnotation = false; while (cls != null) { hasCommandAnnotation |= updateCommandAttributes(cls, result, factory); hasCommandAnnotation |= initFromAnnotatedFields(command, cls, result, factory); cls = cls.getSuperclass(); } validateCommandSpec(result, hasCommandAnnotation, command); result.withToString(command.getClass().getName()).validate(); return result; } private static boolean updateCommandAttributes(Class<?> cls, CommandSpec commandSpec, IFactory factory) { // superclass values should not overwrite values if both class and superclass have a @Command annotation if (!cls.isAnnotationPresent(Command.class)) { return false; } Command cmd = cls.getAnnotation(Command.class); initSubcommands(cmd, commandSpec, factory); if (!commandSpec.isSeparatorInitialized()) { commandSpec.separator(cmd.separator()); } if (!commandSpec.isNameInitialized()) { commandSpec.name(cmd.name()); } if (!commandSpec.isSynopsisHeadingInitialized()) { commandSpec.synopsisHeading(cmd.synopsisHeading()); } if (!commandSpec.isCommandListHeadingInitialized()) { commandSpec.commandListHeading(cmd.commandListHeading()); } if (!commandSpec.isRequiredOptionMarkerInitialized()) { commandSpec.requiredOptionMarker(cmd.requiredOptionMarker()); } if (!commandSpec.isVersionInitialized()) { commandSpec.version(cmd.version()); } // only if no dynamic version if (!commandSpec.isCustomSynopsisInitialized()) { commandSpec.customSynopsis(cmd.customSynopsis()); } if (!commandSpec.isDescriptionInitialized()) { commandSpec.description(cmd.description()); } if (!commandSpec.isDescriptionHeadingInitialized()) { commandSpec.descriptionHeading(cmd.descriptionHeading()); } if (!commandSpec.isHeaderInitialized()) { commandSpec.header(cmd.header()); } if (!commandSpec.isHeaderHeadingInitialized()) { commandSpec.headerHeading(cmd.headerHeading()); } if (!commandSpec.isFooterInitialized()) { commandSpec.footer(cmd.footer()); } if (!commandSpec.isFooterHeadingInitialized()) { commandSpec.footerHeading(cmd.footerHeading()); } if (!commandSpec.isParameterListHeadingInitialized()) { commandSpec.parameterListHeading(cmd.parameterListHeading()); } if (!commandSpec.isOptionListHeadingInitialized()) { commandSpec.optionListHeading(cmd.optionListHeading()); } if (!commandSpec.isAbbreviateSynopsisInitialized() && cmd.abbreviateSynopsis()) { commandSpec.abbreviateSynopsis(cmd.abbreviateSynopsis()); } if (!commandSpec.isSortOptionsInitialized() && !cmd.sortOptions()) { commandSpec.sortOptions(cmd.sortOptions()); } if (!commandSpec.isShowDefaultValuesInitialized() && cmd.showDefaultValues()) { 
commandSpec.showDefaultValues(cmd.showDefaultValues()); } if (!commandSpec.isVersionProviderInitialized() && cmd.versionProvider() != NoVersionProvider.class) { commandSpec.versionProvider(DefaultFactory.createVersionProvider(factory, cmd.versionProvider())); } if (cmd.autoHelp()) { commandSpec.addMixin("autoHelp", build(new AutoHelpMixin(), factory)); } return true; } private static String[] generateVersionStrings(Class<? extends IVersionProvider> cls, IFactory factory) { if (cls == null || cls == NoVersionProvider.class) { return new String[0]; } try { String[] result = factory.create(cls).getVersion(); return result == null ? new String[0] : result; } catch (InitializationException ex) { throw ex; } catch (Exception ex) { throw new InitializationException("Could not get version info from " + cls + ": " + ex, ex); } } private static String[] nonEmpty(String[] left, String[] right) { return empty(left) ? right : left; } private static String nonEmpty(String left, String right) { return empty(left) ? right : left; } private static Boolean nonNull(Boolean left, Boolean right) { return left == null ? right : left; } private static Character nonNull(Character left, Character right) { return left == null ? right : left; } private static void initSubcommands(Command cmd, CommandSpec parent, IFactory factory) { for (Class<?> sub : cmd.subcommands()) { try { CommandLine subcommandLine = toCommandLine(factory.create(sub), factory); parent.addSubcommand(subCommandName(sub), subcommandLine); initParentCommand(subcommandLine.getCommandSpec().userObject(), parent.userObject()); } catch (InitializationException ex) { throw ex; } catch (NoSuchMethodException ex) { throw new InitializationException("Cannot instantiate subcommand " + sub.getName() + ": the class has no constructor", ex); } catch (Exception ex) { throw new InitializationException("Could not instantiate and add subcommand " + sub.getName() + ": " + ex, ex); } } } static void initParentCommand(Object subcommand, Object parent) { try { Class<?> cls = subcommand.getClass(); while (cls != null) { for (Field f : cls.getDeclaredFields()) { if (f.isAnnotationPresent(ParentCommand.class)) { f.setAccessible(true); f.set(subcommand, parent); } } cls = cls.getSuperclass(); } } catch (Exception ex) { throw new InitializationException("Unable to initialize @ParentCommand field: " + ex, ex); } } private static String subCommandName(Class<?> sub) { Command subCommand = sub.getAnnotation(Command.class); if (subCommand == null || Help.DEFAULT_COMMAND_NAME.equals(subCommand.name())) { throw new InitializationException("Subcommand " + sub.getName() + " is missing the mandatory @Command annotation with a 'name' attribute"); } return subCommand.name(); } private static boolean initFromAnnotatedFields(Object scope, Class<?> cls, CommandSpec receiver, IFactory factory) { boolean result = false; for (Field field : cls.getDeclaredFields()) { if (isMixin(field)) { receiver.addMixin(mixinName(field), buildMixinForField(field, scope, factory)); result = true; } if (isArgSpec(field)) { validateArgSpecField(field); if (isOption(field)) { receiver.add(ArgSpecBuilder.buildOptionSpec(scope, field, factory)); } if (isParameter(field)) { receiver.add(ArgSpecBuilder.buildPositionalParamSpec(scope, field, factory)); } } } return result; } private static String mixinName(Field field) { String annotationName = field.getAnnotation(Mixin.class).name(); return empty(annotationName) ? 
field.getName() : annotationName; } private static void validateArgSpecField(Field field) { if (isOption(field) && isParameter(field)) { throw new DuplicateOptionAnnotationsException("A field can be either @Option or @Parameters, but '" + field + "' is both."); } if (isMixin(field) && (isOption(field) || isParameter(field))) { throw new DuplicateOptionAnnotationsException("A field cannot be both a @Mixin command and an @Option or @Parameters, but '" + field + "' is both."); } if (Modifier.isFinal(field.getModifiers()) && (field.getType().isPrimitive() || String.class.isAssignableFrom(field.getType()))) { throw new InitializationException("Constant (final) primitive and String fields like " + field + " cannot be used as " + (isOption(field) ? "an @Option" : "a @Parameter") + ": compile-time constant inlining may hide new values written to it."); } } private static void validateCommandSpec(CommandSpec result, boolean hasCommandAnnotation, Object command) { if (!hasCommandAnnotation && result.positionalParameters.isEmpty() && result.optionsByNameMap.isEmpty()) { throw new InitializationException(command.getClass().getName() + " is not a command: it has no @Command, @Option or @Parameters annotations"); } } private static CommandSpec buildMixinForField(Field field, Object scope, IFactory factory) { try { field.setAccessible(true); Object userObject = field.get(scope); if (userObject == null) { userObject = factory.create(field.getType()); field.set(scope, userObject); } CommandSpec result = build(userObject, factory); return result.withToString(abbreviate("mixin from field " + field.toGenericString())); } catch (InitializationException ex) { throw ex; } catch (Exception ex) { throw new InitializationException("Could not access or modify mixin field " + field + ": " + ex, ex); } } static boolean isArgSpec(Field f) { return isOption(f) || isParameter(f); } static boolean isOption(Field f) { return f.isAnnotationPresent(Option.class); } static boolean isParameter(Field f) { return f.isAnnotationPresent(Parameters.class); } static boolean isMixin(Field f) { return f.isAnnotationPresent(Mixin.class); } } /** Helper class to reflectively create OptionSpec and PositionalParamSpec objects from annotated elements. * Package protected for testing. CONSIDER THIS CLASS PRIVATE. 
*/ static class ArgSpecBuilder { static OptionSpec buildOptionSpec(Object scope, Field field, IFactory factory) { Option option = field.getAnnotation(Option.class); OptionSpec result = new OptionSpec(option.names()); initCommon(result, scope, field); result.help(option.help()); result.usageHelp(option.usageHelp()); result.versionHelp(option.versionHelp()); result.showDefaultValue(option.showDefaultValue()); result.arity(Range.optionArity(field)); result.required(option.required()); result.description(option.description()); result.auxiliaryTypes(inferTypes(field.getType(), option.type(), field.getGenericType())); result.paramLabel(inferLabel(option.paramLabel(), field.getName(), field.getType(), result.auxiliaryTypes())); result.splitRegex(option.split()); result.hidden(option.hidden()); result.converters(DefaultFactory.createConverter(factory, option.converter())); return result; } private static String inferLabel(String label, String fieldName, Class<?> fieldType, Class<?>[] types) { if (!empty(label)) { return label.trim(); } String name = fieldName; if (Map.class.isAssignableFrom(fieldType)) { // #195 better param labels for map fields Class<?>[] paramTypes = types; if (paramTypes.length < 2 || paramTypes[0] == null || paramTypes[1] == null) { name = "String=String"; } else { name = paramTypes[0].getSimpleName() + "=" + paramTypes[1].getSimpleName(); } } return "<" + name + ">"; } static PositionalParamSpec buildPositionalParamSpec(Object scope, Field field, IFactory factory) { PositionalParamSpec result = new PositionalParamSpec(); initCommon(result, scope, field); result.arity(Range.parameterArity(field)); result.index(Range.parameterIndex(field)); result.capacity = Range.parameterCapacity(field); result.required(result.arity().min > 0); Parameters parameters = field.getAnnotation(Parameters.class); result.description(parameters.description()); result.auxiliaryTypes(inferTypes(field.getType(), parameters.type(), field.getGenericType())); result.paramLabel(inferLabel(parameters.paramLabel(), field.getName(), field.getType(), result.auxiliaryTypes())); result.splitRegex(parameters.split()); result.hidden(parameters.hidden()); result.converters(DefaultFactory.createConverter(factory, parameters.converter())); result.showDefaultValue(parameters.showDefaultValue()); return result; } private static void initCommon(ArgSpec<?> result, Object scope, Field field) { field.setAccessible(true); result.type(field.getType()); // field type result.defaultValue(getDefaultValue(scope, field)); result.withToString(abbreviate("field " + field.toGenericString())); result.getter(new FieldGetter(scope, field)); result.setter(new FieldSetter(scope, field)); } static String abbreviate(String text) { return text.replace("field private ", "field ") .replace("field protected ", "field ") .replace("field public ", "field ") .replace("java.lang.", ""); } private static Class<?>[] inferTypes(Class<?> propertyType, Class<?>[] annotationTypes, Type genericType) { if (annotationTypes.length > 0) { return annotationTypes; } if (propertyType.isArray()) { return new Class<?>[] { propertyType.getComponentType() }; } if (CommandLine.isMultiValue(propertyType)) { if (genericType instanceof ParameterizedType) {// e.g. Map<Long, ? extends Number> ParameterizedType parameterizedType = (ParameterizedType) genericType; Type[] paramTypes = parameterizedType.getActualTypeArguments(); // e.g. ? 
extends Number Class<?>[] result = new Class<?>[paramTypes.length]; for (int i = 0; i < paramTypes.length; i++) { if (paramTypes[i] instanceof Class) { result[i] = (Class<?>) paramTypes[i]; continue; } // e.g. Long if (paramTypes[i] instanceof WildcardType) { // e.g. ? extends Number WildcardType wildcardType = (WildcardType) paramTypes[i]; Type[] lower = wildcardType.getLowerBounds(); // e.g. [] if (lower.length > 0 && lower[0] instanceof Class) { result[i] = (Class<?>) lower[0]; continue; } Type[] upper = wildcardType.getUpperBounds(); // e.g. Number if (upper.length > 0 && upper[0] instanceof Class) { result[i] = (Class<?>) upper[0]; continue; } } Arrays.fill(result, String.class); return result; // too convoluted generic type, giving up } return result; // we inferred all types from ParameterizedType } return new Class<?>[] {String.class, String.class}; // field is multi-value but not ParameterizedType } return new Class<?>[] {propertyType}; // not a multi-value field } static Object getDefaultValue(Object scope, Field field) { Object defaultValue = null; try { defaultValue = field.get(scope); if (defaultValue != null && field.getType().isArray()) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < Array.getLength(defaultValue); i++) { sb.append(i > 0 ? ", " : "").append(Array.get(defaultValue, i)); } defaultValue = sb.insert(0, "[").append("]").toString(); } } catch (Exception ex) { } return defaultValue; } private static class FieldGetter implements ArgSpec.IGetter { private final Object scope; private final Field field; public FieldGetter(Object scope, Field field) { this.scope = scope; this.field = field; } @SuppressWarnings("unchecked") public <T> T get() throws Exception { return (T) field.get(scope); } } private static class FieldSetter implements ArgSpec.ISetter { private final Object scope; private final Field field; public FieldSetter(Object scope, Field field) { this.scope = scope; this.field = field; } public <T> T set(T value) throws Exception { @SuppressWarnings("unchecked") T result = (T) field.get(scope); field.set(scope, value); return result; } } } /** The {@code CommandSpec} class models a command specification, including the options, positional parameters and subcommands * supported by the command, as well as attributes for the version help message and the usage help message of the command. * <p> * Picocli views a command line application as a hierarchy of commands: there is a top-level command (usually the Java * class with the {@code main} method) with optionally a set of command line options, positional parameters and subcommands. * Subcommands themselves can have options, positional parameters and nested sub-subcommands to any level of depth. * </p><p> * The object model has a corresponding hierarchy of {@code CommandSpec} objects, each with a set of {@link OptionSpec}, * {@link PositionalParamSpec} and {@linkplain CommandLine subcommands} associated with it. * This object model is used by the picocli command line interpreter and help message generator. * </p><p>Picocli can construct a {@code CommandSpec} automatically from classes with {@link Command @Command}, {@link Option @Option} and * {@link Parameters @Parameters} annotations. Alternatively a {@code CommandSpec} can be constructed programmatically. * </p> * @since 3.0 */ public static class CommandSpec { /** Constant String holding the default synopsis heading: <code>{@value}</code>. 
*/ static final String DEFAULT_SYNOPSIS_HEADING = "Usage: "; /** Constant String holding the default command list heading: <code>{@value}</code>. */ static final String DEFAULT_COMMAND_LIST_HEADING = "Commands:%n"; /** Constant String holding the default program name: {@code "<main class>"}. */ static final String DEFAULT_COMMAND_NAME = "<main class>"; /** Constant char holding the default character used to prefix required options in the options list: {@code ' '} ({@value}). */ static final char DEFAULT_REQUIRED_OPTION_MARKER = ' '; /** Constant String holding the default separator between options and option parameters: <code>{@value}</code>.*/ static final String DEFAULT_SEPARATOR = "="; /** Constant Boolean holding the default setting for whether to abbreviate the synopsis: <code>{@value}</code>.*/ static final Boolean DEFAULT_ABBREVIATE_SYNOPSIS = Boolean.FALSE; /** Constant Boolean holding the default setting for whether to sort the options alphabetically: <code>{@value}</code>.*/ static final Boolean DEFAULT_SORT_OPTIONS = Boolean.TRUE; /** Constant Boolean holding the default setting for whether to show default values in the usage help message: <code>{@value}</code>.*/ static final Boolean DEFAULT_SHOW_DEFAULT_VALUES = Boolean.FALSE; private final Map<String, CommandLine> commands = new LinkedHashMap<String, CommandLine>(); private final Map<String, OptionSpec> optionsByNameMap = new LinkedHashMap<String, OptionSpec>(); private final Map<Character, OptionSpec> posixOptionsByKeyMap = new LinkedHashMap<Character, OptionSpec>(); private final Map<String, CommandSpec> mixins = new LinkedHashMap<String, CommandSpec>(); private final List<ArgSpec<?>> requiredArgs = new ArrayList<ArgSpec<?>>(); private final List<OptionSpec> options = new ArrayList<OptionSpec>(); private final List<PositionalParamSpec> positionalParameters = new ArrayList<PositionalParamSpec>(); private final Object userObject; private CommandLine commandLine; private CommandSpec parent; private String separator; private String name; private IVersionProvider versionProvider; private String[] version = {}; private String[] description = {}; private String[] customSynopsis = {}; private String[] header = {}; private String[] footer = {}; private Boolean abbreviateSynopsis; private Boolean sortOptions; private Boolean showDefaultValues; private Character requiredOptionMarker; private String headerHeading; private String synopsisHeading; private String descriptionHeading; private String parameterListHeading; private String optionListHeading; private String commandListHeading; private String footerHeading; private String toString; /** Constructs a new {@code CommandSpec} without an associated user object. */ public CommandSpec() { this(null); } /** Constructs a new {@code CommandSpec} with the specified associated user object. * @param userObject the associated user object - often this is the object annotated with {@code @Command}. May be {@code null}. */ public CommandSpec(Object userObject) { this.userObject = userObject; } /** Ensures all attributes of this {@code CommandSpec} have a valid value; throws an {@link InitializationException} if this cannot be achieved. */ void validate() { sortOptions = (sortOptions == null) ? true : sortOptions; abbreviateSynopsis = (abbreviateSynopsis == null) ? false : abbreviateSynopsis; requiredOptionMarker = (requiredOptionMarker == null) ? DEFAULT_REQUIRED_OPTION_MARKER : requiredOptionMarker; showDefaultValues = (showDefaultValues == null) ? 
false : showDefaultValues; synopsisHeading = (synopsisHeading == null) ? DEFAULT_SYNOPSIS_HEADING : synopsisHeading; commandListHeading = (commandListHeading == null) ? DEFAULT_COMMAND_LIST_HEADING : commandListHeading; separator = (separator == null) ? DEFAULT_SEPARATOR : separator; name = (name == null) ? DEFAULT_COMMAND_NAME : name; Collections.sort(positionalParameters, new PositionalParametersSorter()); validatePositionalParameters(positionalParameters); for (OptionSpec option : options) { option.validate(); } for (PositionalParamSpec positional : positionalParameters) { positional.validate(); } } /** Returns the user object associated with this command. * @see CommandLine#getCommand() */ public Object userObject() { return userObject; } /** Returns the CommandLine constructed with this {@code CommandSpec} model. */ public CommandLine commandLine() { return commandLine;} /** Sets the CommandLine constructed with this {@code CommandSpec} model. */ protected CommandSpec commandLine(CommandLine commandLine) { this.commandLine = commandLine; for (CommandLine sub : commands.values()) { sub.getCommandSpec().parent(this); } return this; } /** Returns a read-only view of the subcommand map. */ public Map<String, CommandLine> subcommands() { return Collections.unmodifiableMap(commands); } /** Adds the specified subcommand with the specified name. * @param name subcommand name - when this String is encountered in the command line arguments the subcommand is invoked * @param commandLine the subcommand to invoke when the name is encountered on the command line * @return this {@code CommandSpec} object for method chaining */ public CommandSpec addSubcommand(String name, CommandLine commandLine) { commands.put(name, commandLine); commandLine.getCommandSpec().parent(this); return this; } /** Returns the parent command of this subcommand, or {@code null} if this is a top-level command. */ public CommandSpec parent() { return parent; } /** Sets the parent command of this subcommand. * @return this CommandSpec for method chaining */ public CommandSpec parent(CommandSpec parent) { this.parent = parent; return this; } /** Adds the specified option spec or positional parameter spec to the list of configured arguments to expect. * @param arg the option spec or positional parameter spec to add * @return this CommandSpec for method chaining */ public CommandSpec add(ArgSpec<?> arg) { return arg.isOption() ? add((OptionSpec) arg) : add((PositionalParamSpec) arg); } /** Adds the specified option spec to the list of configured arguments to expect. * @param option the option spec to add * @return this CommandSpec for method chaining * @throws DuplicateOptionAnnotationsException if any of the names of the specified option is the same as the name of another option */ public CommandSpec add(OptionSpec option) { option.validate(); options.add(option); for (String name : option.names()) { // cannot be null or empty OptionSpec existing = optionsByNameMap.put(name, option); if (existing != null && !existing.equals(option)) { throw DuplicateOptionAnnotationsException.create(name, option, existing); } if (name.length() == 2 && name.startsWith("-")) { posixOptionsByKeyMap.put(name.charAt(1), option); } } if (option.required()) { requiredArgs.add(option); } return this; } /** Adds the specified positional parameter spec to the list of configured arguments to expect. 
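* <p>For example, a command can be assembled programmatically; the sketch below is illustrative only:</p>
* <pre>
* CommandSpec spec = new CommandSpec();
* spec.add(new OptionSpec("-c", "--count").type(int.class).description("number of repetitions"));
* spec.add(new PositionalParamSpec().paramLabel("FILE").description("the file to process"));
* </pre>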
* @param positional the positional parameter spec to add * @return this CommandSpec for method chaining */ public CommandSpec add(PositionalParamSpec positional) { positional.validate(); positionalParameters.add(positional); if (positional.required()) { requiredArgs.add(positional); } return this; } /** Adds the specified mixin {@code CommandSpec} object to the map of mixins for this command. * @param name the name that can be used to later retrieve the mixin * @param mixin the mixin whose options and positional parameters and other attributes to add to this command * @return this CommandSpec for method chaining */ public CommandSpec addMixin(String name, CommandSpec mixin) { mixins.put(name, mixin); if (!isSeparatorInitialized()) { separator(mixin.separator()); } if (!isNameInitialized()) { name(mixin.name()); } if (!isSynopsisHeadingInitialized()) { synopsisHeading(mixin.synopsisHeading()); } if (!isCommandListHeadingInitialized()) { commandListHeading(mixin.commandListHeading()); } if (!isRequiredOptionMarkerInitialized()) { requiredOptionMarker(mixin.requiredOptionMarker()); } if (!isVersionProviderInitialized()) { versionProvider(mixin.versionProvider()); } if (!isVersionInitialized()) { version(mixin.version()); } // only if no dynamic version if (!isCustomSynopsisInitialized()) { customSynopsis(mixin.customSynopsis()); } if (!isDescriptionInitialized()) { description(mixin.description()); } if (!isDescriptionHeadingInitialized()) { descriptionHeading(mixin.descriptionHeading()); } if (!isHeaderInitialized()) { header(mixin.header()); } if (!isHeaderHeadingInitialized()) { headerHeading(mixin.headerHeading()); } if (!isFooterInitialized()) { footer(mixin.footer()); } if (!isFooterHeadingInitialized()) { footerHeading(mixin.footerHeading()); } if (!isParameterListHeadingInitialized()) { parameterListHeading(mixin.parameterListHeading()); } if (!isOptionListHeadingInitialized()) { optionListHeading(mixin.optionListHeading()); } if (!isAbbreviateSynopsisInitialized() && mixin.abbreviateSynopsis()) { abbreviateSynopsis(mixin.abbreviateSynopsis()); } if (!isSortOptionsInitialized() && !mixin.sortOptions()) { sortOptions(mixin.sortOptions()); } if (!isShowDefaultValuesInitialized() && mixin.showDefaultValues()) { showDefaultValues(mixin.showDefaultValues()); } for (Map.Entry<String, CommandLine> entry : mixin.subcommands().entrySet()) { addSubcommand(entry.getKey(), entry.getValue()); } for (OptionSpec optionSpec : mixin.options()) { add(optionSpec); } for (PositionalParamSpec paramSpec : mixin.positionalParameters()) { add(paramSpec); } return this; } /** Returns a map of the mixin names to mixin {@code CommandSpec} objects configured for this command. * @return an immutable map of mixins added to this command. */ public Map<String, CommandSpec> mixins() { return Collections.unmodifiableMap(mixins); } /** Returns the list of options configured for this command. * @return an immutable list of options that this command recognizes. */ public List<OptionSpec> options() { return Collections.unmodifiableList(options); } /** Returns the list of positional parameters configured for this command. * @return an immutable list of positional parameters that this command recognizes. */ public List<PositionalParamSpec> positionalParameters() { return Collections.unmodifiableList(positionalParameters); } /** Returns a map of the option names to option spec objects configured for this command. * @return an immutable map of options that this command recognizes. 
*/ public Map<String, OptionSpec> optionsMap() { return Collections.unmodifiableMap(optionsByNameMap); } /** Returns a map of the short (single character) option names to option spec objects configured for this command. * @return an immutable map of options that this command recognizes. */ public Map<Character, OptionSpec> posixOptionsMap() { return Collections.unmodifiableMap(posixOptionsByKeyMap); } /** Returns the list of required options and positional parameters configured for this command. * @return an immutable list of the required options and positional parameters for this command. */ public List<ArgSpec<?>> requiredArgs() { return Collections.unmodifiableList(requiredArgs); } /** Returns the String to use as the program name in the synopsis line of the help message. * {@link #DEFAULT_COMMAND_NAME} by default, initialized from {@link Command#name()} if defined. */ public String name() { return name; } /** Sets the String to use as the program name in the synopsis line of the help message. * @return this CommandSpec for method chaining */ public CommandSpec name(String name) { this.name = name; return this; } /** Returns the String to use as the separator between options and option parameters. {@code "="} by default, * initialized from {@link Command#separator()} if defined.*/ public String separator() { return separator; } /** Sets the String to use as the separator between options and option parameters. * @return this CommandSpec for method chaining */ public CommandSpec separator(String separator) { this.separator = separator; return this; } /** Returns version information for this command, to print to the console when the user specifies an * {@linkplain OptionSpec#versionHelp() option} to request version help. This is not part of the usage help message. * @return the version strings generated by the {@link #versionProvider() version provider} if one is set, otherwise the {@linkplain #version(String...) version literals}*/ public String[] version() { if (versionProvider != null) { try { return versionProvider.getVersion(); } catch (Exception ex) { String msg = "Could not get version info from " + versionProvider + ": " + ex; throw new ExecutionException(this.commandLine, msg, ex); } } return version; } /** Sets version information literals for this command, to print to the console when the user specifies an * {@linkplain OptionSpec#versionHelp() option} to request version help. Only used if no {@link #versionProvider() versionProvider} is set. * @return this CommandSpec for method chaining */ public CommandSpec version(String... version) { this.version = version; return this; } /** Returns the version provider for this command, to generate the {@link #version()} strings. * @return the version provider or {@code null} if the version strings should be returned from the {@linkplain #version(String...) version literals}.*/ public IVersionProvider versionProvider() { return versionProvider; } /** Sets version provider for this command, to generate the {@link #version()} strings. * @param versionProvider the version provider to use to generate the version strings, or {@code null} if the {@linkplain #version(String...) version literals} should be used. * @return this CommandSpec for method chaining */ public CommandSpec versionProvider(IVersionProvider versionProvider) { this.versionProvider = versionProvider; return this; } /** Returns the optional heading preceding the header section. Initialized from {@link Command#headerHeading()}, or null. 
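* <p>Note that heading values are format strings: a value such as {@code "Demo App%n"} (an illustrative value) is rendered with a trailing line separator.</p>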
*/ public String headerHeading() { return headerHeading; } /** Sets the heading preceding the header section. Initialized from {@link Command#headerHeading()}, or null. * @return this CommandSpec for method chaining */ public CommandSpec headerHeading(String headerHeading) { this.headerHeading = headerHeading; return this; } /** Returns the optional header lines displayed at the top of the help message. For subcommands, the first header line is * displayed in the list of commands. Values are initialized from {@link Command#header()} * if the {@code Command} annotation is present, otherwise this is an empty array and the help message has no * header. Applications may programmatically set this field to create a custom help message. */ public String[] header() { return header; } /** Sets the optional header lines displayed at the top of the help message. For subcommands, the first header line is * displayed in the list of commands. * @return this CommandSpec for method chaining */ public CommandSpec header(String... header) { this.header = header; return this; } /** Returns the optional heading preceding the synopsis. Initialized from {@link Command#synopsisHeading()}, {@code "Usage: "} by default. */ public String synopsisHeading() { return synopsisHeading; } /** Sets the optional heading preceding the synopsis. * @return this CommandSpec for method chaining */ public CommandSpec synopsisHeading(String newValue) {synopsisHeading = newValue; return this;} /** Returns whether the synopsis line(s) should show an abbreviated synopsis without detailed option names. */ public boolean abbreviateSynopsis() { return abbreviateSynopsis; } /** Sets whether the synopsis line(s) should show an abbreviated synopsis without detailed option names. * @return this CommandSpec for method chaining */ public CommandSpec abbreviateSynopsis(boolean newValue) {abbreviateSynopsis = newValue; return this;} /** Returns the optional custom synopsis lines to use instead of the auto-generated synopsis. * Initialized from {@link Command#customSynopsis()} if the {@code Command} annotation is present, * otherwise this is an empty array and the synopsis is generated. * Applications may programmatically set this field to create a custom help message. */ public String[] customSynopsis() { return customSynopsis; } /** Sets the optional custom synopsis lines to use instead of the auto-generated synopsis. * @return this CommandSpec for method chaining */ public CommandSpec customSynopsis(String... customSynopsis) { this.customSynopsis = customSynopsis; return this; } /** Returns the optional heading preceding the description section. Initialized from {@link Command#descriptionHeading()}, or null. */ public String descriptionHeading() { return descriptionHeading; } /** Sets the heading preceding the description section. * @return this CommandSpec for method chaining */ public CommandSpec descriptionHeading(String newValue) {descriptionHeading = newValue; return this;} /** Returns the optional text lines to use as the description of the help message, displayed between the synopsis and the * options list. Initialized from {@link Command#description()} if the {@code Command} annotation is present, * otherwise this is an empty array and the help message has no description. * Applications may programmatically set this field to create a custom help message. 
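* <p>For example (an illustrative sketch): {@code spec.description("Copies SOURCE to DEST.", "Existing files are overwritten.");}</p>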
*/ public String[] description() { return description; } /** Sets the optional text lines to use as the description of the help message, displayed between the synopsis and the * options list. * @return this CommandSpec for method chaining */ public CommandSpec description(String... description) { this.description = description; return this; } /** Returns the optional heading preceding the parameter list. Initialized from {@link Command#parameterListHeading()}, or null. */ public String parameterListHeading() { return parameterListHeading; } /** Sets the optional heading preceding the parameter list. * @return this CommandSpec for method chaining */ public CommandSpec parameterListHeading(String newValue) {parameterListHeading = newValue; return this;} /** Returns the optional heading preceding the options list. Initialized from {@link Command#optionListHeading()}, or null. */ public String optionListHeading() { return optionListHeading; } /** Sets the heading preceding the options list. * @return this CommandSpec for method chaining */ public CommandSpec optionListHeading(String newValue) {optionListHeading = newValue; return this;} /** Returns whether the options list in the usage help message should be sorted alphabetically. */ public boolean sortOptions() { return sortOptions; } /** Sets whether the options list in the usage help message should be sorted alphabetically. * @return this CommandSpec for method chaining */ public CommandSpec sortOptions(boolean newValue) {sortOptions = newValue; return this;} /** Returns the character used to prefix required options in the options list. */ public char requiredOptionMarker() { return requiredOptionMarker; } /** Sets the character used to prefix required options in the options list. * @return this CommandSpec for method chaining */ public CommandSpec requiredOptionMarker(char newValue) {requiredOptionMarker = newValue; return this;} /** Returns whether the options list in the usage help message should show default values for all non-boolean options. */ public boolean showDefaultValues() { return showDefaultValues; } /** Sets whether the options list in the usage help message should show default values for all non-boolean options. * @return this CommandSpec for method chaining */ public CommandSpec showDefaultValues(boolean newValue) {showDefaultValues = newValue; return this;} /** Returns the optional heading preceding the subcommand list. Initialized from {@link Command#commandListHeading()}. {@code "Commands:%n"} by default. */ public String commandListHeading() { return commandListHeading; } /** Sets the optional heading preceding the subcommand list. * @return this CommandSpec for method chaining */ public CommandSpec commandListHeading(String newValue) {commandListHeading = newValue; return this;} /** Returns the optional heading preceding the footer section. Initialized from {@link Command#footerHeading()}, or null. */ public String footerHeading() { return footerHeading; } /** Sets the optional heading preceding the footer section. * @return this CommandSpec for method chaining */ public CommandSpec footerHeading(String newValue) {footerHeading = newValue; return this;} /** Returns the optional footer text lines displayed at the bottom of the help message. Initialized from * {@link Command#footer()} if the {@code Command} annotation is present, otherwise this is an empty array and * the help message has no footer. * Applications may programmatically set this field to create a custom help message. 
*/ public String[] footer() { return footer; } /** Sets the optional footer text lines displayed at the bottom of the help message. * @return this CommandSpec for method chaining */ public CommandSpec footer(String... footer) { this.footer = footer; return this; } /** Returns a string representation of this command, used in error messages and trace messages. */ public String toString() { return toString; } /** Sets the string representation of this command, used in error messages and trace messages. * @param newValue the string representation * @return this CommandSpec for method chaining */ public CommandSpec withToString(String newValue) { this.toString = newValue; return this; } boolean isSeparatorInitialized() { return !empty(separator) && !CommandSpec.DEFAULT_SEPARATOR.equals(separator); } boolean isNameInitialized() { return !empty(name) && !CommandSpec.DEFAULT_COMMAND_NAME.equals(name); } boolean isSynopsisHeadingInitialized() { return !empty(synopsisHeading) && !CommandSpec.DEFAULT_SYNOPSIS_HEADING.equals(synopsisHeading); } boolean isCommandListHeadingInitialized() { return !empty(commandListHeading) && !CommandSpec.DEFAULT_COMMAND_LIST_HEADING.equals(commandListHeading); } boolean isRequiredOptionMarkerInitialized() { return requiredOptionMarker != null && CommandSpec.DEFAULT_REQUIRED_OPTION_MARKER != requiredOptionMarker; } boolean isAbbreviateSynopsisInitialized() { return abbreviateSynopsis != null && !CommandSpec.DEFAULT_ABBREVIATE_SYNOPSIS.equals(abbreviateSynopsis); } boolean isSortOptionsInitialized() { return sortOptions != null && !CommandSpec.DEFAULT_SORT_OPTIONS.equals(sortOptions); } boolean isShowDefaultValuesInitialized() { return showDefaultValues != null && !CommandSpec.DEFAULT_SHOW_DEFAULT_VALUES.equals(showDefaultValues); } boolean isVersionProviderInitialized() { return versionProvider != null && !(versionProvider instanceof NoVersionProvider);} boolean isVersionInitialized() { return !empty(version); } boolean isCustomSynopsisInitialized() { return !empty(customSynopsis); } boolean isDescriptionInitialized() { return !empty(description); } boolean isDescriptionHeadingInitialized() { return !empty(descriptionHeading); } boolean isHeaderInitialized() { return !empty(header); } boolean isHeaderHeadingInitialized() { return !empty(headerHeading); } boolean isFooterInitialized() { return !empty(footer); } boolean isFooterHeadingInitialized() { return !empty(footerHeading); } boolean isParameterListHeadingInitialized() { return !empty(parameterListHeading); } boolean isOptionListHeadingInitialized() { return !empty(optionListHeading); } } /** Models the shared attributes of {@link OptionSpec} and {@link PositionalParamSpec}. * @since 3.0 */ public abstract static class ArgSpec<T extends ArgSpec<T>> { private Range arity; private String[] description; private boolean required; private String paramLabel; private String splitRegex; private boolean hidden; private Class<?> type; private Class<?>[] auxiliaryTypes; private ITypeConverter<?>[] converters; private Object defaultValue; private Help.Visibility showDefaultValue; private String toString; private IGetter getter; private ISetter setter; /** Constructs a new {@code ArgSpec}. */ public ArgSpec() { getter = new ObjectGetterSetter(); setter = (ISetter) getter; } @SuppressWarnings("unchecked") protected T self() { return (T) this; } /** Ensures all attributes of this {@code ArgSpec} have a valid value; throws an {@link InitializationException} if this cannot be achieved. 
*/ T validate() { if (description == null) { description = new String[0]; } if (splitRegex == null) { splitRegex = ""; } if (empty(paramLabel)) { paramLabel = "PARAM"; } if (arity() == null) { if (isOption()) { if (type == null || isBoolean(type)) { arity("0"); } else { arity("1"); } } else { arity("1"); } } if (type == null) { if (auxiliaryTypes == null || auxiliaryTypes.length == 0) { if (arity().isVariable || arity.max > 1) { type = isOption() ? boolean[].class : String[].class; } else { type = isOption() ? boolean.class : String.class; } } else { type = auxiliaryTypes[0]; } } if (auxiliaryTypes == null || auxiliaryTypes.length == 0) { if (type.isArray()) { auxiliaryTypes = new Class<?>[]{type.getComponentType()}; } else if (Collection.class.isAssignableFrom(type)) { // type is a collection but element type is unspecified auxiliaryTypes = new Class<?>[] {String.class}; // use String elements } else if (Map.class.isAssignableFrom(type)) { // type is a map but element type is unspecified auxiliaryTypes = new Class<?>[] {String.class, String.class}; // use String keys and String values } else { auxiliaryTypes = new Class<?>[] {type}; } } if (converters == null) { converters = new ITypeConverter<?>[0]; } if (showDefaultValue == null) { showDefaultValue = Help.Visibility.ON_DEMAND; } return self(); } /** Customizable getter for obtaining the current value of an option or positional parameter from the model. * @since 3.0 */ public static interface IGetter { <K> K get() throws Exception; } /** Customizable setter for modifying the value of an option or positional parameter in the model. * @since 3.0 */ public static interface ISetter { <K> K set(K value) throws Exception; } private static class ObjectGetterSetter implements IGetter, ISetter { private Object value; @SuppressWarnings("unchecked") public <K> K get() throws Exception { return (K) value; } public <K> K set(K value) throws Exception { @SuppressWarnings("unchecked") K result = value; this.value = value; return result; } } /** Returns whether this is a required option or positional parameter. * @see Option#required() */ public boolean required() { return required; } /** Returns the description of this option, used when generating the usage documentation. * @see Option#description() */ public String[] description() { return description; } /** Returns how many arguments this option or positional parameter requires. * @see Option#arity() */ public Range arity() { return arity; } /** Returns the name of the option or positional parameter used in the usage help message. * @see Option#paramLabel() {@link Parameters#paramLabel()} */ public String paramLabel() { return paramLabel; } /** Returns auxiliary type information used when the {@link #type()} is a generic {@code Collection}, {@code Map} or an abstract class. * @see Option#type() */ public Class<?>[] auxiliaryTypes() { return auxiliaryTypes; } /** Returns one or more {@link CommandLine.ITypeConverter type converters} to use to convert the command line * argument into a strongly typed value (or key-value pair for map fields). This is useful when a particular * option or positional parameter should use a custom conversion that is different from the normal conversion for the arg spec's type. * @see Option#converter() */ public ITypeConverter<?>[] converters() { return converters; } /** Returns a regular expression to split option parameter values or {@code ""} if the value should not be split. 
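* <p>For example, with a split regex of {@code ","}, a single parameter like {@code 1,2,3} is split into three
* values before type conversion; the sketch below is illustrative only:</p>
* <pre>
* new OptionSpec("-n", "--nums").type(int[].class).splitRegex(",");  // "-n 1,2,3" yields {1, 2, 3}
* </pre>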
* @see Option#split() */ public String splitRegex() { return splitRegex; } /** Returns whether this option should be excluded from the usage message. * @see Option#hidden() */ public boolean hidden() { return hidden; } /** Returns the type to convert the option or positional parameter to before {@linkplain #setValue(Object) setting} the value. */ public Class<?> type() { return type; } /** Returns the default value of this option or positional parameter. */ public Object defaultValue() { return defaultValue; } /** Returns whether this option or positional parameter's default value should be shown in the usage help. */ public Help.Visibility showDefaultValue() { return showDefaultValue; } /** Sets whether this option or positional parameter's default value should be shown in the usage help. */ public T showDefaultValue(Help.Visibility visibility) { showDefaultValue = Assert.notNull(visibility, "visibility"); return self(); } /** Returns the {@link IGetter} that is responsible for getting the value of this argument. */ public IGetter getter() { return getter; } /** Returns the {@link ISetter} that is responsible for setting the value of this argument. */ public ISetter setter() { return setter; } /** Returns the current value of this argument. */ Object getValue() throws Exception { return getter.get(); } /** Sets the value of this argument to the specified value and returns the previous value. */ Object setValue(Object newValue) throws Exception { return setter.set(newValue); } /** Returns {@code true} if this argument's {@link #type()} is an array, a {@code Collection} or a {@code Map}, {@code false} otherwise. */ boolean isMultiValue() { return CommandLine.isMultiValue(type()); } /** Returns {@code true} if this argument is a named option, {@code false} otherwise. */ public abstract boolean isOption(); /** Returns {@code true} if this argument is a positional parameter, {@code false} otherwise. */ public abstract boolean isPositional(); /** Sets whether this is a required option or positional parameter. */ public T required(boolean required) { this.required = required; return self(); } /** Sets the description of this option, used when generating the usage documentation. */ public T description(String... description) { this.description = Assert.notNull(description, "description"); return self(); } /** Sets how many arguments this option or positional parameter requires. */ public T arity(String range) { return arity(Range.valueOf(range)); } /** Sets how many arguments this option or positional parameter requires. */ public T arity(Range arity) { this.arity = Assert.notNull(arity, "arity"); return self(); } /** Sets the name of the option or positional parameter used in the usage help message. */ public T paramLabel(String paramLabel) { this.paramLabel = Assert.notNull(paramLabel, "paramLabel"); return self(); } /** Sets auxiliary type information used when the {@link #type()} is a generic {@code Collection}, {@code Map} or an abstract class. */ public T auxiliaryTypes(Class<?>... types) { this.auxiliaryTypes = types; return self(); } /** Sets option/positional param-specific converter (or converters for Maps) . */ public T converters(ITypeConverter<?>... cs) { this.converters = cs; return self(); } /** Sets a regular expression to split option parameter values or {@code ""} if the value should not be split. */ public T splitRegex(String splitRegex) { this.splitRegex = splitRegex; return self(); } /** Sets whether this option should be excluded from the usage message. 
*/ public T hidden(boolean hidden) { this.hidden = hidden; return self(); } /** Sets the type to convert the option or positional parameter to before {@linkplain #setValue(Object) setting} the value. */ public T type(Class<?> propertyType) { this.type = propertyType; return self(); } /** Sets the default value of this option or positional parameter to the specified value. */ public T defaultValue(Object defaultValue) { this.defaultValue = defaultValue; return self(); } /** Sets the {@link IGetter} that is responsible for getting the value of this argument to the specified value. */ public T getter(IGetter getter) { this.getter = getter; return self(); } /** Sets the {@link ISetter} that is responsible for setting the value of this argument to the specified value. */ public T setter(ISetter setter) { this.setter = setter; return self(); } /** Sets the string representation of this option or positional parameter to the specified value. */ public T withToString(String toString) { this.toString = toString; return self(); } /** Returns a string representation of this option or positional parameter. */ public String toString() { return toString; } private String[] splitValue(String value) { return splitRegex().length() == 0 ? new String[] {value} : value.split(splitRegex()); } public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof ArgSpec)) { return false; } ArgSpec<?> other = (ArgSpec<?>) obj; boolean result = Assert.equals(this.defaultValue, other.defaultValue) && Assert.equals(this.type, other.type) && Assert.equals(this.arity, other.arity) && Assert.equals(this.hidden, other.hidden) && Assert.equals(this.paramLabel, other.paramLabel) && Assert.equals(this.required, other.required) && Assert.equals(this.splitRegex, other.splitRegex) && Arrays.equals(this.description, other.description) && Arrays.equals(this.auxiliaryTypes, other.auxiliaryTypes) ; return result; } public int hashCode() { return 17 + 37 * Assert.hashCode(defaultValue) + 37 * Assert.hashCode(type) + 37 * Assert.hashCode(arity) + 37 * Assert.hashCode(hidden) + 37 * Assert.hashCode(paramLabel) + 37 * Assert.hashCode(required) + 37 * Assert.hashCode(splitRegex) + 37 * Arrays.hashCode(description) + 37 * Arrays.hashCode(auxiliaryTypes) ; } } /** The {@code OptionSpec} class models aspects of a <em>named option</em> of a {@linkplain CommandSpec command}, including whether * it is required or optional, the option parameters supported (or required) by the option, * and attributes for the usage help message describing the option. * <p> * An option has one or more names. The option is matched when the parser encounters one of the option names in the command line arguments. * Depending on the option's {@link #arity() arity}, * the parser may expect it to have option parameters. The parser will call {@link #setValue(Object) setValue} on * the matched option for each of the option parameters encountered. * For multi-value options, the {@code type} may be an array, a {@code Collection} or a {@code Map}. In this case * the parser will get the data structure by calling {@link #getValue() getValue} and modify the contents of this data structure. * (In the case of arrays, the array is replaced with a new instance with additional elements.) * </p><p> * Before calling the setter, picocli converts the option parameter value from a String to the option parameter's type. * </p> * <ul> * <li>If an option-specific {@link #converters() converter} is configured, this will be used for type conversion. 
* If the option's type is a {@code Map}, the map may have different types for its keys and its values, so * {@link #converters() converters} should provide two converters: one for the map keys and one for the map values.</li> * <li>Otherwise, the option's {@link #type() type} is used to look up a converter in the list of * {@linkplain CommandLine#registerConverter(Class, ITypeConverter) registered converters}. For multi-value options, * the {@code type} may be an array, a {@code Collection} or a {@code Map}. In that case the elements are converted * based on the option's {@link #auxiliaryTypes() auxiliaryTypes}. The auxiliaryType is used to look up * the converter(s) to use to convert the individual parameter values. * Maps may have different types for their keys and their values, so {@link #auxiliaryTypes() auxiliaryTypes} * should provide two types: one for the map keys and one for the map values.</li> * </ul> * <p> * {@code OptionSpec} objects are used by the picocli command line interpreter and help message generator. * Picocli can construct an {@code OptionSpec} automatically from fields and methods with {@link Option @Option} * annotations. Alternatively an {@code OptionSpec} can be constructed programmatically. * When an {@code OptionSpec} is created from an {@link Option @Option}-annotated field or method, this field is * set (or the method is invoked) when the option is matched and {@link #setValue(Object) setValue} is called. * Programmatically constructed {@code OptionSpec} instances will remember the value passed to the * {@link #setValue(Object) setValue} method so it can be retrieved with the {@link #getValue() getValue} method. * This behaviour can be customized by installing a custom {@link IGetter} and {@link ISetter} on the {@code OptionSpec}. * </p> * @since 3.0 */ public static class OptionSpec extends ArgSpec<OptionSpec> { private String[] names; private boolean help; private boolean usageHelp; private boolean versionHelp; public OptionSpec(String name, String... names) { this.names = new String[Assert.notNull(names, "names").length + 1]; this.names[0] = Assert.notNull(name, "name"); System.arraycopy(names, 0, this.names, 1, names.length); } public OptionSpec(String[] names) { this.names = copy(Assert.notNull(names, "names"), String.class); } protected OptionSpec self() { return this; } /** Ensures all attributes of this {@code OptionSpec} have a valid value; throws an {@link InitializationException} if this cannot be achieved. */ OptionSpec validate() { super.validate(); if (names == null || names.length == 0 || Arrays.asList(names).contains("")) { throw new InitializationException("Invalid names: " + Arrays.toString(names)); } if (toString() == null) { withToString("option " + names[0]); } return this; } public boolean isOption() { return true; } public boolean isPositional() { return false; } /** Returns one or more option names. At least one option name is required. * @see Option#names() */ public String[] names() { return names; } private boolean showDefaultValue(CommandSpec commandSpec) { if (showDefaultValue() == Help.Visibility.ALWAYS) { return true; } if (showDefaultValue() == Help.Visibility.NEVER) { return false; } boolean isBoolean = !isMultiValue() && isBoolean(auxiliaryTypes()[0]); return commandSpec != null && commandSpec.showDefaultValues() && defaultValue() != null && !help() && !versionHelp() && !usageHelp() && !isBoolean; } /** Returns whether this option disables validation of the other arguments. 
* @see Option#help() * @deprecated Use {@link #usageHelp()} and {@link #versionHelp()} instead. */ @Deprecated public boolean help() { return help; } /** Returns whether this option allows the user to request usage help. * @see Option#usageHelp() */ public boolean usageHelp() { return usageHelp; } /** Returns whether this option allows the user to request version information. * @see Option#versionHelp() */ public boolean versionHelp() { return versionHelp; } /** Replaces the option names with the specified values. At least one option name is required. * @return this OptionSpec instance to provide a fluent interface */ public OptionSpec names(String... names) { this.names = names; return self(); } /** Sets whether this option disables validation of the other arguments. */ public OptionSpec help(boolean help) { this.help = help; return self(); } /** Sets whether this option allows the user to request usage help. */ public OptionSpec usageHelp(boolean usageHelp) { this.usageHelp = usageHelp; return self(); } /** Sets whether this option allows the user to request version information.*/ public OptionSpec versionHelp(boolean versionHelp) { this.versionHelp = versionHelp; return self(); } public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof OptionSpec)) { return false; } OptionSpec other = (OptionSpec) obj; boolean result = super.equals(obj) && help == other.help && usageHelp == other.usageHelp && versionHelp == other.versionHelp && new HashSet<String>(Arrays.asList(names)).equals(new HashSet<String>(Arrays.asList(other.names))); return result; } public int hashCode() { return super.hashCode() + 37 * Assert.hashCode(help) + 37 * Assert.hashCode(usageHelp) + 37 * Assert.hashCode(versionHelp) + 37 * Arrays.hashCode(names); } } /** The {@code PositionalParamSpec} class models aspects of a <em>positional parameter</em> of a {@linkplain CommandSpec command}, including whether * it is required or optional, and attributes for the usage help message describing the positional parameter. * <p> * Positional parameters have an {@link #index() index} (or a range of indices). A positional parameter is matched when the parser * encounters a command line argument at that index. Named options and their parameters do not change the index counter, * so the command line can contain a mixture of positional parameters and named options. * </p><p> * Depending on the positional parameter's {@link #arity() arity}, the parser may consume multiple command line * arguments starting from the current index. The parser will call {@link #setValue(Object) setValue} on * the {@code PositionalParamSpec} for each of the parameters encountered. * For multi-value positional parameters, the {@code type} may be an array, a {@code Collection} or a {@code Map}. In this case * the parser will get the data structure by calling {@link #getValue() getValue} and modify the contents of this data structure. * (In the case of arrays, the array is replaced with a new instance with additional elements.) * </p><p> * Before calling the setter, picocli converts the positional parameter value from a String to the parameter's type. * </p> * <ul> * <li>If a positional parameter-specific {@link #converters() converter} is configured, this will be used for type conversion. 
* If the positional parameter's type is a {@code Map}, the map may have different types for its keys and its values, so * {@link #converters() converters} should provide two converters: one for the map keys and one for the map values.</li> * <li>Otherwise, the positional parameter's {@link #type() type} is used to look up a converter in the list of * {@linkplain CommandLine#registerConverter(Class, ITypeConverter) registered converters}. For multi-value positional parameters, * the {@code type} may be an array, a {@code Collection} or a {@code Map}. In that case the elements are converted * based on the positional parameter's {@link #auxiliaryTypes() auxiliaryTypes}. The auxiliaryType is used to look up * the converter(s) to use to convert the individual parameter values. * Maps may have different types for their keys and their values, so {@link #auxiliaryTypes() auxiliaryTypes} * should provide two types: one for the map keys and one for the map values.</li> * </ul> * <p> * {@code PositionalParamSpec} objects are used by the picocli command line interpreter and help message generator. * Picocli can construct a {@code PositionalParamSpec} automatically from fields and methods with {@link Parameters @Parameters} * annotations. Alternatively, a {@code PositionalParamSpec} can be constructed programmatically. * When a {@code PositionalParamSpec} is created from a {@link Parameters @Parameters}-annotated field or method, this field is * set (or the method is invoked) when the position is matched and {@link #setValue(Object) setValue} is called. * Programmatically constructed {@code PositionalParamSpec} instances will remember the value passed to the * {@link #setValue(Object) setValue} method so it can be retrieved with the {@link #getValue() getValue} method. * This behaviour can be customized by installing a custom {@link IGetter} and {@link ISetter} on the {@code PositionalParamSpec}. * </p> * @since 3.0 */
public static class PositionalParamSpec extends ArgSpec<PositionalParamSpec> { private Range index; private Range capacity;
/** Ensures all attributes of this {@code PositionalParamSpec} have a valid value; throws an {@link InitializationException} if this cannot be achieved. */ PositionalParamSpec validate() { super.validate(); if (index() == null) { index("*"); } if (capacity() == null) { capacity = Range.parameterCapacity(arity(), index()); } if (toString() == null) { withToString("positional parameter[" + index() + "]"); } return this; }
public boolean isOption() { return false; }
public boolean isPositional() { return true; }
/** Returns an index or range specifying which of the command line arguments should be assigned to this positional parameter. * @see Parameters#index() */ public Range index() { return index; }
private Range capacity() { return capacity; }
/** Sets the index or range specifying which of the command line arguments should be assigned to this positional parameter. */ public PositionalParamSpec index(String range) { return index(Range.valueOf(range)); }
/** Sets the index or range specifying which of the command line arguments should be assigned to this positional parameter.
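* <p>A minimal illustrative sketch (the variable name {@code remainder} and the range value {@code "1..*"} are hypothetical examples, not part of the documented API):</p>
* <pre>{@code
* // match the second and all subsequent command line arguments:
* PositionalParamSpec remainder = new PositionalParamSpec().index("1..*");
* }</pre>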
*/ public PositionalParamSpec index(Range index) { this.index = index; return self(); } private boolean showDefaultValue(CommandSpec commandSpec) { if (showDefaultValue() == Help.Visibility.ALWAYS) { return true; } if (showDefaultValue() == Help.Visibility.NEVER) { return false; } boolean isBoolean = !isMultiValue() && isBoolean(auxiliaryTypes()[0]); return commandSpec != null && commandSpec.showDefaultValues() && defaultValue() != null && !isBoolean; } public int hashCode() { return super.hashCode() + 37 * Assert.hashCode(capacity) + 37 * Assert.hashCode(index); } public boolean equals(Object obj) { if (obj == this) { return true; } if (!(obj instanceof PositionalParamSpec)) { return false; } PositionalParamSpec other = (PositionalParamSpec) obj; return Assert.equals(this.capacity, other.capacity) && Assert.equals(this.index, other.index); } } /** * Helper class responsible for processing command line arguments. */ private class Interpreter { private final Map<Class<?>, ITypeConverter<?>> converterRegistry = new HashMap<Class<?>, ITypeConverter<?>>(); private boolean isHelpRequested; private int position; Interpreter() { registerBuiltInConverters(); } private void registerBuiltInConverters() { converterRegistry.put(Object.class, new BuiltIn.StringConverter()); converterRegistry.put(String.class, new BuiltIn.StringConverter()); converterRegistry.put(StringBuilder.class, new BuiltIn.StringBuilderConverter()); converterRegistry.put(CharSequence.class, new BuiltIn.CharSequenceConverter()); converterRegistry.put(Byte.class, new BuiltIn.ByteConverter()); converterRegistry.put(Byte.TYPE, new BuiltIn.ByteConverter()); converterRegistry.put(Boolean.class, new BuiltIn.BooleanConverter()); converterRegistry.put(Boolean.TYPE, new BuiltIn.BooleanConverter()); converterRegistry.put(Character.class, new BuiltIn.CharacterConverter()); converterRegistry.put(Character.TYPE, new BuiltIn.CharacterConverter()); converterRegistry.put(Short.class, new BuiltIn.ShortConverter()); converterRegistry.put(Short.TYPE, new BuiltIn.ShortConverter()); converterRegistry.put(Integer.class, new BuiltIn.IntegerConverter()); converterRegistry.put(Integer.TYPE, new BuiltIn.IntegerConverter()); converterRegistry.put(Long.class, new BuiltIn.LongConverter()); converterRegistry.put(Long.TYPE, new BuiltIn.LongConverter()); converterRegistry.put(Float.class, new BuiltIn.FloatConverter()); converterRegistry.put(Float.TYPE, new BuiltIn.FloatConverter()); converterRegistry.put(Double.class, new BuiltIn.DoubleConverter()); converterRegistry.put(Double.TYPE, new BuiltIn.DoubleConverter()); converterRegistry.put(File.class, new BuiltIn.FileConverter()); converterRegistry.put(URI.class, new BuiltIn.URIConverter()); converterRegistry.put(URL.class, new BuiltIn.URLConverter()); converterRegistry.put(Date.class, new BuiltIn.ISO8601DateConverter()); converterRegistry.put(Time.class, new BuiltIn.ISO8601TimeConverter()); converterRegistry.put(BigDecimal.class, new BuiltIn.BigDecimalConverter()); converterRegistry.put(BigInteger.class, new BuiltIn.BigIntegerConverter()); converterRegistry.put(Charset.class, new BuiltIn.CharsetConverter()); converterRegistry.put(InetAddress.class, new BuiltIn.InetAddressConverter()); converterRegistry.put(Pattern.class, new BuiltIn.PatternConverter()); converterRegistry.put(UUID.class, new BuiltIn.UUIDConverter()); converterRegistry.put(Currency.class, new BuiltIn.CurrencyConverter()); converterRegistry.put(TimeZone.class, new BuiltIn.TimeZoneConverter()); converterRegistry.put(ByteOrder.class, new 
BuiltIn.ByteOrderConverter()); converterRegistry.put(Class.class, new BuiltIn.ClassConverter()); converterRegistry.put(Connection.class, new BuiltIn.ConnectionConverter()); converterRegistry.put(Driver.class, new BuiltIn.DriverConverter()); converterRegistry.put(Timestamp.class, new BuiltIn.TimestampConverter()); converterRegistry.put(NetworkInterface.class, new BuiltIn.NetworkInterfaceConverter()); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.Duration", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.Instant", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.LocalDate", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.LocalDateTime", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.LocalTime", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.MonthDay", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.OffsetDateTime", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.OffsetTime", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.Period", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.Year", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.YearMonth", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.ZonedDateTime", "parse", CharSequence.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.ZoneId", "of", String.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.time.ZoneOffset", "of", String.class); BuiltIn.registerIfAvailable(converterRegistry, tracer, "java.nio.file.Path", "java.nio.file.Paths", "get", String.class, String[].class); } /** * Entry point into parsing command line arguments. * @param args the command line arguments * @return a list with all commands and subcommands initialized by this method * @throws ParameterException if the specified command line arguments are invalid */ List<CommandLine> parse(String... 
args) { Assert.notNull(args, "argument array"); if (tracer.isInfo()) {tracer.info("Parsing %d command line args %s%n", args.length, Arrays.toString(args));} List<String> expanded = new ArrayList<String>(); for (String arg : args) { addOrExpand(arg, expanded, new LinkedHashSet<String>()); } Stack<String> arguments = new Stack<String>(); arguments.addAll(reverseList(expanded)); List<CommandLine> result = new ArrayList<CommandLine>(); parse(result, arguments, args); return result; } private void addOrExpand(String arg, List<String> arguments, Set<String> visited) { if (expandAtFiles && !arg.equals("@") && arg.startsWith("@")) { arg = arg.substring(1); if (arg.startsWith("@")) { if (tracer.isInfo()) { tracer.info("Not expanding @-escaped argument %s (trimmed leading '@' char)%n", arg); } } else { if (tracer.isInfo()) { tracer.info("Expanding argument file @%s%n", arg); } expandArgumentFile(arg, arguments, visited); return; } } arguments.add(arg); } private void expandArgumentFile(String fileName, List<String> arguments, Set<String> visited) { File file = new File(fileName); if (!file.canRead()) { if (tracer.isInfo()) {tracer.info("File %s does not exist or cannot be read; treating argument literally%n", fileName);} arguments.add("@" + fileName); } else if (visited.contains(file.getAbsolutePath())) { if (tracer.isInfo()) {tracer.info("Already visited file %s; ignoring...%n", file.getAbsolutePath());} } else { expandValidArgumentFile(fileName, file, arguments, visited); } } private void expandValidArgumentFile(String fileName, File file, List<String> arguments, Set<String> visited) { visited.add(file.getAbsolutePath()); List<String> result = new ArrayList<String>(); LineNumberReader reader = null; try { reader = new LineNumberReader(new FileReader(file)); StreamTokenizer tok = new StreamTokenizer(reader); tok.resetSyntax(); tok.wordChars(' ', 255); tok.whitespaceChars(0, ' '); tok.commentChar('#'); tok.quoteChar('"'); tok.quoteChar('\''); while (tok.nextToken() != StreamTokenizer.TT_EOF) { addOrExpand(tok.sval, result, visited); } } catch (Exception ex) { throw new InitializationException("Could not read argument file @" + fileName, ex); } finally { if (reader != null) { try {reader.close();} catch (Exception ignored) {} } } if (tracer.isInfo()) {tracer.info("Expanded file @%s to arguments %s%n", fileName, result);} arguments.addAll(result); } private void clear() { position = 0; isHelpRequested = false; CommandLine.this.versionHelpRequested = false; CommandLine.this.usageHelpRequested = false; CommandLine.this.unmatchedArguments.clear(); } private void parse(List<CommandLine> parsedCommands, Stack<String> argumentStack, String[] originalArgs) { clear(); // first reset any state in case this CommandLine instance is being reused if (tracer.isDebug()) {tracer.debug("Initializing %s: %d options, %d positional parameters, %d required, %d subcommands.%n", commandSpec.toString(), new HashSet<ArgSpec<?>>(commandSpec.optionsMap().values()).size(), commandSpec.positionalParameters().size(), commandSpec.requiredArgs().size(), commandSpec .subcommands().size());} parsedCommands.add(CommandLine.this); List<ArgSpec<?>> required = new ArrayList<ArgSpec<?>>(commandSpec.requiredArgs()); Set<ArgSpec<?>> initialized = new HashSet<ArgSpec<?>>(); Collections.sort(required, new PositionalParametersSorter()); try { processArguments(parsedCommands, argumentStack, required, initialized, originalArgs); } catch (ParameterException ex) { throw ex; } catch (Exception ex) { int offendingArgIndex = originalArgs.length - 
argumentStack.size() - 1; String arg = offendingArgIndex >= 0 && offendingArgIndex < originalArgs.length ? originalArgs[offendingArgIndex] : "?"; throw ParameterException.create(CommandLine.this, ex, arg, offendingArgIndex, originalArgs); }
if (!isAnyHelpRequested() && !required.isEmpty()) { for (ArgSpec<?> missing : required) { if (missing.isOption()) { throw MissingParameterException.create(CommandLine.this, required, commandSpec.separator()); } else { assertNoMissingParameters(missing, missing.arity().min, argumentStack); } } }
if (!unmatchedArguments.isEmpty()) { if (!isUnmatchedArgumentsAllowed()) { throw new UnmatchedArgumentException(CommandLine.this, unmatchedArguments); } if (tracer.isWarn()) { tracer.warn("Unmatched arguments: %s%n", unmatchedArguments); } } }
private void processArguments(List<CommandLine> parsedCommands, Stack<String> args, Collection<ArgSpec<?>> required, Set<ArgSpec<?>> initialized, String[] originalArgs) throws Exception {
// arg must be one of:
// 1. the "--" double dash separating options from positional arguments
// 2. a stand-alone flag, like "-v" or "--verbose": no value required, must map to boolean or Boolean field
// 3. a short option followed by an argument, like "-f file" or "-ffile": may map to any type of field
// 4. a long option followed by an argument, like "-file out.txt" or "-file=out.txt"
// 5. one or more remaining arguments without any associated options. Must be the last in the list.
// 6. a combination of stand-alone options, like "-vxr". Equivalent to "-v -x -r", "-v true -x true -r true"
// 7. a combination of stand-alone options and one option with an argument, like "-vxrffile"
String separator = commandSpec.separator(); while (!args.isEmpty()) { String arg = args.pop(); if (tracer.isDebug()) {tracer.debug("Processing argument '%s'. Remainder=%s%n", arg, reverse(copy(args)));}
// Double-dash separates options from positional arguments.
// If found, then interpret the remaining args as positional parameters.
if ("--".equals(arg)) { tracer.info("Found end-of-options delimiter '--'. Treating remainder as positional parameters.%n"); processRemainderAsPositionalParameters(required, initialized, args); return; // we are done
}
// if we find another command, we are done with the current command
if (commandSpec.subcommands().containsKey(arg)) { if (!isAnyHelpRequested() && !required.isEmpty()) { // ensure current command portion is valid
throw MissingParameterException.create(CommandLine.this, required, separator); } if (tracer.isDebug()) {tracer.debug("Found subcommand '%s' (%s)%n", arg, commandSpec.subcommands().get(arg).commandSpec.toString());} commandSpec.subcommands().get(arg).interpreter.parse(parsedCommands, args, originalArgs); return; // remainder done by the command
}
// First try to interpret the argument as a single option (as opposed to a compact group of options).
// A single option may be without option parameters, like "-v" or "--verbose" (a boolean value),
// or an option may have one or more option parameters.
// A parameter may be attached to the option.
boolean paramAttachedToOption = false; int separatorIndex = arg.indexOf(separator); if (separatorIndex > 0) { String key = arg.substring(0, separatorIndex);
// be greedy. Consume the whole arg as an option if possible.
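// Illustrative walk-through (hypothetical values, assuming the default separator "="): given the arg "--file=out.txt",
// key is "--file" and "out.txt" is pushed back onto the stack as the option parameter, so the option is processed
// as if "--file" and "out.txt" had been supplied separately. If "--file" is not a known option name, the arg is left intact.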
if (commandSpec.optionsMap().containsKey(key) && !commandSpec.optionsMap().containsKey(arg)) { paramAttachedToOption = true; String optionParam = arg.substring(separatorIndex + separator.length()); args.push(optionParam); arg = key; if (tracer.isDebug()) {tracer.debug("Separated '%s' option from '%s' option parameter%n", key, optionParam);} } else { if (tracer.isDebug()) {tracer.debug("'%s' contains separator '%s' but '%s' is not a known option%n", arg, separator, key);} } } else { if (tracer.isDebug()) {tracer.debug("'%s' cannot be separated into <option>%s<option-parameter>%n", arg, separator);} }
if (commandSpec.optionsMap().containsKey(arg)) { processStandaloneOption(required, initialized, arg, args, paramAttachedToOption); }
// Only compact (single-letter) options can be grouped with other options or with an argument.
else if (arg.length() > 2 && arg.startsWith("-")) { if (tracer.isDebug()) {tracer.debug("Trying to process '%s' as clustered short options%n", arg);} processClusteredShortOptions(required, initialized, arg, args); }
// The argument could not be interpreted as an option.
// We take this to mean that the remainder are positional arguments
else { args.push(arg); if (tracer.isDebug()) {tracer.debug("Could not find option '%s', deciding whether to treat as unmatched option or positional parameter...%n", arg);} if (resemblesOption(arg)) { handleUnmatchedArguments(args.pop()); continue; } // #149
if (tracer.isDebug()) {tracer.debug("No option named '%s' found. Processing remainder as positional parameters%n", arg);} processPositionalParameter(required, initialized, args); } } }
private boolean resemblesOption(String arg) { int count = 0; for (String optionName : commandSpec.optionsMap().keySet()) { for (int i = 0; i < arg.length(); i++) { if (optionName.length() > i && arg.charAt(i) == optionName.charAt(i)) { count++; } else { break; } } } boolean result = count > 0 && count * 10 >= commandSpec.optionsMap().size() * 9; // at least one prefix char in common with 9 out of 10 options
if (tracer.isDebug()) {tracer.debug("%s %s an option: %d matching prefix chars out of %d option names%n", arg, (result ? "resembles" : "doesn't resemble"), count, commandSpec.optionsMap().size());} return result; }
private void handleUnmatchedArguments(String arg) { Stack<String> args = new Stack<String>(); args.add(arg); handleUnmatchedArguments(args); }
private void handleUnmatchedArguments(Stack<String> args) { while (!args.isEmpty()) { unmatchedArguments.add(args.pop()); } // addAll would give args in reverse order
}
private void processRemainderAsPositionalParameters(Collection<ArgSpec<?>> required, Set<ArgSpec<?>> initialized, Stack<String> args) throws Exception { while (!args.empty()) { processPositionalParameter(required, initialized, args); } }
private void processPositionalParameter(Collection<ArgSpec<?>> required, Set<ArgSpec<?>> initialized, Stack<String> args) throws Exception { if (tracer.isDebug()) {tracer.debug("Processing next arg as a positional parameter at index=%d. Remainder=%s%n", position, reverse(copy(args)));} int consumed = 0; for (PositionalParamSpec positionalParam : commandSpec.positionalParameters()) { Range indexRange = positionalParam.index(); if (!indexRange.contains(position)) { continue; } Stack<String> argsCopy = copy(args); Range arity = positionalParam.arity(); if (tracer.isDebug()) {tracer.debug("Position %d is in index range %s.
Trying to assign args to %s, arity=%s%n", position, indexRange, positionalParam, arity);} assertNoMissingParameters(positionalParam, arity.min, argsCopy); int originalSize = argsCopy.size(); applyOption(positionalParam, arity, argsCopy, initialized, "args[" + indexRange + "] at position " + position); int count = originalSize - argsCopy.size(); if (count > 0) { required.remove(positionalParam); } consumed = Math.max(consumed, count); } // remove processed args from the stack for (int i = 0; i < consumed; i++) { args.pop(); } position += consumed; if (tracer.isDebug()) {tracer.debug("Consumed %d arguments, moving position to index %d.%n", consumed, position);} if (consumed == 0 && !args.isEmpty()) { handleUnmatchedArguments(args.pop()); } } private void processStandaloneOption(Collection<ArgSpec<?>> required, Set<ArgSpec<?>> initialized, String arg, Stack<String> args, boolean paramAttachedToKey) throws Exception { ArgSpec<?> argSpec = commandSpec.optionsMap().get(arg); required.remove(argSpec); Range arity = argSpec.arity(); if (paramAttachedToKey) { arity = arity.min(Math.max(1, arity.min)); // if key=value, minimum arity is at least 1 } if (tracer.isDebug()) {tracer.debug("Found option named '%s': %s, arity=%s%n", arg, argSpec, arity);} applyOption(argSpec, arity, args, initialized, "option " + arg); } private void processClusteredShortOptions(Collection<ArgSpec<?>> required, Set<ArgSpec<?>> initialized, String arg, Stack<String> args) throws Exception { String prefix = arg.substring(0, 1); String cluster = arg.substring(1); boolean paramAttachedToOption = true; do { if (cluster.length() > 0 && commandSpec.posixOptionsMap().containsKey(cluster.charAt(0))) { ArgSpec<?> argSpec = commandSpec.posixOptionsMap().get(cluster.charAt(0)); Range arity = argSpec.arity(); String argDescription = "option " + prefix + cluster.charAt(0); if (tracer.isDebug()) {tracer.debug("Found option '%s%s' in %s: %s, arity=%s%n", prefix, cluster.charAt(0), arg, argSpec, arity);} required.remove(argSpec); cluster = cluster.length() > 0 ? cluster.substring(1) : ""; paramAttachedToOption = cluster.length() > 0; if (cluster.startsWith(commandSpec.separator())) {// attached with separator, like -f=FILE or -v=true cluster = cluster.substring(commandSpec.separator().length()); arity = arity.min(Math.max(1, arity.min)); // if key=value, minimum arity is at least 1 } if (arity.min > 0 && !empty(cluster)) { if (tracer.isDebug()) {tracer.debug("Trying to process '%s' as option parameter%n", cluster);} } // arity may be >= 1, or // arity <= 0 && !cluster.startsWith(separator) // e.g., boolean @Option("-v", arity=0, varargs=true); arg "-rvTRUE", remainder cluster="TRUE" if (!empty(cluster)) { args.push(cluster); // interpret remainder as option parameter (CAUTION: may be empty string!) } int argCount = args.size(); int consumed = applyOption(argSpec, arity, args, initialized, argDescription); // if cluster was consumed as a parameter or if this field was the last in the cluster we're done; otherwise continue do-while loop if (empty(cluster) || args.isEmpty() || args.size() < argCount) { return; } cluster = args.pop(); } else { // cluster is empty || cluster.charAt(0) is not a short option key if (cluster.length() == 0) { // we finished parsing a group of short options like -rxv return; // return normally and parse the next arg } // We get here when the remainder of the cluster group is neither an option, // nor a parameter that the last option could consume. if (arg.endsWith(cluster)) { args.push(paramAttachedToOption ? 
prefix + cluster : cluster); if (args.peek().equals(arg)) { // #149 be consistent between unmatched short and long options
if (tracer.isDebug()) {tracer.debug("Could not match any short options in %s, deciding whether to treat as unmatched option or positional parameter...%n", arg);} if (resemblesOption(arg)) { handleUnmatchedArguments(args.pop()); return; } // #149
processPositionalParameter(required, initialized, args); return; }
// remainder was part of a clustered group that could not be completely parsed
if (tracer.isDebug()) {tracer.debug("No option found for %s in %s%n", cluster, arg);} handleUnmatchedArguments(args.pop()); } else { args.push(cluster); if (tracer.isDebug()) {tracer.debug("%s is not an option parameter for %s%n", cluster, arg);} processPositionalParameter(required, initialized, args); } return; } } while (true); }
private int applyOption(ArgSpec<?> argSpec, Range arity, Stack<String> args, Set<ArgSpec<?>> initialized, String argDescription) throws Exception { updateHelpRequested(argSpec); assertNoMissingParameters(argSpec, arity.min, args); Class<?> cls = argSpec.type(); if (cls.isArray()) { return applyValuesToArrayField(argSpec, arity, args, cls, argDescription); } if (Collection.class.isAssignableFrom(cls)) { return applyValuesToCollectionField(argSpec, arity, args, cls, argDescription); } if (Map.class.isAssignableFrom(cls)) { return applyValuesToMapField(argSpec, arity, args, cls, argDescription); } cls = argSpec.auxiliaryTypes()[0]; // field may be interface/abstract type, use annotation to get concrete type
return applyValueToSingleValuedField(argSpec, arity, args, cls, initialized, argDescription); }
private int applyValueToSingleValuedField(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> cls, Set<ArgSpec<?>> initialized, String argDescription) throws Exception { boolean noMoreValues = args.isEmpty(); String value = args.isEmpty() ? null : trim(args.pop()); // unquote the value
int result = arity.min; // the number of args we need to consume
// special logic for booleans: BooleanConverter accepts only "true" or "false".
if ((cls == Boolean.class || cls == Boolean.TYPE) && arity.min <= 0) {
// boolean option with arity = 0..1 or 0..*: value MAY be a param
if (arity.max > 0 && ("true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value))) { result = 1; // if it is a varargs we only consume 1 argument if it is a boolean value
} else { if (value != null) { args.push(value); // we don't consume the value
} Boolean currentValue = (Boolean) argSpec.getValue(); value = String.valueOf(currentValue == null ?
true : !currentValue); // #147 toggle existing boolean value } } if (noMoreValues && value == null) { return 0; } ITypeConverter<?> converter = getTypeConverter(cls, argSpec, 0); Object newValue = tryConvert(argSpec, -1, converter, value, cls); Object oldValue = argSpec.getValue(); TraceLevel level = TraceLevel.INFO; String traceMessage = "Setting %s to '%3$s' (was '%2$s') for %4$s%n"; if (initialized != null) { if (initialized.contains(argSpec)) { if (!isOverwrittenOptionsAllowed()) { throw new OverwrittenOptionException(CommandLine.this, optionDescription("", argSpec, 0) + " should be specified only once"); } level = TraceLevel.WARN; traceMessage = "Overwriting %s value '%s' with '%s' for %s%n"; } initialized.add(argSpec); } if (tracer.level.isEnabled(level)) { level.print(tracer, traceMessage, argSpec.toString(), String.valueOf(oldValue), String.valueOf(newValue), argDescription); } argSpec.setValue(newValue); return result; } private int applyValuesToMapField(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> mapClass, String argDescription) throws Exception { Class<?>[] classes = argSpec.auxiliaryTypes(); if (classes.length < 2) { throw new ParameterException(CommandLine.this, argSpec.toString() + " needs two types (one for the map key, one for the value) but only has " + classes.length + " types configured."); } ITypeConverter<?> keyConverter = getTypeConverter(classes[0], argSpec, 0); ITypeConverter<?> valueConverter = getTypeConverter(classes[1], argSpec, 1); @SuppressWarnings("unchecked") Map<Object, Object> result = (Map<Object, Object>) argSpec.getValue(); if (result == null) { result = createMap(mapClass); argSpec.setValue(result); } int originalSize = result.size(); consumeMapArguments(argSpec, arity, args, classes, keyConverter, valueConverter, result, argDescription); return result.size() - originalSize; } private void consumeMapArguments(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?>[] classes, ITypeConverter<?> keyConverter, ITypeConverter<?> valueConverter, Map<Object, Object> result, String argDescription) throws Exception { // first do the arity.min mandatory parameters for (int i = 0; i < arity.min; i++) { consumeOneMapArgument(argSpec, args, classes, keyConverter, valueConverter, result, i, argDescription); } // now process the varargs if any for (int i = arity.min; i < arity.max && !args.isEmpty(); i++) { if (argSpec.isOption()) { if (commandSpec.subcommands().containsKey(args.peek()) || isOption(args.peek())) { return; } } consumeOneMapArgument(argSpec, args, classes, keyConverter, valueConverter, result, i, argDescription); } } private void consumeOneMapArgument(ArgSpec<?> argSpec, Stack<String> args, Class<?>[] classes, ITypeConverter<?> keyConverter, ITypeConverter<?> valueConverter, Map<Object, Object> result, int index, String argDescription) throws Exception { String[] values = argSpec.splitValue(trim(args.pop())); for (String value : values) { String[] keyValue = value.split("="); if (keyValue.length < 2) { String splitRegex = argSpec.splitRegex(); if (splitRegex.length() == 0) { throw new ParameterException(CommandLine.this, "Value for option " + optionDescription("", argSpec, 0) + " should be in KEY=VALUE format but was " + value); } else { throw new ParameterException(CommandLine.this, "Value for option " + optionDescription("", argSpec, 0) + " should be in KEY=VALUE[" + splitRegex + "KEY=VALUE]... 
format but was " + value); } } Object mapKey = tryConvert(argSpec, index, keyConverter, keyValue[0], classes[0]); Object mapValue = tryConvert(argSpec, index, valueConverter, keyValue[1], classes[1]); result.put(mapKey, mapValue); if (tracer.isInfo()) {tracer.info("Putting [%s : %s] in %s<%s, %s> %s for %s%n", String.valueOf(mapKey), String.valueOf(mapValue), result.getClass().getSimpleName(), classes[0].getSimpleName(), classes[1].getSimpleName(), argSpec .toString(), argDescription);} } } private void checkMaxArityExceeded(Range arity, int remainder, ArgSpec<?> argSpec, String[] values) { if (values.length <= remainder) { return; } String desc = arity.max == remainder ? "" + remainder : arity + ", remainder=" + remainder; throw new MaxValuesforFieldExceededException(CommandLine.this, optionDescription("", argSpec, -1) + " max number of values (" + arity.max + ") exceeded: remainder is " + remainder + " but " + values.length + " values were specified: " + Arrays.toString(values)); } private int applyValuesToArrayField(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> cls, String argDescription) throws Exception { Object existing = argSpec.getValue(); int length = existing == null ? 0 : Array.getLength(existing); Class<?> type = argSpec.auxiliaryTypes()[0]; List<Object> converted = consumeArguments(argSpec, arity, args, type, argDescription); List<Object> newValues = new ArrayList<Object>(); for (int i = 0; i < length; i++) { newValues.add(Array.get(existing, i)); } for (Object obj : converted) { if (obj instanceof Collection<?>) { newValues.addAll((Collection<?>) obj); } else { newValues.add(obj); } } Object array = Array.newInstance(type, newValues.size()); argSpec.setValue(array); for (int i = 0; i < newValues.size(); i++) { Array.set(array, i, newValues.get(i)); } return converted.size(); // return how many args were consumed } @SuppressWarnings("unchecked") private int applyValuesToCollectionField(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> collectionType, String argDescription) throws Exception { Collection<Object> collection = (Collection<Object>) argSpec.getValue(); Class<?> type = argSpec.auxiliaryTypes()[0]; List<Object> converted = consumeArguments(argSpec, arity, args, type, argDescription); if (collection == null) { collection = createCollection(collectionType); argSpec.setValue(collection); } for (Object element : converted) { if (element instanceof Collection<?>) { collection.addAll((Collection<?>) element); } else { collection.add(element); } } return converted.size(); } private List<Object> consumeArguments(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> type, String argDescription) throws Exception { List<Object> result = new ArrayList<Object>(); // first do the arity.min mandatory parameters for (int i = 0; i < arity.min; i++) { consumeOneArgument(argSpec, arity, args, type, result, i, argDescription); } // now process the varargs if any for (int i = arity.min; i < arity.max && !args.isEmpty(); i++) { if (argSpec.isOption()) { // for vararg Options, we stop if we encounter '--', a command, or another option if (commandSpec.subcommands().containsKey(args.peek()) || isOption(args.peek())) { break; } } consumeOneArgument(argSpec, arity, args, type, result, i, argDescription); } if (result.isEmpty() && arity.min == 0 && arity.max <= 1 && isBoolean(type)) { return Arrays.asList((Object) Boolean.TRUE); } return result; } private int consumeOneArgument(ArgSpec<?> argSpec, Range arity, Stack<String> args, Class<?> type, List<Object> 
result, int index, String argDescription) throws Exception { String[] values = argSpec.splitValue(trim(args.pop())); ITypeConverter<?> converter = getTypeConverter(type, argSpec, 0); for (int j = 0; j < values.length; j++) { result.add(tryConvert(argSpec, index, converter, values[j], type)); if (tracer.isInfo()) { tracer.info("Adding [%s] to %s for %s%n", String.valueOf(result.get(result.size() - 1)), argSpec.toString(), argDescription); } } //checkMaxArityExceeded(arity, max, field, values); return ++index; } /** * Called when parsing varargs parameters for a multi-value option. * When an option is encountered, the remainder should not be interpreted as vararg elements. * @param arg the string to determine whether it is an option or not * @return true if it is an option, false otherwise */ private boolean isOption(String arg) { if ("--".equals(arg)) { return true; } // not just arg prefix: we may be in the middle of parsing -xrvfFILE if (commandSpec.optionsMap().containsKey(arg)) { // -v or -f or --file (not attached to param or other option) return true; } int separatorIndex = arg.indexOf(commandSpec.separator()); if (separatorIndex > 0) { // -f=FILE or --file==FILE (attached to param via separator) if (commandSpec.optionsMap().containsKey(arg.substring(0, separatorIndex))) { return true; } } return (arg.length() > 2 && arg.startsWith("-") && commandSpec.posixOptionsMap().containsKey(arg.charAt(1))); } private Object tryConvert(ArgSpec<?> argSpec, int index, ITypeConverter<?> converter, String value, Class<?> type) throws Exception { try { return converter.convert(value); } catch (TypeConversionException ex) { throw new ParameterException(CommandLine.this, ex.getMessage() + optionDescription(" for ", argSpec, index)); } catch (Exception other) { String desc = optionDescription(" for ", argSpec, index) + ": " + other; throw new ParameterException(CommandLine.this, "Could not convert '" + value + "' to " + type.getSimpleName() + desc, other); } } private String optionDescription(String prefix, ArgSpec<?> argSpec, int index) { String desc = ""; if (argSpec.isOption()) { desc = prefix + "option '" + ((OptionSpec) argSpec).names()[0] + "'"; if (index >= 0) { if (argSpec.arity().max > 1) { desc += " at index " + index; } desc += " (" + argSpec.paramLabel() + ")"; } } else { desc = prefix + "positional parameter at index " + ((PositionalParamSpec) argSpec).index() + " (" + argSpec.paramLabel() + ")"; } return desc; } private boolean isAnyHelpRequested() { return isHelpRequested || versionHelpRequested || usageHelpRequested; } private void updateHelpRequested(ArgSpec<?> argSpec) { if (argSpec.isOption()) { OptionSpec option = (OptionSpec) argSpec; isHelpRequested |= is(argSpec, "help", option.help()); CommandLine.this.versionHelpRequested |= is(argSpec, "versionHelp", option.versionHelp()); CommandLine.this.usageHelpRequested |= is(argSpec, "usageHelp", option.usageHelp()); } } private boolean is(ArgSpec<?> p, String attribute, boolean value) { if (value) { if (tracer.isInfo()) {tracer.info("%s has '%s' annotation: not validating required fields%n", p.toString(), attribute); }} return value; } @SuppressWarnings("unchecked") private Collection<Object> createCollection(Class<?> collectionClass) throws Exception { if (collectionClass.isInterface()) { if (List.class.isAssignableFrom(collectionClass)) { return new ArrayList<Object>(); } else if (SortedSet.class.isAssignableFrom(collectionClass)) { return new TreeSet<Object>(); } else if (Set.class.isAssignableFrom(collectionClass)) { return new 
LinkedHashSet<Object>(); } else if (Queue.class.isAssignableFrom(collectionClass)) { return new LinkedList<Object>(); // ArrayDeque is only available since 1.6 } return new ArrayList<Object>(); } // custom Collection implementation class must have default constructor return (Collection<Object>) collectionClass.newInstance(); } @SuppressWarnings("unchecked") private Map<Object, Object> createMap(Class<?> mapClass) throws Exception { try { // if it is an implementation class, instantiate it return (Map<Object, Object>) mapClass.getDeclaredConstructor().newInstance(); } catch (Exception ignored) {} return new LinkedHashMap<Object, Object>(); } private ITypeConverter<?> getTypeConverter(final Class<?> type, ArgSpec<?> argSpec, int index) { if (argSpec.converters().length > index) { return argSpec.converters()[index]; } if (converterRegistry.containsKey(type)) { return converterRegistry.get(type); } if (type.isEnum()) { return new ITypeConverter<Object>() { @SuppressWarnings("unchecked") public Object convert(String value) throws Exception { return Enum.valueOf((Class<Enum>) type, value); } }; } throw new MissingTypeConverterException(CommandLine.this, "No TypeConverter registered for " + type.getName() + " of " + argSpec); } private void assertNoMissingParameters(ArgSpec<?> argSpec, int arity, Stack<String> args) { if (arity > args.size()) { if (arity == 1) { if (argSpec.isOption()) { throw new MissingParameterException(CommandLine.this, "Missing required parameter for " + optionDescription("", argSpec, 0)); } Range indexRange = ((PositionalParamSpec) argSpec).index(); String sep = ""; String names = ""; int count = 0; List<PositionalParamSpec> positionalParameters = commandSpec.positionalParameters(); for (int i = indexRange.min; i < positionalParameters.size(); i++) { if (positionalParameters.get(i).arity().min > 0) { names += sep + positionalParameters.get(i).paramLabel(); sep = ", "; count++; } } String msg = "Missing required parameter"; Range paramArity = argSpec.arity(); if (paramArity.isVariable) { msg += "s at positions " + indexRange + ": "; } else { msg += (count > 1 ? "s: " : ": "); } throw new MissingParameterException(CommandLine.this, msg + names); } if (args.isEmpty()) { throw new MissingParameterException(CommandLine.this, optionDescription("", argSpec, 0) + " requires at least " + arity + " values, but none were specified."); } throw new MissingParameterException(CommandLine.this, optionDescription("", argSpec, 0) + " requires at least " + arity + " values, but only " + args.size() + " were specified: " + reverse(args)); } } private String trim(String value) { return unquote(value); } private String unquote(String value) { return value == null ? null : (value.length() > 1 && value.startsWith("\"") && value.endsWith("\"")) ? value.substring(1, value.length() - 1) : value; } } private static class PositionalParametersSorter implements Comparator<ArgSpec<?>> { private static final Range OPTION_INDEX = new Range(0, 0, false, true, "0"); public int compare(ArgSpec<?> p1, ArgSpec<?> p2) { int result = index(p1).compareTo(index(p2)); return (result == 0) ? p1.arity().compareTo(p2.arity()) : result; } private Range index(ArgSpec<?> arg) { return arg.isOption() ? OPTION_INDEX : ((PositionalParamSpec) arg).index(); } } /** * Inner class to group the built-in {@link ITypeConverter} implementations. 
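* <p>Applications are not limited to these built-ins: a custom converter can be registered via
* {@link CommandLine#registerConverter(Class, ITypeConverter)}. A minimal illustrative sketch (the
* {@code java.util.Locale} converter below is a hypothetical example, not one of the built-ins):</p>
* <pre>{@code
* commandLine.registerConverter(java.util.Locale.class, new ITypeConverter<java.util.Locale>() {
*     public java.util.Locale convert(String s) { return new java.util.Locale(s); }
* });
* }</pre>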
*/ private static class BuiltIn {
static class StringConverter implements ITypeConverter<String> { public String convert(String value) { return value; } }
static class StringBuilderConverter implements ITypeConverter<StringBuilder> { public StringBuilder convert(String value) { return new StringBuilder(value); } }
static class CharSequenceConverter implements ITypeConverter<CharSequence> { public String convert(String value) { return value; } }
/** Converts text to a {@code Byte} by delegating to {@link Byte#valueOf(String)}.*/ static class ByteConverter implements ITypeConverter<Byte> { public Byte convert(String value) { return Byte.valueOf(value); } }
/** Converts {@code "true"} or {@code "false"} to a {@code Boolean}. Other values result in a ParameterException.*/ static class BooleanConverter implements ITypeConverter<Boolean> { public Boolean convert(String value) { if ("true".equalsIgnoreCase(value) || "false".equalsIgnoreCase(value)) { return Boolean.parseBoolean(value); } else { throw new TypeConversionException("'" + value + "' is not a boolean"); } } }
static class CharacterConverter implements ITypeConverter<Character> { public Character convert(String value) { if (value.length() > 1) { throw new TypeConversionException("'" + value + "' is not a single character"); } return value.charAt(0); } }
/** Converts text to a {@code Short} by delegating to {@link Short#valueOf(String)}.*/ static class ShortConverter implements ITypeConverter<Short> { public Short convert(String value) { return Short.valueOf(value); } }
/** Converts text to an {@code Integer} by delegating to {@link Integer#valueOf(String)}.*/ static class IntegerConverter implements ITypeConverter<Integer> { public Integer convert(String value) { return Integer.valueOf(value); } }
/** Converts text to a {@code Long} by delegating to {@link Long#valueOf(String)}.*/ static class LongConverter implements ITypeConverter<Long> { public Long convert(String value) { return Long.valueOf(value); } }
static class FloatConverter implements ITypeConverter<Float> { public Float convert(String value) { return Float.valueOf(value); } }
static class DoubleConverter implements ITypeConverter<Double> { public Double convert(String value) { return Double.valueOf(value); } }
static class FileConverter implements ITypeConverter<File> { public File convert(String value) { return new File(value); } }
static class URLConverter implements ITypeConverter<URL> { public URL convert(String value) throws MalformedURLException { return new URL(value); } }
static class URIConverter implements ITypeConverter<URI> { public URI convert(String value) throws URISyntaxException { return new URI(value); } }
/** Converts text in {@code yyyy-MM-dd} format to a {@code java.util.Date}. ParameterException on failure. */ static class ISO8601DateConverter implements ITypeConverter<Date> { public Date convert(String value) { try { return new SimpleDateFormat("yyyy-MM-dd").parse(value); } catch (ParseException e) { throw new TypeConversionException("'" + value + "' is not a yyyy-MM-dd date"); } } }
/** Converts text in any of the following formats to a {@code java.sql.Time}: {@code HH:mm}, {@code HH:mm:ss}, * {@code HH:mm:ss.SSS}, {@code HH:mm:ss,SSS}. Other formats result in a ParameterException.
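* <p>For example, {@code "23:59"}, {@code "23:59:58"}, {@code "23:59:58.123"} and {@code "23:59:58,123"} would all be accepted (illustrative values).</p>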
*/ static class ISO8601TimeConverter implements ITypeConverter<Time> { public Time convert(String value) { try { if (value.length() <= 5) { return new Time(new SimpleDateFormat("HH:mm").parse(value).getTime()); } else if (value.length() <= 8) { return new Time(new SimpleDateFormat("HH:mm:ss").parse(value).getTime()); } else if (value.length() <= 12) { try { return new Time(new SimpleDateFormat("HH:mm:ss.SSS").parse(value).getTime()); } catch (ParseException e2) { return new Time(new SimpleDateFormat("HH:mm:ss,SSS").parse(value).getTime()); } } } catch (ParseException ignored) { // ignored because we throw a ParameterException below } throw new TypeConversionException("'" + value + "' is not a HH:mm[:ss[.SSS]] time"); } } static class BigDecimalConverter implements ITypeConverter<BigDecimal> { public BigDecimal convert(String value) { return new BigDecimal(value); } } static class BigIntegerConverter implements ITypeConverter<BigInteger> { public BigInteger convert(String value) { return new BigInteger(value); } } static class CharsetConverter implements ITypeConverter<Charset> { public Charset convert(String s) { return Charset.forName(s); } } /** Converts text to a {@code InetAddress} by delegating to {@link InetAddress#getByName(String)}. */ static class InetAddressConverter implements ITypeConverter<InetAddress> { public InetAddress convert(String s) throws Exception { return InetAddress.getByName(s); } } static class PatternConverter implements ITypeConverter<Pattern> { public Pattern convert(String s) { return Pattern.compile(s); } } static class UUIDConverter implements ITypeConverter<UUID> { public UUID convert(String s) throws Exception { return UUID.fromString(s); } } static class CurrencyConverter implements ITypeConverter<Currency> { public Currency convert(String s) throws Exception { return Currency.getInstance(s); } } static class TimeZoneConverter implements ITypeConverter<TimeZone> { public TimeZone convert(String s) throws Exception { return TimeZone.getTimeZone(s); } } static class ByteOrderConverter implements ITypeConverter<ByteOrder> { public ByteOrder convert(String s) throws Exception { if (s.equalsIgnoreCase(ByteOrder.BIG_ENDIAN.toString())) { return ByteOrder.BIG_ENDIAN; } if (s.equalsIgnoreCase(ByteOrder.LITTLE_ENDIAN.toString())) { return ByteOrder.LITTLE_ENDIAN; } throw new TypeConversionException("'" + s + "' is not a valid ByteOrder"); } } static class ClassConverter implements ITypeConverter<Class<?>> { public Class<?> convert(String s) throws Exception { return Class.forName(s); } } static class NetworkInterfaceConverter implements ITypeConverter<NetworkInterface> { public NetworkInterface convert(String s) throws Exception { try { InetAddress addr = new InetAddressConverter().convert(s); return NetworkInterface.getByInetAddress(addr); } catch (Exception ex) { try { return NetworkInterface.getByName(s); } catch (Exception ex2) { throw new TypeConversionException("'" + s + "' is not an InetAddress or NetworkInterface name"); } } } } static class ConnectionConverter implements ITypeConverter<Connection> { public Connection convert(String s) throws Exception { return DriverManager.getConnection(s); } } static class DriverConverter implements ITypeConverter<Driver> { public Driver convert(String s) throws Exception { return DriverManager.getDriver(s); } } static class TimestampConverter implements ITypeConverter<Timestamp> { public Timestamp convert(String s) throws Exception { return Timestamp.valueOf(s); } } static void registerIfAvailable(Map<Class<?>, 
ITypeConverter<?>> registry, Tracer tracer, String fqcn, String factoryMethodName, Class<?>... paramTypes) { registerIfAvailable(registry, tracer, fqcn, fqcn, factoryMethodName, paramTypes); } static void registerIfAvailable(Map<Class<?>, ITypeConverter<?>> registry, Tracer tracer, String fqcn, String factoryClass, String factoryMethodName, Class<?>... paramTypes) { try { Class<?> cls = Class.forName(fqcn); Class<?> factory = Class.forName(factoryClass); Method method = factory.getDeclaredMethod(factoryMethodName, paramTypes); registry.put(cls, new ReflectionConverter(method, paramTypes)); } catch (Exception e) { if (!traced.contains(fqcn)) { tracer.debug("Could not register converter for %s: %s%n", fqcn, e.toString()); } traced.add(fqcn); } } static Set<String> traced = new HashSet<String>(); static class ReflectionConverter implements ITypeConverter<Object> { private final Method method; private Class<?>[] paramTypes; public ReflectionConverter(Method method, Class<?>... paramTypes) { this.method = Assert.notNull(method, "method"); this.paramTypes = Assert.notNull(paramTypes, "paramTypes"); } public Object convert(String s) { try { if (paramTypes.length > 1) { return method.invoke(null, s, new String[0]); } else { return method.invoke(null, s); } } catch (Exception e) { throw new TypeConversionException("Unable to convert " + s + " to " + method.getReturnType() + ": " + e.getMessage()); } } } private BuiltIn() {} // private constructor: never instantiate } @Command(subcommands = AutoHelpMixin.HelpCommand.class) static class AutoHelpMixin { @Option(names = {"-h", "--help"}, usageHelp = true, description = "Show this help message and exit.") private boolean helpRequested; @Option(names = {"-V", "--version"}, versionHelp = true, description = "Print version information and exit.") private boolean versionRequested; @Command(name = "help", header = "Displays help information about the specified command", synopsisHeading = "%nUsage: ", description = {"%nWhen no COMMAND is given, the usage help for the main command is displayed.", "If a COMMAND is specified, the help for that command is shown.%n"}) static class HelpCommand implements Runnable { @Option(names = {"-h", "--help"}, usageHelp = true, description = "Show usage help for the help command and exit.") private boolean helpRequested; @Parameters(paramLabel = "COMMAND", description = "The COMMAND to display the usage help message for.") private String[] commands = new String[0]; public void run() { } } } /** * A collection of methods and inner classes that provide fine-grained control over the contents and layout of * the usage help message to display to end users when help is requested or invalid input values were specified. * <h3>Layered API</h3> * <p>The {@link Command} annotation provides the easiest way to customize usage help messages. See * the <a href="https://remkop.github.io/picocli/index.html#_usage_help">Manual</a> for details.</p> * <p>This Help class provides high-level functions to create sections of the usage help message and headings * for these sections. 
Instead of calling the {@link CommandLine#usage(PrintWriter, CommandLine.Help.ColorScheme)} * method, application authors may want to create a custom usage help message by reorganizing sections in a * different order and/or adding custom sections.</p> * <p>Finally, the Help class contains inner classes and interfaces that can be used to create custom help messages.</p> * <h4>IOptionRenderer and IParameterRenderer</h4> * <p>Renders a field annotated with {@link Option} or {@link Parameters} to an array of {@link Text} values. * By default, these values are</p><ul> * <li>mandatory marker character (if the option/parameter is {@link Option#required() required})</li> * <li>short option name (empty for parameters)</li> * <li>comma or empty (empty for parameters)</li> * <li>long option names (the parameter {@link IParamLabelRenderer label} for parameters)</li> * <li>description</li> * </ul> * <p>Other components rely on this ordering.</p> * <h4>Layout</h4> * <p>Delegates to the renderers to create {@link Text} values for the annotated fields, and uses a * {@link TextTable} to display these values in tabular format. Layout is responsible for deciding which values * to display where in the table. By default, Layout shows one option or parameter per table row.</p> * <h4>TextTable</h4> * <p>Responsible for spacing out {@link Text} values according to the {@link Column} definitions the table was * created with. Columns have a width, indentation, and an overflow policy that decides what to do if a value is * longer than the column's width.</p> * <h4>Text</h4> * <p>Encapsulates rich text with styles and colors in a way that other components like {@link TextTable} are * unaware of the embedded ANSI escape codes.</p> */
public static class Help {
/** Constant String holding the default program name, value defined in {@link CommandSpec#DEFAULT_COMMAND_NAME}. */ protected static final String DEFAULT_COMMAND_NAME = CommandSpec.DEFAULT_COMMAND_NAME;
/** Constant String holding the default string that separates options from option parameters, value defined in {@link CommandSpec#DEFAULT_SEPARATOR}. */ protected static final String DEFAULT_SEPARATOR = CommandSpec.DEFAULT_SEPARATOR;
private final static int usageHelpWidth = 80; private final static int optionsColumnWidth = 2 + 2 + 1 + 24; private final CommandSpec commandSpec; private final ColorScheme colorScheme; private final Map<String, Help> commands = new LinkedHashMap<String, Help>(); private IParamLabelRenderer parameterLabelRenderer;
/** Constructs a new {@code Help} instance with a default color scheme, initialized from annotations * on the specified class and superclasses. * @param command the annotated object to create usage help for */ public Help(Object command) { this(command, Ansi.AUTO); }
/** Constructs a new {@code Help} instance with a default color scheme, initialized from annotations * on the specified class and superclasses. * @param command the annotated object to create usage help for * @param ansi whether to emit ANSI escape codes or not */ public Help(Object command, Ansi ansi) { this(command, defaultColorScheme(ansi)); }
/** Constructs a new {@code Help} instance with the specified color scheme, initialized from annotations * on the specified class and superclasses.
* @param command the annotated object to create usage help for * @param colorScheme the color scheme to use * @deprecated use {@link eu.interiot.gateway.commons.api.command.CommandLine.Help#Help(eu.interiot.gateway.commons.api.command.CommandLine.CommandSpec, eu.interiot.gateway.commons.api.command.CommandLine.Help.ColorScheme)} */ @Deprecated public Help(Object command, ColorScheme colorScheme) { this(CommandSpecBuilder.build(command, new DefaultFactory()), colorScheme); } /** Constructs a new {@code Help} instance with the specified color scheme, initialized from annotatations * on the specified class and superclasses. * @param commandSpec the command model to create usage help for * @param colorScheme the color scheme to use */ public Help(CommandSpec commandSpec, ColorScheme colorScheme) { this.commandSpec = Assert.notNull(commandSpec, "commandSpec"); this.addAllSubcommands(commandSpec.subcommands()); this.colorScheme = Assert.notNull(colorScheme, "colorScheme").applySystemProperties(); parameterLabelRenderer = createDefaultParamLabelRenderer(); // uses help separator } /** Returns the {@code CommandSpec} model that this Help was constructed with. * @since 3.0 */ CommandSpec commandSpec() { return commandSpec; } /** Returns the {@code ColorScheme} model that this Help was constructed with. * @since 3.0 */ public ColorScheme colorScheme() { return colorScheme; } /** Option and positional parameter value label renderer used for the synopsis line(s) and the option list. * By default initialized to the result of {@link #createDefaultParamLabelRenderer()}, which takes a snapshot * of the {@link CommandSpec#separator()} at construction time. If the separator is modified after Help construction, you * may need to re-initialize this field by calling {@link #createDefaultParamLabelRenderer()} again. */ public IParamLabelRenderer parameterLabelRenderer() {return parameterLabelRenderer;} /** Registers all specified subcommands with this Help. * @param commands maps the command names to the associated CommandLine object * @return this Help instance (for method chaining) * @see CommandLine#getSubcommands() */ public Help addAllSubcommands(Map<String, CommandLine> commands) { if (commands != null) { for (Map.Entry<String, CommandLine> entry : commands.entrySet()) { addSubcommand(entry.getKey(), entry.getValue()); } } return this; } /** Registers the specified subcommand with this Help. * @param commandName the name of the subcommand to display in the usage message * @param commandLine the {@code CommandLine} object to get more information from * @return this Help instance (for method chaining) */ Help addSubcommand(String commandName, CommandLine commandLine) { commands.put(commandName, new Help(commandLine.commandSpec)); return this; } /** Registers the specified subcommand with this Help. 
* @param commandName the name of the subcommand to display in the usage message * @param command the {@code CommandSpec} or {@code @Command} annotated object to get more information from * @return this Help instance (for method chaining) * @deprecated use {@link #addSubcommand(String, CommandLine)} instead */ @Deprecated public Help addSubcommand(String commandName, Object command) { commands.put(commandName, new Help(CommandSpecBuilder.build(command, commandSpec.commandLine().factory))); return this; } List<OptionSpec> options() { return commandSpec.options(); } List<PositionalParamSpec> positionalParameters() { return commandSpec.positionalParameters(); } String commandName() { return commandSpec.name(); } /** Returns a synopsis for the command without reserving space for the synopsis heading. * @return a synopsis * @see #abbreviatedSynopsis() * @see #detailedSynopsis(Comparator, boolean) * @deprecated use {@link #synopsis(int)} instead */ @Deprecated public String synopsis() { return synopsis(0); } /** * Returns a synopsis for the command, reserving the specified space for the synopsis heading. * @param synopsisHeadingLength the length of the synopsis heading that will be displayed on the same line * @return a synopsis * @see #abbreviatedSynopsis() * @see #detailedSynopsis(Comparator, boolean) * @see #synopsisHeading */ public String synopsis(int synopsisHeadingLength) { if (!empty(commandSpec.customSynopsis())) { return customSynopsis(); } return commandSpec.abbreviateSynopsis() ? abbreviatedSynopsis() : detailedSynopsis(synopsisHeadingLength, createShortOptionArityAndNameComparator(), true); } /** Generates a generic synopsis like {@code <command name> [OPTIONS] [PARAM1 [PARAM2]...]}, omitting parts * that don't apply to the command (e.g., does not show [OPTIONS] if the command has no options). * @return a generic synopsis */ public String abbreviatedSynopsis() { StringBuilder sb = new StringBuilder(); if (!commandSpec.optionsMap().isEmpty()) { // only show if annotated object actually has options sb.append(" [OPTIONS]"); } // sb.append(" [--] "); // implied for (PositionalParamSpec positionalParam : commandSpec.positionalParameters()) { if (!positionalParam.hidden()) { sb.append(' ').append(parameterLabelRenderer().renderParameterLabel(positionalParam, ansi(), colorScheme.parameterStyles)); } } return colorScheme.commandText(commandSpec.name()).toString() + (sb.toString()) + System.getProperty("line.separator"); } /** Generates a detailed synopsis message showing all options and parameters. Follows the unix convention of * showing optional options and parameters in square brackets ({@code [ ]}). * @param optionSort comparator to sort options or {@code null} if options should not be sorted * @param clusterBooleanOptions {@code true} if boolean short options should be clustered into a single string * @return a detailed synopsis * @deprecated use {@link #detailedSynopsis(int, Comparator, boolean)} instead. */ @Deprecated public String detailedSynopsis(Comparator<OptionSpec> optionSort, boolean clusterBooleanOptions) { return detailedSynopsis(0, optionSort, clusterBooleanOptions); } /** Generates a detailed synopsis message showing all options and parameters. Follows the unix convention of * showing optional options and parameters in square brackets ({@code [ ]}).
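* <p>A minimal sketch of a typical invocation (illustrative; assumes a {@code CommandSpec} named
* {@code spec} is in scope):</p>
* <pre>{@code
* Help help = new Help(spec, Help.defaultColorScheme(Help.Ansi.AUTO));
* String synopsis = help.detailedSynopsis(
*         help.synopsisHeadingLength(),                    // columns already used by "Usage: "
*         Help.createShortOptionArityAndNameComparator(),  // sort by arity, then by option name
*         true);                                           // cluster boolean short options, e.g. -xvf
* }</pre>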
* @param synopsisHeadingLength the length of the synopsis heading that will be displayed on the same line * @param optionSort comparator to sort options or {@code null} if options should not be sorted * @param clusterBooleanOptions {@code true} if boolean short options should be clustered into a single string * @return a detailed synopsis * @since 3.0 */ public String detailedSynopsis(int synopsisHeadingLength, Comparator<OptionSpec> optionSort, boolean clusterBooleanOptions) { Text optionText = ansi().new Text(0); List<OptionSpec> options = new ArrayList<OptionSpec>(commandSpec.options()); // iterate in declaration order if (optionSort != null) { Collections.sort(options, optionSort);// iterate in specified sort order } if (clusterBooleanOptions) { // cluster all short boolean options into a single string List<OptionSpec> booleanOptions = new ArrayList<OptionSpec>(); StringBuilder clusteredRequired = new StringBuilder("-"); StringBuilder clusteredOptional = new StringBuilder("-"); for (OptionSpec option : options) { if (option.hidden()) { continue; } if (option.type() == boolean.class || option.type() == Boolean.class) { String shortestName = ShortestFirst.sort(option.names())[0]; if (shortestName.length() == 2 && shortestName.startsWith("-")) { booleanOptions.add(option); if (option.required()) { clusteredRequired.append(shortestName.substring(1)); } else { clusteredOptional.append(shortestName.substring(1)); } } } } options.removeAll(booleanOptions); if (clusteredRequired.length() > 1) { // initial length was 1 optionText = optionText.append(" ").append(colorScheme.optionText(clusteredRequired.toString())); } if (clusteredOptional.length() > 1) { // initial length was 1 optionText = optionText.append(" [").append(colorScheme.optionText(clusteredOptional.toString())).append("]"); } } for (OptionSpec option : options) { if (!option.hidden()) { if (option.required()) { optionText = appendOptionSynopsis(optionText, option, ShortestFirst.sort(option.names())[0], " ", ""); if (option.isMultiValue()) { optionText = appendOptionSynopsis(optionText, option, ShortestFirst.sort(option.names())[0], " [", "]..."); } } else { optionText = appendOptionSynopsis(optionText, option, ShortestFirst.sort(option.names())[0], " [", "]"); if (option.isMultiValue()) { optionText = optionText.append("..."); } } } } for (PositionalParamSpec positionalParam : commandSpec.positionalParameters()) { if (!positionalParam.hidden()) { optionText = optionText.append(" "); Text label = parameterLabelRenderer().renderParameterLabel(positionalParam, colorScheme.ansi(), colorScheme.parameterStyles); optionText = optionText.append(label); } } // Fix for #142: first line of synopsis overshoots max. 
characters String commandName = commandSpec.name(); int firstColumnLength = commandName.length() + synopsisHeadingLength; // synopsis heading ("Usage: ") may be on the same line, so adjust column width TextTable textTable = new TextTable(ansi(), firstColumnLength, usageHelpWidth - firstColumnLength); textTable.indentWrappedLines = 1; // don't worry about first line: options (2nd column) always start with a space // right-adjust the command name by length of synopsis heading Text PADDING = Ansi.OFF.new Text(stringOf('X', synopsisHeadingLength)); textTable.addRowValues(new Text[] {PADDING.append(colorScheme.commandText(commandName)), optionText}); return textTable.toString().substring(synopsisHeadingLength); // cut off leading synopsis heading spaces } private Text appendOptionSynopsis(Text optionText, OptionSpec option, String optionName, String prefix, String suffix) { Text optionParamText = parameterLabelRenderer().renderParameterLabel(option, colorScheme.ansi(), colorScheme.optionParamStyles); return optionText.append(prefix) .append(colorScheme.optionText(optionName)) .append(optionParamText) .append(suffix); } /** Returns the number of characters the synopsis heading will take on the same line as the synopsis. * @return the number of characters the synopsis heading will take on the same line as the synopsis. * @see #detailedSynopsis(int, Comparator, boolean) */ public int synopsisHeadingLength() { String[] lines = Ansi.OFF.new Text(commandSpec.synopsisHeading()).toString().split("\\r?\\n|\\r|%n", -1); return lines[lines.length - 1].length(); } /** * <p>Returns a description of the {@linkplain Option options} supported by the application. * This implementation {@linkplain #createShortOptionNameComparator() sorts options alphabetically}, and shows * only the {@linkplain Option#hidden() non-hidden} options in a {@linkplain TextTable tabular format} * using the {@linkplain #createDefaultOptionRenderer() default renderer} and {@linkplain Layout default layout}.</p> * @return the fully formatted option list * @see #optionList(Layout, Comparator, IParamLabelRenderer) */ public String optionList() { Comparator<OptionSpec> sortOrder = commandSpec.sortOptions() ? createShortOptionNameComparator() : null; return optionList(createDefaultLayout(), sortOrder, parameterLabelRenderer()); } /** Sorts all {@code Options} with the specified {@code comparator} (if the comparator is non-{@code null}), * then {@linkplain Layout#addOption(CommandLine.OptionSpec, CommandLine.Help.IParamLabelRenderer) adds} all non-hidden options to the * specified TextTable and returns the result of TextTable.toString(). * @param layout responsible for rendering the option list * @param optionSort determines in what order {@code Options} should be listed. Declared order if {@code null} * @param valueLabelRenderer used for options with a parameter * @return the fully formatted option list * @since 3.0 */ public String optionList(Layout layout, Comparator<OptionSpec> optionSort, IParamLabelRenderer valueLabelRenderer) { List<OptionSpec> options = new ArrayList<OptionSpec>(commandSpec.options()); // options are stored in order of declaration if (optionSort != null) { Collections.sort(options, optionSort); // default: sort options ABC } layout.addOptions(options, valueLabelRenderer); return layout.toString(); } /** * Returns the section of the usage help message that lists the parameters with their descriptions. 
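* <p>Like the other section methods, this can be combined with the heading methods to assemble a
* custom usage message; a rough sketch (illustrative; {@code help} is a {@code Help} instance):</p>
* <pre>{@code
* StringBuilder sb = new StringBuilder()
*         .append(help.headerHeading()).append(help.header())
*         .append(help.synopsisHeading())                       // e.g. "Usage: "
*         .append(help.synopsis(help.synopsisHeadingLength()))
*         .append(help.parameterListHeading()).append(help.parameterList())
*         .append(help.optionListHeading()).append(help.optionList());
* }</pre>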
* @return the section of the usage help message that lists the parameters */ public String parameterList() { return parameterList(createDefaultLayout(), parameterLabelRenderer()); } /** * Returns the section of the usage help message that lists the parameters with their descriptions. * @param layout the layout to use * @param paramLabelRenderer for rendering parameter names * @return the section of the usage help message that lists the parameters */ public String parameterList(Layout layout, IParamLabelRenderer paramLabelRenderer) { layout.addPositionalParameters(commandSpec.positionalParameters(), paramLabelRenderer); return layout.toString(); } private static String heading(Ansi ansi, String values, Object... params) { StringBuilder sb = join(ansi, new String[] {values}, new StringBuilder(), params); String result = sb.toString(); result = result.endsWith(System.getProperty("line.separator")) ? result.substring(0, result.length() - System.getProperty("line.separator").length()) : result; return result + new String(spaces(countTrailingSpaces(values))); } private static char[] spaces(int length) { char[] result = new char[length]; Arrays.fill(result, ' '); return result; } private static int countTrailingSpaces(String str) { if (str == null) {return 0;} int trailingSpaces = 0; for (int i = str.length() - 1; i >= 0 && str.charAt(i) == ' '; i--) { trailingSpaces++; } return trailingSpaces; } /** Formats each of the specified values and appends it to the specified StringBuilder. * @param ansi whether the result should contain ANSI escape codes or not * @param values the values to format and append to the StringBuilder * @param sb the StringBuilder to collect the formatted strings * @param params the parameters to pass to the format method when formatting each value * @return the specified StringBuilder */ public static StringBuilder join(Ansi ansi, String[] values, StringBuilder sb, Object... params) { if (values != null) { TextTable table = new TextTable(ansi, usageHelpWidth); table.indentWrappedLines = 0; for (String summaryLine : values) { Text[] lines = ansi.new Text(format(summaryLine, params)).splitLines(); for (Text line : lines) { table.addRowValues(line); } } table.toString(sb); } return sb; } private static String format(String formatString, Object... params) { return formatString == null ? "" : String.format(formatString, params); } /** Returns command custom synopsis as a string. A custom synopsis can be zero or more lines, and can be * specified declaratively with the {@link Command#customSynopsis()} annotation attribute or programmatically * on the command's {@link CommandSpec} model. * @param params Arguments referenced by the format specifiers in the synopsis strings * @return the custom synopsis lines combined into a single String (which may be empty) */ public String customSynopsis(Object... params) { return join(ansi(), commandSpec.customSynopsis(), new StringBuilder(), params).toString(); } /** Returns command description text as a string. Description text can be zero or more lines, and can be specified * declaratively with the {@link Command#description()} annotation attribute or programmatically by * setting it on the command's {@link CommandSpec} model. * @param params Arguments referenced by the format specifiers in the description strings * @return the description lines combined into a single String (which may be empty) */ public String description(Object...
params) { return join(ansi(), commandSpec.description(), new StringBuilder(), params).toString(); } /** Returns the command header text as a string. Header text can be zero or more lines, and can be specified * declaratively with the {@link Command#header()} annotation attribute or programmatically by * setting it on the command's {@link CommandSpec} model. * @param params Arguments referenced by the format specifiers in the header strings * @return the header lines combined into a single String (which may be empty) */ public String header(Object... params) { return join(ansi(), commandSpec.header(), new StringBuilder(), params).toString(); } /** Returns command footer text as a string. Footer text can be zero or more lines, and can be specified * declaratively with the {@link Command#footer()} annotation attribute or programmatically by * setting it on the command's {@link CommandSpec} model. * @param params Arguments referenced by the format specifiers in the footer strings * @return the footer lines combined into a single String (which may be empty) */ public String footer(Object... params) { return join(ansi(), commandSpec.footer(), new StringBuilder(), params).toString(); } /** Returns the text displayed before the header text; the result of {@code String.format(headerHeading, params)}. * @param params the parameters to use to format the header heading * @return the formatted header heading */ public String headerHeading(Object... params) { return heading(ansi(), commandSpec.headerHeading(), params); } /** Returns the text displayed before the synopsis text; the result of {@code String.format(synopsisHeading, params)}. * @param params the parameters to use to format the synopsis heading * @return the formatted synopsis heading */ public String synopsisHeading(Object... params) { return heading(ansi(), commandSpec.synopsisHeading(), params); } /** Returns the text displayed before the description text; an empty string if there is no description, * otherwise the result of {@code String.format(descriptionHeading, params)}. * @param params the parameters to use to format the description heading * @return the formatted description heading */ public String descriptionHeading(Object... params) { return empty(commandSpec.descriptionHeading()) ? "" : heading(ansi(), commandSpec.descriptionHeading(), params); } /** Returns the text displayed before the positional parameter list; an empty string if there are no positional * parameters, otherwise the result of {@code String.format(parameterListHeading, params)}. * @param params the parameters to use to format the parameter list heading * @return the formatted parameter list heading */ public String parameterListHeading(Object... params) { return commandSpec.positionalParameters().isEmpty() ? "" : heading(ansi(), commandSpec.parameterListHeading(), params); } /** Returns the text displayed before the option list; an empty string if there are no options, * otherwise the result of {@code String.format(optionListHeading, params)}. * @param params the parameters to use to format the option list heading * @return the formatted option list heading */ public String optionListHeading(Object... params) { return commandSpec.optionsMap().isEmpty() ? "" : heading(ansi(), commandSpec.optionListHeading(), params); } /** Returns the text displayed before the command list; an empty string if there are no commands, * otherwise the result of {@code String.format(commandListHeading, params)}.
* @param params the parameters to use to format the command list heading * @return the formatted command list heading */ public String commandListHeading(Object... params) { return commands.isEmpty() ? "" : heading(ansi(), commandSpec.commandListHeading(), params); } /** Returns the text displayed before the footer text; the result of {@code String.format(footerHeading, params)}. * @param params the parameters to use to format the footer heading * @return the formatted footer heading */ public String footerHeading(Object... params) { return heading(ansi(), commandSpec.footerHeading(), params); } /** Returns a 2-column list with command names and the first line of their header or (if absent) description. * @return a usage help section describing the added commands */ public String commandList() { if (commands.isEmpty()) { return ""; } int commandLength = maxLength(commands.keySet()); Help.TextTable textTable = new Help.TextTable(ansi(), new Help.Column(commandLength + 2, 2, Help.Column.Overflow.SPAN), new Help.Column(usageHelpWidth - (commandLength + 2), 2, Help.Column.Overflow.WRAP)); for (Map.Entry<String, Help> entry : commands.entrySet()) { Help help = entry.getValue(); CommandSpec command = help.commandSpec; String header = command.header() != null && command.header().length > 0 ? command.header()[0] : (command.description() != null && command.description().length > 0 ? command.description()[0] : ""); textTable.addRowValues(colorScheme.commandText(entry.getKey()), ansi().new Text(header)); } return textTable.toString(); } private static int maxLength(Collection<String> any) { List<String> strings = new ArrayList<String>(any); Collections.sort(strings, Collections.reverseOrder(Help.shortestFirst())); return strings.get(0).length(); } private static String join(String[] names, int offset, int length, String separator) { if (names == null) { return ""; } StringBuilder result = new StringBuilder(); for (int i = offset; i < offset + length; i++) { result.append((i > offset) ? separator : "").append(names[i]); } return result.toString(); } private static String stringOf(char chr, int length) { char[] buff = new char[length]; Arrays.fill(buff, chr); return new String(buff); } /** Returns a {@code Layout} instance configured with the user preferences captured in this Help instance. * @return a Layout */ public Layout createDefaultLayout() { return new Layout(colorScheme, new TextTable(colorScheme.ansi()), createDefaultOptionRenderer(), createDefaultParameterRenderer()); } /** Returns a new default OptionRenderer which converts {@link OptionSpec Options} to five columns of text to match * the default {@linkplain TextTable TextTable} column layout. 
The first row of values looks like this: * <ol> * <li>the required option marker</li> * <li>2-character short option name (or empty string if no short option exists)</li> * <li>comma separator (only if both short option and long option exist, empty string otherwise)</li> * <li>comma-separated string with long option name(s)</li> * <li>first element of the {@link OptionSpec#description()} array</li> * </ol> * <p>Following this, there will be one row for each of the remaining elements of the {@link * OptionSpec#description()} array, and these rows look like {@code {"", "", "", "", option.description()[i]}}.</p> * <p>If configured, this option renderer adds an additional row to display the default field value.</p> * @return a new default OptionRenderer */ public IOptionRenderer createDefaultOptionRenderer() { DefaultOptionRenderer result = new DefaultOptionRenderer(); result.requiredMarker = String.valueOf(commandSpec.requiredOptionMarker()); if (commandSpec.showDefaultValues()) { result.commandSpec = this.commandSpec; } return result; } /** Returns a new minimal OptionRenderer which converts {@link OptionSpec Options} to a single row with two columns * of text: an option name and a description. If multiple names or descriptions exist, the first value is used. * @return a new minimal OptionRenderer */ public static IOptionRenderer createMinimalOptionRenderer() { return new MinimalOptionRenderer(); } /** Returns a new default ParameterRenderer which converts {@linkplain PositionalParamSpec positional parameters} to five columns of * text to match the default {@linkplain TextTable TextTable} column layout. The first row of values looks like this: * <ol> * <li>the required option marker (if the parameter's arity requires at least one value)</li> * <li>empty string </li> * <li>empty string </li> * <li>parameter(s) label as rendered by the {@link IParamLabelRenderer}</li> * <li>first element of the {@link PositionalParamSpec#description()} array</li> * </ol> * <p>Following this, there will be one row for each of the remaining elements of the {@link * PositionalParamSpec#description()} array, and these rows look like {@code {"", "", "", "", param.description()[i]}}.</p> * <p>If configured, this parameter renderer adds an additional row to display the default field value.</p> * @return a new default ParameterRenderer */ public IParameterRenderer createDefaultParameterRenderer() { DefaultParameterRenderer result = new DefaultParameterRenderer(); result.requiredMarker = String.valueOf(commandSpec.requiredOptionMarker()); return result; } /** Returns a new minimal ParameterRenderer which converts {@linkplain PositionalParamSpec positional parameters} * to a single row with two columns of text: a parameter label and a description. If multiple descriptions exist, the first value is used. * @return a new minimal ParameterRenderer */ public static IParameterRenderer createMinimalParameterRenderer() { return new MinimalParameterRenderer(); } /** Returns a value renderer that returns the {@code paramLabel} if defined or the field name otherwise.
* @return a new minimal ParamLabelRenderer */ public static IParamLabelRenderer createMinimalParamLabelRenderer() { return new IParamLabelRenderer() { public Text renderParameterLabel(ArgSpec<?> argSpec, Ansi ansi, List<IStyle> styles) { return ansi.apply(argSpec.paramLabel(), styles); } public String separator() { return ""; } }; } /** Returns a new default value renderer that separates option parameters from their option name * with the specified separator string, surrounds optional parameters with {@code '['} and {@code ']'} * characters and uses ellipses ("...") to indicate that any number of values is allowed. * @return a new default ParamLabelRenderer */ public IParamLabelRenderer createDefaultParamLabelRenderer() { return new DefaultParamLabelRenderer(commandSpec); } /** Sorts {@link OptionSpec OptionSpecs} by their option name in case-insensitive alphabetic order. If an * option has multiple names, the shortest name is used for the sorting. Help options follow non-help options. * @return a comparator that sorts OptionSpecs by their option name in case-insensitive alphabetic order */ public static Comparator<OptionSpec> createShortOptionNameComparator() { return new SortByShortestOptionNameAlphabetically(); } /** Sorts {@link OptionSpec OptionSpecs} by their option {@linkplain Range#max max arity} first, by * {@linkplain Range#min min arity} next, and by {@linkplain #createShortOptionNameComparator() option name} last. * @return a comparator that sorts OptionSpecs by arity first, then their option name */ public static Comparator<OptionSpec> createShortOptionArityAndNameComparator() { return new SortByOptionArityAndNameAlphabetically(); } /** Sorts short strings before longer strings. * @return a comparator that sorts short strings before longer strings */ public static Comparator<String> shortestFirst() { return new ShortestFirst(); } /** Returns whether ANSI escape codes are enabled or not. * @return whether ANSI escape codes are enabled or not */ public Ansi ansi() { return colorScheme.ansi; } /** Controls the visibility of certain aspects of the usage help message. */ public enum Visibility { ALWAYS, NEVER, ON_DEMAND } /** When customizing online help for {@link OptionSpec Option} details, a custom {@code IOptionRenderer} can be * used to create a textual representation of an Option in a tabular format: one or more rows, each containing * one or more columns. The {@link Layout Layout} is responsible for placing these text values in the * {@link TextTable TextTable}. */ public interface IOptionRenderer { /** * Returns a text representation of the specified option and its parameter(s) if any. * @param option the command line option to show online usage help for * @param parameterLabelRenderer responsible for rendering option parameters to text * @param scheme color scheme for applying ansi color styles to options and option parameters * @return a 2-dimensional array of text values: one or more rows, each containing one or more columns * @since 3.0 */ Text[][] render(OptionSpec option, IParamLabelRenderer parameterLabelRenderer, ColorScheme scheme); } /** The DefaultOptionRenderer converts {@link OptionSpec Options} to five columns of text to match the default * {@linkplain TextTable TextTable} column layout.
The first row of values looks like this: * <ol> * <li>the required option marker (if the option is required)</li> * <li>2-character short option name (or empty string if no short option exists)</li> * <li>comma separator (only if both short option and long option exist, empty string otherwise)</li> * <li>comma-separated string with long option name(s)</li> * <li>first element of the {@link OptionSpec#description()} array</li> * </ol> * <p>Following this, there will be one row for each of the remaining elements of the {@link * OptionSpec#description()} array, and these rows look like {@code {"", "", "", "", option.description()[i]}}.</p> */ static class DefaultOptionRenderer implements IOptionRenderer { public String requiredMarker = " "; public CommandSpec commandSpec; private String sep; private boolean showDefault; public Text[][] render(OptionSpec option, IParamLabelRenderer paramLabelRenderer, ColorScheme scheme) { String[] names = ShortestFirst.sort(option.names()); int shortOptionCount = names[0].length() == 2 ? 1 : 0; String shortOption = shortOptionCount > 0 ? names[0] : ""; sep = shortOptionCount > 0 && names.length > 1 ? "," : ""; String longOption = join(names, shortOptionCount, names.length - shortOptionCount, ", "); Text longOptionText = createLongOptionText(option, paramLabelRenderer, scheme, longOption); showDefault = option.showDefaultValue(commandSpec); String requiredOption = option.required() ? requiredMarker : ""; return renderDescriptionLines(option, scheme, requiredOption, shortOption, longOptionText, option.defaultValue()); } private Text createLongOptionText(OptionSpec option, IParamLabelRenderer renderer, ColorScheme scheme, String longOption) { Text paramLabelText = renderer.renderParameterLabel(option, scheme.ansi(), scheme.optionParamStyles); // if no long option, fill in the space between the short option name and the param label value if (paramLabelText.length > 0 && longOption.length() == 0) { sep = renderer.separator(); // #181 paramLabelText may be =LABEL or [=LABEL...]
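// The rendered param label starts with the separator (e.g. "=LABEL"), so remove that first
// separator occurrence from the label here and reuse it as the value of the table's separator
// column instead, keeping this row aligned with rows that do have a long option name.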
int sepStart = paramLabelText.plainString().indexOf(sep); Text prefix = paramLabelText.substring(0, sepStart); paramLabelText = prefix.append(paramLabelText.substring(sepStart + sep.length())); } Text longOptionText = scheme.optionText(longOption); longOptionText = longOptionText.append(paramLabelText); return longOptionText; } private Text[][] renderDescriptionLines(OptionSpec option, ColorScheme scheme, String requiredOption, String shortOption, Text longOptionText, Object defaultValue) { Text EMPTY = Ansi.EMPTY_TEXT; List<Text[]> result = new ArrayList<Text[]>(); Text[] descriptionFirstLines = scheme.ansi().new Text(str(option.description(), 0)).splitLines(); if (descriptionFirstLines.length == 0 || (descriptionFirstLines.length == 1 && descriptionFirstLines[0].plain.length() == 0)) { if (showDefault) { descriptionFirstLines = new Text[]{scheme.ansi().new Text(" Default: " + defaultValue)}; showDefault = false; // don't show the default value twice } else { descriptionFirstLines = new Text[]{ EMPTY }; } } result.add(new Text[] { scheme.optionText(requiredOption), scheme.optionText(shortOption), scheme.ansi().new Text(sep), longOptionText, descriptionFirstLines[0] }); for (int i = 1; i < descriptionFirstLines.length; i++) { result.add(new Text[] { EMPTY, EMPTY, EMPTY, EMPTY, descriptionFirstLines[i] }); } for (int i = 1; i < option.description().length; i++) { Text[] descriptionNextLines = scheme.ansi().new Text(option.description()[i]).splitLines(); for (Text line : descriptionNextLines) { result.add(new Text[] { EMPTY, EMPTY, EMPTY, EMPTY, line }); } } if (showDefault) { result.add(new Text[] { EMPTY, EMPTY, EMPTY, EMPTY, scheme.ansi().new Text(" Default: " + defaultValue) }); } return result.toArray(new Text[result.size()][]); } } /** The MinimalOptionRenderer converts {@link OptionSpec Options} to a single row with two columns of text: an * option name and a description. If multiple names or description lines exist, the first value is used. */ static class MinimalOptionRenderer implements IOptionRenderer { public Text[][] render(OptionSpec option, IParamLabelRenderer parameterLabelRenderer, ColorScheme scheme) { Text optionText = scheme.optionText(option.names()[0]); Text paramLabelText = parameterLabelRenderer.renderParameterLabel(option, scheme.ansi(), scheme.optionParamStyles); optionText = optionText.append(paramLabelText); return new Text[][] {{ optionText, scheme.ansi().new Text(option.description().length == 0 ? "" : option.description()[0]) }}; } } /** The MinimalParameterRenderer converts {@linkplain PositionalParamSpec positional parameters} to a single row with two columns of * text: the parameters label and a description. If multiple description lines exist, the first value is used. */ static class MinimalParameterRenderer implements IParameterRenderer { public Text[][] render(PositionalParamSpec param, IParamLabelRenderer parameterLabelRenderer, ColorScheme scheme) { return new Text[][] {{ parameterLabelRenderer.renderParameterLabel(param, scheme.ansi(), scheme.parameterStyles), scheme.ansi().new Text(param.description().length == 0 ? "" : param.description()[0]) }}; } } /** When customizing online help for {@linkplain PositionalParamSpec positional parameters} details, a custom {@code IParameterRenderer} * can be used to create textual representation of a Parameters field in a tabular format: one or more rows, * each containing one or more columns. The {@link Layout Layout} is responsible for placing these text * values in the {@link TextTable TextTable}. 
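* <p>A minimal custom implementation sketch (illustrative) that renders only the label and the
* first description line in a single row:</p>
* <pre>{@code
* IParameterRenderer oneRowRenderer = new IParameterRenderer() {
*     public Text[][] render(PositionalParamSpec param, IParamLabelRenderer labelRenderer, ColorScheme scheme) {
*         Text label = labelRenderer.renderParameterLabel(param, scheme.ansi(), scheme.parameterStyles);
*         Text desc = scheme.ansi().new Text(param.description().length == 0 ? "" : param.description()[0]);
*         return new Text[][] {{ label, desc }};
*     }
* };
* }</pre>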
*/ public interface IParameterRenderer { /** * Returns a text representation of the specified positional parameter. * @param param the positional parameter to show online usage help for * @param parameterLabelRenderer responsible for rendering parameter labels to text * @param scheme color scheme for applying ansi color styles to positional parameters * @return a 2-dimensional array of text values: one or more rows, each containing one or more columns * @since 3.0 */ Text[][] render(PositionalParamSpec param, IParamLabelRenderer parameterLabelRenderer, ColorScheme scheme); } /** The DefaultParameterRenderer converts {@linkplain PositionalParamSpec positional parameters} to five columns of text to match the * default {@linkplain TextTable TextTable} column layout. The first row of values looks like this: * <ol> * <li>the required option marker (if the parameter's arity requires at least one value)</li> * <li>empty string </li> * <li>empty string </li> * <li>parameter(s) label as rendered by the {@link IParamLabelRenderer}</li> * <li>first element of the {@link PositionalParamSpec#description()} array</li> * </ol> * <p>Following this, there will be one row for each of the remaining elements of the {@link * PositionalParamSpec#description()} array, and these rows look like {@code {"", "", "", "", param.description()[i]}}.</p> */ static class DefaultParameterRenderer implements IParameterRenderer { public String requiredMarker = " "; public Text[][] render(PositionalParamSpec param, IParamLabelRenderer paramLabelRenderer, ColorScheme scheme) { Text label = paramLabelRenderer.renderParameterLabel(param, scheme.ansi(), scheme.parameterStyles); Text requiredParameter = scheme.parameterText(param.arity().min > 0 ? requiredMarker : ""); Text EMPTY = Ansi.EMPTY_TEXT; List<Text[]> result = new ArrayList<Text[]>(); Text[] descriptionFirstLines = scheme.ansi().new Text(str(param.description(), 0)).splitLines(); if (descriptionFirstLines.length == 0) { descriptionFirstLines = new Text[]{ EMPTY }; } result.add(new Text[] { requiredParameter, EMPTY, EMPTY, label, descriptionFirstLines[0] }); for (int i = 1; i < descriptionFirstLines.length; i++) { result.add(new Text[] { EMPTY, EMPTY, EMPTY, EMPTY, descriptionFirstLines[i] }); } for (int i = 1; i < param.description().length; i++) { Text[] descriptionNextLines = scheme.ansi().new Text(param.description()[i]).splitLines(); for (Text line : descriptionNextLines) { result.add(new Text[] { EMPTY, EMPTY, EMPTY, EMPTY, line }); } } return result.toArray(new Text[result.size()][]); } } /** When customizing online usage help for an option parameter or a positional parameter, a custom * {@code IParamLabelRenderer} can be used to render the parameter name or label to a String. */ public interface IParamLabelRenderer { /** Returns a text rendering of the option parameter or positional parameter; returns an empty string * {@code ""} if the option is a boolean and does not take a parameter. * @param argSpec the named or positional parameter with a parameter label * @param ansi determines whether ANSI escape codes should be emitted or not * @param styles the styles to apply to the parameter label * @return a text rendering of the Option parameter or positional parameter * @since 3.0 */ Text renderParameterLabel(ArgSpec<?> argSpec, Ansi ansi, List<IStyle> styles); /** Returns the separator between option name and param label.
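* <p>For example (illustrative): with a {@code "="} separator, an option {@code --file} taking
* one value is rendered as {@code --file=<file>} in the option list.</p>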
* @return the separator between option name and param label */ String separator(); } /** * DefaultParamLabelRenderer separates option parameters from their {@linkplain OptionSpec option names} with a * {@linkplain CommandLine.CommandSpec#separator() separator} string, surrounds optional values * with {@code '['} and {@code ']'} characters and uses ellipses ("...") to indicate that any number of * values is allowed for options or parameters with variable arity. */ static class DefaultParamLabelRenderer implements IParamLabelRenderer { private final CommandSpec commandSpec; /** Constructs a new DefaultParamLabelRenderer with the specified separator string. */ public DefaultParamLabelRenderer(CommandSpec commandSpec) { this.commandSpec = Assert.notNull(commandSpec, "commandSpec"); } public String separator() { return commandSpec.separator(); } public Text renderParameterLabel(ArgSpec<?> argSpec, Ansi ansi, List<IStyle> styles) { Text result = ansi.new Text(""); String sep = argSpec.isOption() ? separator() : ""; Text paramName = ansi.apply(argSpec.paramLabel(), styles); if (!empty(argSpec.splitRegex())) { paramName = paramName.append("[" + argSpec.splitRegex()).append(paramName).append("]..."); } // #194 Range capacity = argSpec.isOption() ? argSpec.arity() : ((PositionalParamSpec)argSpec).capacity(); for (int i = 0; i < capacity.min; i++) { result = result.append(sep).append(paramName); sep = " "; } if (capacity.isVariable) { if (result.length == 0) { // arity="*" or arity="0..*" result = result.append(sep + "[").append(paramName).append("]..."); } else if (!result.plainString().endsWith("...")) { // getSplitRegex param may already end with "..." result = result.append("..."); } } else { sep = result.length == 0 ? (argSpec.isOption() ? separator() : "") : " "; for (int i = capacity.min; i < capacity.max; i++) { if (sep.trim().length() == 0) { result = result.append(sep + "[").append(paramName); } else { result = result.append("[" + sep).append(paramName); } sep = " "; } for (int i = capacity.min; i < capacity.max; i++) { result = result.append("]"); } } return result; } } /** Use a Layout to format usage help text for options and parameters in tabular format. * <p>Delegates to the renderers to create {@link Text} values for the annotated fields, and uses a * {@link TextTable} to display these values in tabular format. Layout is responsible for deciding which values * to display where in the table. By default, Layout shows one option or parameter per table row.</p> * <p>Customize by overriding the {@link #layout(CommandLine.ArgSpec, CommandLine.Help.Ansi.Text[][])} method.</p> * @see IOptionRenderer rendering options to text * @see IParameterRenderer rendering parameters to text * @see TextTable showing values in a tabular format */ public static class Layout { protected final ColorScheme colorScheme; protected final TextTable table; protected IOptionRenderer optionRenderer; protected IParameterRenderer parameterRenderer; /** Constructs a Layout with the specified color scheme, a new default TextTable, the * {@linkplain Help#createDefaultOptionRenderer() default option renderer}, and the * {@linkplain Help#createDefaultParameterRenderer() default parameter renderer}. 
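* <p>A rough usage sketch (illustrative; assumes {@code spec} is a {@code CommandSpec} and
* {@code help} is a {@code Help} instance for it):</p>
* <pre>{@code
* Layout layout = new Layout(Help.defaultColorScheme(Help.Ansi.AUTO));
* layout.addOptions(spec.options(), help.parameterLabelRenderer());
* String optionSection = layout.toString();   // tabular option list
* }</pre>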
* @param colorScheme the color scheme to use for common, auto-generated parts of the usage help message */ public Layout(ColorScheme colorScheme) { this(colorScheme, new TextTable(colorScheme.ansi())); } /** Constructs a Layout with the specified color scheme, the specified TextTable, the * {@linkplain Help#createDefaultOptionRenderer() default option renderer}, and the * {@linkplain Help#createDefaultParameterRenderer() default parameter renderer}. * @param colorScheme the color scheme to use for common, auto-generated parts of the usage help message * @param textTable the TextTable to lay out parts of the usage help message in tabular format */ public Layout(ColorScheme colorScheme, TextTable textTable) { this(colorScheme, textTable, new DefaultOptionRenderer(), new DefaultParameterRenderer()); } /** Constructs a Layout with the specified color scheme, the specified TextTable, the * specified option renderer and the specified parameter renderer. * @param colorScheme the color scheme to use for common, auto-generated parts of the usage help message * @param optionRenderer the object responsible for rendering Options to Text * @param parameterRenderer the object responsible for rendering Parameters to Text * @param textTable the TextTable to lay out parts of the usage help message in tabular format */ public Layout(ColorScheme colorScheme, TextTable textTable, IOptionRenderer optionRenderer, IParameterRenderer parameterRenderer) { this.colorScheme = Assert.notNull(colorScheme, "colorScheme"); this.table = Assert.notNull(textTable, "textTable"); this.optionRenderer = Assert.notNull(optionRenderer, "optionRenderer"); this.parameterRenderer = Assert.notNull(parameterRenderer, "parameterRenderer"); } /** * Copies the specified text values into the correct cells in the {@link TextTable}. This implementation * delegates to {@link TextTable#addRowValues(CommandLine.Help.Ansi.Text...)} for each row of values. * <p>Subclasses may override.</p> * @param argSpec the Option or Parameters * @param cellValues the text values representing the Option/Parameters, to be displayed in tabular form * @since 3.0 */ public void layout(ArgSpec<?> argSpec, Text[][] cellValues) { for (Text[] oneRow : cellValues) { table.addRowValues(oneRow); } } /** Calls {@link #addOption(CommandLine.OptionSpec, CommandLine.Help.IParamLabelRenderer)} for all non-hidden Options in the list. * @param options options to add usage descriptions for * @param paramLabelRenderer object that knows how to render option parameters * @since 3.0 */ public void addOptions(List<OptionSpec> options, IParamLabelRenderer paramLabelRenderer) { for (OptionSpec option : options) { if (!option.hidden()) { addOption(option, paramLabelRenderer); } } } /** * Delegates to the {@link #optionRenderer option renderer} of this layout to obtain * text values for the specified {@link OptionSpec}, and then calls the {@link #layout(CommandLine.ArgSpec, CommandLine.Help.Ansi.Text[][])} * method to write these text values into the correct cells in the TextTable. * @param option the option argument * @param paramLabelRenderer knows how to render option parameters * @since 3.0 */ public void addOption(OptionSpec option, IParamLabelRenderer paramLabelRenderer) { Text[][] values = optionRenderer.render(option, paramLabelRenderer, colorScheme); layout(option, values); } /** Calls {@link #addPositionalParameter(CommandLine.PositionalParamSpec, CommandLine.Help.IParamLabelRenderer)} for all non-hidden Parameters in the list. 
* @param params positional parameters to add usage descriptions for * @param paramLabelRenderer knows how to render option parameters * @since 3.0 */ public void addPositionalParameters(List<PositionalParamSpec> params, IParamLabelRenderer paramLabelRenderer) { for (PositionalParamSpec param : params) { if (!param.hidden()) { addPositionalParameter(param, paramLabelRenderer); } } } /** * Delegates to the {@link #parameterRenderer parameter renderer} of this layout * to obtain text values for the specified {@linkplain PositionalParamSpec positional parameter}, and then calls * {@link #layout(CommandLine.ArgSpec, CommandLine.Help.Ansi.Text[][])} to write these text values into the correct cells in the TextTable. * @param param the positional parameter * @param paramLabelRenderer knows how to render option parameters * @since 3.0 */ public void addPositionalParameter(PositionalParamSpec param, IParamLabelRenderer paramLabelRenderer) { Text[][] values = parameterRenderer.render(param, paramLabelRenderer, colorScheme); layout(param, values); } /** Returns the section of the usage help message accumulated in the TextTable owned by this layout. */ @Override public String toString() { return table.toString(); } } /** Sorts short strings before longer strings. */ static class ShortestFirst implements Comparator<String> { public int compare(String o1, String o2) { return o1.length() - o2.length(); } /** Sorts the specified array of Strings shortest-first and returns it. */ public static String[] sort(String[] names) { Arrays.sort(names, new ShortestFirst()); return names; } } /** Sorts {@code OptionSpec} instances by their name in case-insensitive alphabetic order. If an option has * multiple names, the shortest name is used for the sorting. Help options follow non-help options. */ static class SortByShortestOptionNameAlphabetically implements Comparator<OptionSpec> { public int compare(OptionSpec o1, OptionSpec o2) { if (o1 == null) { return 1; } else if (o2 == null) { return -1; } // options before params String[] names1 = ShortestFirst.sort(o1.names()); String[] names2 = ShortestFirst.sort(o2.names()); int result = names1[0].toUpperCase().compareTo(names2[0].toUpperCase()); // case insensitive sort result = result == 0 ? -names1[0].compareTo(names2[0]) : result; // lower case before upper case return o1.help() == o2.help() ? result : o2.help() ? -1 : 1; // help options come last } } /** Sorts {@code OptionSpec} instances by their max arity first, then their min arity, then delegates to super class. */ static class SortByOptionArityAndNameAlphabetically extends SortByShortestOptionNameAlphabetically { public int compare(OptionSpec o1, OptionSpec o2) { Range arity1 = o1.arity(); Range arity2 = o2.arity(); int result = arity1.max - arity2.max; if (result == 0) { result = arity1.min - arity2.min; } if (result == 0) { // arity is same if (o1.isMultiValue() && !o2.isMultiValue()) { result = 1; } // f1 > f2 if (!o1.isMultiValue() && o2.isMultiValue()) { result = -1; } // f1 < f2 } return result == 0 ? super.compare(o1, o2) : result; } } /** * <p>Responsible for spacing out {@link Text} values according to the {@link Column} definitions the table was * created with. Columns have a width, indentation, and an overflow policy that decides what to do if a value is * longer than the column's width.</p> */ public static class TextTable { /** * Helper class to index positions in a {@code Help.TextTable}. * @since 2.0 */ public static class Cell { /** Table column index (zero based). 
*/ public final int column; /** Table row index (zero based). */ public final int row; /** Constructs a new Cell with the specified coordinates in the table. * @param column the zero-based table column * @param row the zero-based table row */ public Cell(int column, int row) { this.column = column; this.row = row; } } /** The column definitions of this table. */ public final Column[] columns; /** The {@code Text} slots of the {@code TextTable} to copy text values into. */ protected final List<Text> columnValues = new ArrayList<Text>(); /** By default, indent wrapped lines by 2 spaces. */ public int indentWrappedLines = 2; private final Ansi ansi; /** Constructs a TextTable with five columns as follows: * <ol> * <li>required option/parameter marker (width: 2, indent: 0, TRUNCATE on overflow)</li> * <li>short option name (width: 2, indent: 0, TRUNCATE on overflow)</li> * <li>comma separator (width: 1, indent: 0, TRUNCATE on overflow)</li> * <li>long option name(s) (width: 24, indent: 1, SPAN multiple columns on overflow)</li> * <li>description line(s) (width: 51, indent: 1, WRAP to next row on overflow)</li> * </ol> * @param ansi whether to emit ANSI escape codes or not */ public TextTable(Ansi ansi) { // "* -c, --create Creates a ...." this(ansi, new Column[] { new Column(2, 0, TRUNCATE), // "*" new Column(2, 0, TRUNCATE), // "-c" new Column(1, 0, TRUNCATE), // "," new Column(optionsColumnWidth - 2 - 2 - 1 , 1, SPAN), // " --create" new Column(usageHelpWidth - optionsColumnWidth, 1, WRAP) // " Creates a ..." }); } /** Constructs a new TextTable with columns of the specified widths; all columns WRAP to the next row on * overflow except the last column, which SPANs multiple columns. * @param ansi whether to emit ANSI escape codes or not * @param columnWidths the width of the table columns (all columns have zero indent) */ public TextTable(Ansi ansi, int... columnWidths) { this.ansi = Assert.notNull(ansi, "ansi"); columns = new Column[columnWidths.length]; for (int i = 0; i < columnWidths.length; i++) { columns[i] = new Column(columnWidths[i], 0, i == columnWidths.length - 1 ? SPAN: WRAP); } } /** Constructs a {@code TextTable} with the specified columns. * @param ansi whether to emit ANSI escape codes or not * @param columns columns to construct this TextTable with */ public TextTable(Ansi ansi, Column... columns) { this.ansi = Assert.notNull(ansi, "ansi"); this.columns = Assert.notNull(columns, "columns"); if (columns.length == 0) { throw new IllegalArgumentException("At least one column is required"); } } /** Returns the {@code Text} slot at the specified row and column to write a text value into. * @param row the row of the cell whose Text to return * @param col the column of the cell whose Text to return * @return the Text object at the specified row and column * @since 2.0 */ public Text textAt(int row, int col) { return columnValues.get(col + (row * columns.length)); } /** Returns the {@code Text} slot at the specified row and column to write a text value into. * @param row the row of the cell whose Text to return * @param col the column of the cell whose Text to return * @return the Text object at the specified row and column * @deprecated use {@link #textAt(int, int)} instead */ @Deprecated public Text cellAt(int row, int col) { return textAt(row, col); } /** Returns the current number of rows of this {@code TextTable}.
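* <p>A minimal sketch (illustrative; column widths chosen arbitrarily):</p>
* <pre>{@code
* TextTable table = new TextTable(Ansi.OFF, 10, 70);   // two columns, 10 and 70 chars wide
* table.addRowValues("-v", "Verbose output.");         // rowCount() is now 1 (more if wrapped)
* String rendered = table.toString();
* }</pre>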
* @return the current number of rows in this TextTable */ public int rowCount() { return columnValues.size() / columns.length; } /** Adds the required {@code Text} slots for a new row to the {@link #columnValues} field. */ public void addEmptyRow() { for (int i = 0; i < columns.length; i++) { columnValues.add(ansi.new Text(columns[i].width)); } } /** Delegates to {@link #addRowValues(CommandLine.Help.Ansi.Text...)}. * @param values the text values to display in each column of the current row */ public void addRowValues(String... values) { Text[] array = new Text[values.length]; for (int i = 0; i < array.length; i++) { array[i] = values[i] == null ? Ansi.EMPTY_TEXT : ansi.new Text(values[i]); } addRowValues(array); } /** * Adds a new {@linkplain TextTable#addEmptyRow() empty row}, then calls {@link * TextTable#putValue(int, int, CommandLine.Help.Ansi.Text) putValue} for each of the specified values, adding more empty rows * if the return value indicates that the value spanned multiple columns or was wrapped to multiple rows. * @param values the values to write into a new row in this TextTable * @throws IllegalArgumentException if the number of values exceeds the number of Columns in this table */ public void addRowValues(Text... values) { if (values.length > columns.length) { throw new IllegalArgumentException(values.length + " values don't fit in " + columns.length + " columns"); } addEmptyRow(); for (int col = 0; col < values.length; col++) { int row = rowCount() - 1;// write to last row: previous value may have wrapped to next row Cell cell = putValue(row, col, values[col]); // add row if a value spanned/wrapped and there are still remaining values if ((cell.row != row || cell.column != col) && col != values.length - 1) { addEmptyRow(); } } } /** * Writes the specified value into the cell at the specified row and column and returns the last row and * column written to. Depending on the Column's {@link Column#overflow Overflow} policy, the value may span * multiple columns or wrap to multiple rows when larger than the column width. * @param row the target row in the table * @param col the target column in the table to write to * @param value the value to write * @return a Cell indicating the position in the table that was last written to (since 2.0) * @throws IllegalArgumentException if the specified row exceeds the table's {@linkplain * TextTable#rowCount() row count} * @since 2.0 (previous versions returned a {@code java.awt.Point} object) */ public Cell putValue(int row, int col, Text value) { if (row > rowCount() - 1) { throw new IllegalArgumentException("Cannot write to row " + row + ": rowCount=" + rowCount()); } if (value == null || value.plain.length() == 0) { return new Cell(col, row); } Column column = columns[col]; int indent = column.indent; switch (column.overflow) { case TRUNCATE: copy(value, textAt(row, col), indent); return new Cell(col, row); case SPAN: int startColumn = col; do { boolean lastColumn = col == columns.length - 1; int charsWritten = lastColumn ?
copy(BreakIterator.getLineInstance(), value, textAt(row, col), indent) : copy(value, textAt(row, col), indent); value = value.substring(charsWritten); indent = 0; if (value.length > 0) { // value did not fit in column ++col; // write remainder of value in next column } if (value.length > 0 && col >= columns.length) { // we filled up all columns on this row addEmptyRow(); row++; col = startColumn; indent = column.indent + indentWrappedLines; } } while (value.length > 0); return new Cell(col, row); case WRAP: BreakIterator lineBreakIterator = BreakIterator.getLineInstance(); do { int charsWritten = copy(lineBreakIterator, value, textAt(row, col), indent); value = value.substring(charsWritten); indent = column.indent + indentWrappedLines; if (value.length > 0) { // value did not fit in column ++row; // write remainder of value in next row addEmptyRow(); } } while (value.length > 0); return new Cell(col, row); } throw new IllegalStateException(column.overflow.toString()); } private static int length(Text str) { return str.length; // TODO count some characters as double length } private int copy(BreakIterator line, Text text, Text columnValue, int offset) { // Deceive the BreakIterator to ensure no line breaks after '-' character line.setText(text.plainString().replace("-", "\u00ff")); int done = 0; for (int start = line.first(), end = line.next(); end != BreakIterator.DONE; start = end, end = line.next()) { Text word = text.substring(start, end); //.replace("\u00ff", "-"); // not needed if (columnValue.maxLength >= offset + done + length(word)) { done += copy(word, columnValue, offset + done); // TODO localized length } else { break; } } if (done == 0 && length(text) > columnValue.maxLength) { // The value is a single word that is too big to be written to the column. Write as much as we can. done = copy(text, columnValue, offset); } return done; } private static int copy(Text value, Text destination, int offset) { int length = Math.min(value.length, destination.maxLength - offset); value.getStyledChars(value.from, length, destination, offset); return length; } /** Copies the text representation that we built up from the options into the specified StringBuilder. * @param text the StringBuilder to write into * @return the specified StringBuilder object (to allow method chaining and a more fluid API) */ public StringBuilder toString(StringBuilder text) { int columnCount = this.columns.length; StringBuilder row = new StringBuilder(usageHelpWidth); for (int i = 0; i < columnValues.size(); i++) { Text column = columnValues.get(i); row.append(column.toString()); row.append(new String(spaces(columns[i % columnCount].width - column.length))); if (i % columnCount == columnCount - 1) { int lastChar = row.length() - 1; while (lastChar >= 0 && row.charAt(lastChar) == ' ') {lastChar--;} // rtrim row.setLength(lastChar + 1); text.append(row.toString()).append(System.getProperty("line.separator")); row.setLength(0); } } //if (Ansi.enabled()) { text.append(Style.reset.off()); } return text; } public String toString() { return toString(new StringBuilder()).toString(); } } /** Columns define the width, indent (leading number of spaces in a column before the value) and * {@linkplain Overflow Overflow} policy of a column in a {@linkplain TextTable TextTable}. */ public static class Column { /** Policy for handling text that is longer than the column width: * span multiple columns, wrap to the next row, or simply truncate the portion that doesn't fit. 
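* <p>For example (matching the default five-column option table built by {@code TextTable(Ansi)}):
* the 2-character marker and short-option columns TRUNCATE, the long-option column SPANs into the
* description column when names are long, and long descriptions WRAP onto following rows:</p>
* <pre>{@code
* new Column(2, 0, Overflow.TRUNCATE);   // required-option marker
* new Column(24, 1, Overflow.SPAN);      // long option name(s)
* new Column(51, 1, Overflow.WRAP);      // description line(s)
* }</pre>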
*/ public enum Overflow { TRUNCATE, SPAN, WRAP } /** Column width in characters */ public final int width; /** Indent (number of empty spaces at the start of the column preceding the text value) */ public final int indent; /** Policy that determines how to handle values larger than the column width. */ public final Overflow overflow; public Column(int width, int indent, Overflow overflow) { this.width = width; this.indent = indent; this.overflow = Assert.notNull(overflow, "overflow"); } } /** All usage help messages are generated with a color scheme that assigns certain styles and colors to common * parts of a usage message: the command name, options, positional parameters and option parameters. * Users may customize these styles by creating Help with a custom color scheme. * <p>Note that these options and styles may not be rendered if ANSI escape codes are not * {@linkplain Ansi#enabled() enabled}.</p> * @see Help#defaultColorScheme(Ansi) */ public static class ColorScheme { public final List<IStyle> commandStyles = new ArrayList<IStyle>(); public final List<IStyle> optionStyles = new ArrayList<IStyle>(); public final List<IStyle> parameterStyles = new ArrayList<IStyle>(); public final List<IStyle> optionParamStyles = new ArrayList<IStyle>(); private final Ansi ansi; /** Constructs a new ColorScheme with {@link Help.Ansi#AUTO}. */ public ColorScheme() { this(Ansi.AUTO); } /** Constructs a new ColorScheme with the specified Ansi enabled mode. * @param ansi whether to emit ANSI escape codes or not */ public ColorScheme(Ansi ansi) {this.ansi = Assert.notNull(ansi, "ansi"); } /** Adds the specified styles to the registered styles for commands in this color scheme and returns this color scheme. * @param styles the styles to add to the registered styles for commands in this color scheme * @return this color scheme to enable method chaining for a more fluent API */ public ColorScheme commands(IStyle... styles) { return addAll(commandStyles, styles); } /** Adds the specified styles to the registered styles for options in this color scheme and returns this color scheme. * @param styles the styles to add to the registered styles for options in this color scheme * @return this color scheme to enable method chaining for a more fluent API */ public ColorScheme options(IStyle... styles) { return addAll(optionStyles, styles);} /** Adds the specified styles to the registered styles for positional parameters in this color scheme and returns this color scheme. * @param styles the styles to add to the registered styles for parameters in this color scheme * @return this color scheme to enable method chaining for a more fluent API */ public ColorScheme parameters(IStyle... styles) { return addAll(parameterStyles, styles);} /** Adds the specified styles to the registered styles for option parameters in this color scheme and returns this color scheme. * @param styles the styles to add to the registered styles for option parameters in this color scheme * @return this color scheme to enable method chaining for a more fluent API */ public ColorScheme optionParams(IStyle... styles) { return addAll(optionParamStyles, styles);} /** Returns a Text with all command styles applied to the specified command string.
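* <p>Sketch (illustrative):</p>
* <pre>{@code
* ColorScheme scheme = new ColorScheme(Ansi.ON).commands(Style.bold, Style.underline);
* Ansi.Text text = scheme.commandText("mycommand");   // "mycommand" in bold and underlined
* }</pre>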
* @param command the command string to apply the registered command styles to * @return a Text with all command styles applied to the specified command string */ public Ansi.Text commandText(String command) { return ansi().apply(command, commandStyles); } /** Returns a Text with all option styles applied to the specified option string. * @param option the option string to apply the registered option styles to * @return a Text with all option styles applied to the specified option string */ public Ansi.Text optionText(String option) { return ansi().apply(option, optionStyles); } /** Returns a Text with all parameter styles applied to the specified parameter string. * @param parameter the parameter string to apply the registered parameter styles to * @return a Text with all parameter styles applied to the specified parameter string */ public Ansi.Text parameterText(String parameter) { return ansi().apply(parameter, parameterStyles); } /** Returns a Text with all optionParam styles applied to the specified optionParam string. * @param optionParam the option parameter string to apply the registered option parameter styles to * @return a Text with all option parameter styles applied to the specified option parameter string */ public Ansi.Text optionParamText(String optionParam) { return ansi().apply(optionParam, optionParamStyles); } /** Replaces colors and styles in this scheme with ones specified in system properties, and returns this scheme. * Supported property names:<ul> * <li>{@code picocli.color.commands}</li> * <li>{@code picocli.color.options}</li> * <li>{@code picocli.color.parameters}</li> * <li>{@code picocli.color.optionParams}</li> * </ul><p>Property values can be anything that {@link Help.Ansi.Style#parse(String)} can handle.</p> * @return this ColorScheme */ public ColorScheme applySystemProperties() { replace(commandStyles, System.getProperty("picocli.color.commands")); replace(optionStyles, System.getProperty("picocli.color.options")); replace(parameterStyles, System.getProperty("picocli.color.parameters")); replace(optionParamStyles, System.getProperty("picocli.color.optionParams")); return this; } private void replace(List<IStyle> styles, String property) { if (property != null) { styles.clear(); addAll(styles, Style.parse(property)); } } private ColorScheme addAll(List<IStyle> styles, IStyle... add) { styles.addAll(Arrays.asList(add)); return this; } public Ansi ansi() { return ansi; } } /** Creates and returns a new {@link ColorScheme} initialized with picocli default values: commands are bold, * options and parameters use a yellow foreground, and option parameters use italic. * @param ansi whether the usage help message should contain ANSI escape codes or not * @return a new default color scheme */ public static ColorScheme defaultColorScheme(Ansi ansi) { return new ColorScheme(ansi) .commands(Style.bold) .options(Style.fg_yellow) .parameters(Style.fg_yellow) .optionParams(Style.italic); } /** Provides methods and inner classes to support using ANSI escape codes in usage help messages. */ public enum Ansi { /** Only emit ANSI escape codes if the platform supports it and system property {@code "picocli.ansi"} * is not set to any value other than {@code "true"} (case insensitive). */ AUTO, /** Forced ON: always emit ANSI escape code regardless of the platform. */ ON, /** Forced OFF: never emit ANSI escape code regardless of the platform. 
*/ OFF; static Text EMPTY_TEXT = OFF.new Text(0); static final boolean isWindows = System.getProperty("os.name").startsWith("Windows"); static final boolean isXterm = System.getenv("TERM") != null && System.getenv("TERM").startsWith("xterm"); static final boolean ISATTY = calcTTY(); // http://stackoverflow.com/questions/1403772/how-can-i-check-if-a-java-programs-input-output-streams-are-connected-to-a-term static final boolean calcTTY() { if (isWindows && isXterm) { return true; } // Cygwin uses pseudo-tty and console is always null... try { return System.class.getDeclaredMethod("console").invoke(null) != null; } catch (Throwable reflectionFailed) { return true; } } private static boolean ansiPossible() { return ISATTY && (!isWindows || isXterm); } /** Returns {@code true} if ANSI escape codes should be emitted, {@code false} otherwise. * @return ON: {@code true}, OFF: {@code false}, AUTO: if system property {@code "picocli.ansi"} is * defined then return its boolean value, otherwise return whether the platform supports ANSI escape codes */ public boolean enabled() { if (this == ON) { return true; } if (this == OFF) { return false; } return (System.getProperty("picocli.ansi") == null ? ansiPossible() : Boolean.getBoolean("picocli.ansi")); } /** Defines the interface for an ANSI escape sequence. */ public interface IStyle { /** The Control Sequence Introducer (CSI) escape sequence {@value}. */ String CSI = "\u001B["; /** Returns the ANSI escape code for turning this style on. * @return the ANSI escape code for turning this style on */ String on(); /** Returns the ANSI escape code for turning this style off. * @return the ANSI escape code for turning this style off */ String off(); } /** * A set of pre-defined ANSI escape code styles and colors, and a set of convenience methods for parsing * text with embedded markup style names, as well as convenience methods for converting * styles to strings with embedded escape codes. */ public enum Style implements IStyle { reset(0, 0), bold(1, 21), faint(2, 22), italic(3, 23), underline(4, 24), blink(5, 25), reverse(7, 27), fg_black(30, 39), fg_red(31, 39), fg_green(32, 39), fg_yellow(33, 39), fg_blue(34, 39), fg_magenta(35, 39), fg_cyan(36, 39), fg_white(37, 39), bg_black(40, 49), bg_red(41, 49), bg_green(42, 49), bg_yellow(43, 49), bg_blue(44, 49), bg_magenta(45, 49), bg_cyan(46, 49), bg_white(47, 49), ; private final int startCode; private final int endCode; Style(int startCode, int endCode) {this.startCode = startCode; this.endCode = endCode; } public String on() { return CSI + startCode + "m"; } public String off() { return CSI + endCode + "m"; } /** Returns the concatenated ANSI escape codes for turning all specified styles on. * @param styles the styles to generate ANSI escape codes for * @return the concatenated ANSI escape codes for turning all specified styles on */ public static String on(IStyle... styles) { StringBuilder result = new StringBuilder(); for (IStyle style : styles) { result.append(style.on()); } return result.toString(); } /** Returns the concatenated ANSI escape codes for turning all specified styles off. * @param styles the styles to generate ANSI escape codes for * @return the concatenated ANSI escape codes for turning all specified styles off */ public static String off(IStyle... styles) { StringBuilder result = new StringBuilder(); for (IStyle style : styles) { result.append(style.off()); } return result.toString(); } /** Parses the specified style markup and returns the associated style. 
* The markup may be one of the Style enum value names, or it may be one of the Style enum value * names when {@code "fg_"} is prepended, or it may be one of the indexed colors in the 256 color palette. * @param str the case-insensitive style markup to convert, e.g. {@code "blue"} or {@code "fg_blue"}, * or {@code "46"} (indexed color) or {@code "0;5;0"} (RGB components of an indexed color) * @return the IStyle for the specified converter */ public static IStyle fg(String str) { try { return Style.valueOf(str.toLowerCase(ENGLISH)); } catch (Exception ignored) {} try { return Style.valueOf("fg_" + str.toLowerCase(ENGLISH)); } catch (Exception ignored) {} return new Palette256Color(true, str); } /** Parses the specified style markup and returns the associated style. * The markup may be one of the Style enum value names, or it may be one of the Style enum value * names when {@code "bg_"} is prepended, or it may be one of the indexed colors in the 256 color palette. * @param str the case-insensitive style markup to convert, e.g. {@code "blue"} or {@code "bg_blue"}, * or {@code "46"} (indexed color) or {@code "0;5;0"} (RGB components of an indexed color) * @return the IStyle for the specified converter */ public static IStyle bg(String str) { try { return Style.valueOf(str.toLowerCase(ENGLISH)); } catch (Exception ignored) {} try { return Style.valueOf("bg_" + str.toLowerCase(ENGLISH)); } catch (Exception ignored) {} return new Palette256Color(false, str); } /** Parses the specified comma-separated sequence of style descriptors and returns the associated * styles. For each markup, strings starting with {@code "bg("} are delegated to * {@link #bg(String)}, others are delegated to {@link #fg(String)}. * @param commaSeparatedCodes one or more descriptors, e.g. {@code "bg(blue),underline,red"} * @return an array with all styles for the specified descriptors */ public static IStyle[] parse(String commaSeparatedCodes) { String[] codes = commaSeparatedCodes.split(","); IStyle[] styles = new IStyle[codes.length]; for(int i = 0; i < codes.length; ++i) { if (codes[i].toLowerCase(ENGLISH).startsWith("fg(")) { int end = codes[i].indexOf(')'); styles[i] = Style.fg(codes[i].substring(3, end < 0 ? codes[i].length() : end)); } else if (codes[i].toLowerCase(ENGLISH).startsWith("bg(")) { int end = codes[i].indexOf(')'); styles[i] = Style.bg(codes[i].substring(3, end < 0 ? codes[i].length() : end)); } else { styles[i] = Style.fg(codes[i]); } } return styles; } } /** Defines a palette map of 216 colors: 6 * 6 * 6 cube (216 colors): * 16 + 36 * r + 6 * g + b (0 &lt;= r, g, b &lt;= 5). */ static class Palette256Color implements IStyle { private final int fgbg; private final int color; Palette256Color(boolean foreground, String color) { this.fgbg = foreground ?
38 : 48; String[] rgb = color.split(";"); if (rgb.length == 3) { this.color = 16 + 36 * Integer.decode(rgb[0]) + 6 * Integer.decode(rgb[1]) + Integer.decode(rgb[2]); } else { this.color = Integer.decode(color); } } public String on() { return String.format(CSI + "%d;5;%dm", fgbg, color); } public String off() { return CSI + (fgbg + 1) + "m"; } } private static class StyledSection { int startIndex, length; String startStyles, endStyles; StyledSection(int start, int len, String style1, String style2) { startIndex = start; length = len; startStyles = style1; endStyles = style2; } StyledSection withStartIndex(int newStart) { return new StyledSection(newStart, length, startStyles, endStyles); } } /** * Returns a new Text object where all the specified styles are applied to the full length of the * specified plain text. * @param plainText the string to apply all styles to. Must not contain markup! * @param styles the styles to apply to the full plain text * @return a new Text object */ public Text apply(String plainText, List<IStyle> styles) { if (plainText.length() == 0) { return new Text(0); } Text result = new Text(plainText.length()); IStyle[] all = styles.toArray(new IStyle[styles.size()]); result.sections.add(new StyledSection( 0, plainText.length(), Style.on(all), Style.off(reverse(all)) + Style.reset.off())); result.plain.append(plainText); result.length = result.plain.length(); return result; } private static <T> T[] reverse(T[] all) { for (int i = 0; i < all.length / 2; i++) { T temp = all[i]; all[i] = all[all.length - i - 1]; all[all.length - i - 1] = temp; } return all; } /** Encapsulates rich text with styles and colors. Text objects may be constructed with Strings containing * markup like {@code @|bg(red),white,underline some text|@}, and this class converts the markup to ANSI * escape codes. * <p> * Internally keeps both an enriched and a plain text representation to allow layout components to calculate * text width while remaining unaware of the embedded ANSI escape codes.</p> */ public class Text implements Cloneable { private final int maxLength; private int from; private int length; private StringBuilder plain = new StringBuilder(); private List<StyledSection> sections = new ArrayList<StyledSection>(); /** Constructs a Text with the specified max length (for use in a TextTable Column). * @param maxLength max length of this text */ public Text(int maxLength) { this.maxLength = maxLength; } /** * Constructs a Text with the specified String, which may contain markup like * {@code @|bg(red),white,underline some text|@}. 
* @param input the string with markup to parse */ public Text(String input) { maxLength = -1; plain.setLength(0); int i = 0; while (true) { int j = input.indexOf("@|", i); if (j == -1) { if (i == 0) { plain.append(input); length = plain.length(); return; } plain.append(input.substring(i, input.length())); length = plain.length(); return; } plain.append(input.substring(i, j)); int k = input.indexOf("|@", j); if (k == -1) { plain.append(input); length = plain.length(); return; } j += 2; String spec = input.substring(j, k); String[] items = spec.split(" ", 2); if (items.length == 1) { plain.append(input); length = plain.length(); return; } IStyle[] styles = Style.parse(items[0]); addStyledSection(plain.length(), items[1].length(), Style.on(styles), Style.off(reverse(styles)) + Style.reset.off()); plain.append(items[1]); i = k + 2; } } private void addStyledSection(int start, int length, String startStyle, String endStyle) { sections.add(new StyledSection(start, length, startStyle, endStyle)); } public Object clone() { try { return super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException(e); } } public Text[] splitLines() { List<Text> result = new ArrayList<Text>(); boolean trailingEmptyString = plain.length() == 0; int start = 0, end = 0; for (int i = 0; i < plain.length(); i++, end = i) { char c = plain.charAt(i); boolean eol = c == '\n'; eol |= (c == '\r' && i + 1 < plain.length() && plain.charAt(i + 1) == '\n' && ++i > 0); // \r\n eol |= c == '\r'; if (eol) { result.add(this.substring(start, end)); trailingEmptyString = i == plain.length() - 1; start = i + 1; } } if (start < plain.length() || trailingEmptyString) { result.add(this.substring(start, plain.length())); } return result.toArray(new Text[result.size()]); } /** Returns a new {@code Text} instance that is a substring of this Text. Does not modify this instance! * @param start index in the plain text where to start the substring * @return a new Text instance that is a substring of this Text */ public Text substring(int start) { return substring(start, length); } /** Returns a new {@code Text} instance that is a substring of this Text. Does not modify this instance! * @param start index in the plain text where to start the substring * @param end index in the plain text where to end the substring * @return a new Text instance that is a substring of this Text */ public Text substring(int start, int end) { Text result = (Text) clone(); result.from = from + start; result.length = end - start; return result; } /** Returns a new {@code Text} instance with the specified text appended. Does not modify this instance! * @param string the text to append * @return a new Text instance */ public Text append(String string) { return append(new Text(string)); } /** Returns a new {@code Text} instance with the specified text appended. Does not modify this instance! 
* @param other the text to append * @return a new Text instance */ public Text append(Text other) { Text result = (Text) clone(); result.plain = new StringBuilder(plain.toString().substring(from, from + length)); result.from = 0; result.sections = new ArrayList<StyledSection>(); for (StyledSection section : sections) { result.sections.add(section.withStartIndex(section.startIndex - from)); } result.plain.append(other.plain.toString().substring(other.from, other.from + other.length)); for (StyledSection section : other.sections) { int index = result.length + section.startIndex - other.from; result.sections.add(section.withStartIndex(index)); } result.length = result.plain.length(); return result; } /** * Copies the specified substring of this Text into the specified destination, preserving the markup. * @param from start of the substring * @param length length of the substring * @param destination destination Text to modify * @param offset indentation (padding) */ public void getStyledChars(int from, int length, Text destination, int offset) { if (destination.length < offset) { for (int i = destination.length; i < offset; i++) { destination.plain.append(' '); } destination.length = offset; } for (StyledSection section : sections) { destination.sections.add(section.withStartIndex(section.startIndex - from + destination.length)); } destination.plain.append(plain.toString().substring(from, from + length)); destination.length = destination.plain.length(); } /** Returns the plain text without any formatting. * @return the plain text without any formatting */ public String plainString() { return plain.toString().substring(from, from + length); } public boolean equals(Object obj) { return toString().equals(String.valueOf(obj)); } public int hashCode() { return toString().hashCode(); } /** Returns a String representation of the text with ANSI escape codes embedded, unless ANSI is * {@linkplain Ansi#enabled()} not enabled}, in which case the plain text is returned. * @return a String representation of the text with ANSI escape codes embedded (if enabled) */ public String toString() { if (!Ansi.this.enabled()) { return plain.toString().substring(from, from + length); } if (length == 0) { return ""; } StringBuilder sb = new StringBuilder(plain.length() + 20 * sections.size()); StyledSection current = null; int end = Math.min(from + length, plain.length()); for (int i = from; i < end; i++) { StyledSection section = findSectionContaining(i); if (section != current) { if (current != null) { sb.append(current.endStyles); } if (section != null) { sb.append(section.startStyles); } current = section; } sb.append(plain.charAt(i)); } if (current != null) { sb.append(current.endStyles); } return sb.toString(); } private StyledSection findSectionContaining(int index) { for (StyledSection section : sections) { if (index >= section.startIndex && index < section.startIndex + section.length) { return section; } } return null; } } } } /** * Utility class providing some defensive coding convenience methods. */ private static final class Assert { /** * Throws a NullPointerException if the specified object is null. * @param object the object to verify * @param description error message * @param <T> type of the object to check * @return the verified object */ static <T> T notNull(T object, String description) { if (object == null) { throw new NullPointerException(description); } return object; } static boolean equals(Object obj1, Object obj2) { return obj1 == null ? 
obj2 == null : obj1.equals(obj2); } static int hashCode(Object obj) {return obj == null ? 0 : obj.hashCode(); } static int hashCode(boolean bool) {return bool ? 1 : 0; } private Assert() {} // private constructor: never instantiate } private enum TraceLevel { OFF, WARN, INFO, DEBUG; public boolean isEnabled(TraceLevel other) { return ordinal() >= other.ordinal(); } private void print(Tracer tracer, String msg, Object... params) { if (tracer.level.isEnabled(this)) { tracer.stream.printf(prefix(msg), params); } } private String prefix(String msg) { return "[picocli " + this + "] " + msg; } static TraceLevel lookup(String key) { return key == null ? WARN : empty(key) || "true".equalsIgnoreCase(key) ? INFO : valueOf(key); } } private static class Tracer { TraceLevel level = TraceLevel.lookup(System.getProperty("picocli.trace")); PrintStream stream = System.err; void warn (String msg, Object... params) { TraceLevel.WARN.print(this, msg, params); } void info (String msg, Object... params) { TraceLevel.INFO.print(this, msg, params); } void debug(String msg, Object... params) { TraceLevel.DEBUG.print(this, msg, params); } boolean isWarn() { return level.isEnabled(TraceLevel.WARN); } boolean isInfo() { return level.isEnabled(TraceLevel.INFO); } boolean isDebug() { return level.isEnabled(TraceLevel.DEBUG); } } /** Base class of all exceptions thrown by {@code picocli.CommandLine}. * @since 2.0 */ public static class PicocliException extends RuntimeException { private static final long serialVersionUID = -2574128880125050818L; public PicocliException(String msg) { super(msg); } public PicocliException(String msg, Exception ex) { super(msg, ex); } } /** Exception indicating a problem during {@code CommandLine} initialization. * @since 2.0 */ public static class InitializationException extends PicocliException { private static final long serialVersionUID = 8423014001666638895L; public InitializationException(String msg) { super(msg); } public InitializationException(String msg, Exception ex) { super(msg, ex); } } /** Exception indicating a problem while invoking a command or subcommand. * @since 2.0 */ public static class ExecutionException extends PicocliException { private static final long serialVersionUID = 7764539594267007998L; private final CommandLine commandLine; public ExecutionException(CommandLine commandLine, String msg) { super(msg); this.commandLine = Assert.notNull(commandLine, "commandLine"); } public ExecutionException(CommandLine commandLine, String msg, Exception ex) { super(msg, ex); this.commandLine = Assert.notNull(commandLine, "commandLine"); } /** Returns the {@code CommandLine} object for the (sub)command that could not be invoked. * @return the {@code CommandLine} object for the (sub)command where invocation failed. */ public CommandLine getCommandLine() { return commandLine; } } /** Exception thrown by {@link ITypeConverter} implementations to indicate a String could not be converted. */ public static class TypeConversionException extends PicocliException { private static final long serialVersionUID = 4251973913816346114L; public TypeConversionException(String msg) { super(msg); } } /** Exception indicating something went wrong while parsing command line options. */ public static class ParameterException extends PicocliException { private static final long serialVersionUID = 1477112829129763139L; private final CommandLine commandLine; /** Constructs a new ParameterException with the specified CommandLine and error message. 
* @param commandLine the command or subcommand whose input was invalid * @param msg describes the problem * @since 2.0 */ public ParameterException(CommandLine commandLine, String msg) { super(msg); this.commandLine = Assert.notNull(commandLine, "commandLine"); } /** Constructs a new ParameterException with the specified CommandLine and error message. * @param commandLine the command or subcommand whose input was invalid * @param msg describes the problem * @param ex the exception that caused this ParameterException * @since 2.0 */ public ParameterException(CommandLine commandLine, String msg, Exception ex) { super(msg, ex); this.commandLine = Assert.notNull(commandLine, "commandLine"); } /** Returns the {@code CommandLine} object for the (sub)command whose input could not be parsed. * @return the {@code CommandLine} object for the (sub)command where parsing failed. * @since 2.0 */ public CommandLine getCommandLine() { return commandLine; } private static ParameterException create(CommandLine cmd, Exception ex, String arg, int i, String[] args) { String msg = ex.getClass().getSimpleName() + ": " + ex.getLocalizedMessage() + " while processing argument at or before arg[" + i + "] '" + arg + "' in " + Arrays.toString(args) + ": " + ex.toString(); return new ParameterException(cmd, msg, ex); } } /** * Exception indicating that a required parameter was not specified. */ public static class MissingParameterException extends ParameterException { private static final long serialVersionUID = 5075678535706338753L; public MissingParameterException(CommandLine commandLine, String msg) { super(commandLine, msg); } private static MissingParameterException create(CommandLine cmd, Collection<ArgSpec<?>> missing, String separator) { if (missing.size() == 1) { return new MissingParameterException(cmd, "Missing required option '" + describe(missing.iterator().next(), separator) + "'"); } List<String> names = new ArrayList<String>(missing.size()); for (ArgSpec<?> argSpec : missing) { names.add(describe(argSpec, separator)); } return new MissingParameterException(cmd, "Missing required options " + names.toString()); } private static String describe(ArgSpec<?> argSpec, String separator) { String prefix = (argSpec.isOption()) ? ((OptionSpec) argSpec).names()[0] + separator : "params[" + ((PositionalParamSpec) argSpec).index() + "]" + separator; return prefix + argSpec.paramLabel(); } } /** * Exception indicating that multiple fields have been annotated with the same Option name. */ public static class DuplicateOptionAnnotationsException extends InitializationException { private static final long serialVersionUID = -3355128012575075641L; public DuplicateOptionAnnotationsException(String msg) { super(msg); } private static DuplicateOptionAnnotationsException create(String name, ArgSpec<?> argSpec1, ArgSpec<?> argSpec2) { return new DuplicateOptionAnnotationsException("Option name '" + name + "' is used by both " + argSpec1.toString() + " and " + argSpec2.toString()); } } /** Exception indicating that there was a gap in the indices of the fields annotated with {@link Parameters}. */ public static class ParameterIndexGapException extends InitializationException { private static final long serialVersionUID = -1520981133257618319L; public ParameterIndexGapException(String msg) { super(msg); } } /** Exception indicating that a command line argument could not be mapped to any of the fields annotated with * {@link Option} or {@link Parameters}. 
*/ public static class UnmatchedArgumentException extends ParameterException { private static final long serialVersionUID = -8700426380701452440L; public UnmatchedArgumentException(CommandLine commandLine, String msg) { super(commandLine, msg); } public UnmatchedArgumentException(CommandLine commandLine, Stack<String> args) { this(commandLine, new ArrayList<String>(reverse(args))); } public UnmatchedArgumentException(CommandLine commandLine, List<String> args) { this(commandLine, "Unmatched argument" + (args.size() == 1 ? " " : "s ") + args); } } /** Exception indicating that more values were specified for an option or parameter than its {@link Option#arity() arity} allows. */ public static class MaxValuesforFieldExceededException extends ParameterException { private static final long serialVersionUID = 6536145439570100641L; public MaxValuesforFieldExceededException(CommandLine commandLine, String msg) { super(commandLine, msg); } } /** Exception indicating that an option for a single-value option field has been specified multiple times on the command line. */ public static class OverwrittenOptionException extends ParameterException { private static final long serialVersionUID = 1338029208271055776L; public OverwrittenOptionException(CommandLine commandLine, String msg) { super(commandLine, msg); } } /** * Exception indicating that an annotated field had a type for which no {@link ITypeConverter} was * {@linkplain #registerConverter(Class, ITypeConverter) registered}. */ public static class MissingTypeConverterException extends ParameterException { private static final long serialVersionUID = -6050931703233083760L; public MissingTypeConverterException(CommandLine commandLine, String msg) { super(commandLine, msg); } } }
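The Ansi.AUTO mode above derives its answer from three signals: the picocli.ansi system property, whether stdout is attached to a terminal, and the platform (Windows is assumed to lack ANSI support unless TERM looks like xterm, which is also the only environment variable this file reads). A rough Python sketch of the same heuristic, assuming POSIX isatty semantics and using an environment variable as a stand-in for the JVM system property:

import os
import sys

def ansi_enabled(mode: str = "AUTO") -> bool:
    # ON and OFF are forced; AUTO probes the platform like picocli's Ansi.enabled().
    if mode == "ON":
        return True
    if mode == "OFF":
        return False
    override = os.environ.get("PICOCLI_ANSI")  # stand-in for the picocli.ansi property
    if override is not None:
        return override.lower() == "true"
    is_windows = sys.platform.startswith("win")
    is_xterm = os.environ.get("TERM", "").startswith("xterm")
    return sys.stdout.isatty() and (not is_windows or is_xterm)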
[ "\"TERM\"", "\"TERM\"" ]
[]
[ "TERM" ]
[]
["TERM"]
java
1
0
xonsh/timings.py
"""Timing related functionality for the xonsh shell. The following time_it alias and Timer was forked from the IPython project: * Copyright (c) 2008-2014, IPython Development Team * Copyright (C) 2001-2007 Fernando Perez <[email protected]> * Copyright (c) 2001, Janko Hauser <[email protected]> * Copyright (c) 2001, Nathaniel Gray <[email protected]> """ import gc import itertools import math import os import sys import time import timeit from xonsh.built_ins import XSH from xonsh.events import events from xonsh.lazyasd import lazybool, lazyobject from xonsh.platform import ON_WINDOWS @lazybool def _HAVE_RESOURCE(): try: import resource as r have = True except ImportError: # There is no distinction of user/system time under windows, so we # just use time.perf_counter() for everything... have = False return have @lazyobject def resource(): import resource as r return r @lazyobject def clocku(): if _HAVE_RESOURCE: def clocku(): """clocku() -> floating point number Return the *USER* CPU time in seconds since the start of the process.""" return resource.getrusage(resource.RUSAGE_SELF)[0] else: clocku = time.perf_counter return clocku @lazyobject def clocks(): if _HAVE_RESOURCE: def clocks(): """clocks() -> floating point number Return the *SYSTEM* CPU time in seconds since the start of the process.""" return resource.getrusage(resource.RUSAGE_SELF)[1] else: clocks = time.perf_counter return clocks @lazyobject def clock(): if _HAVE_RESOURCE: def clock(): """clock() -> floating point number Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of the process.""" u, s = resource.getrusage(resource.RUSAGE_SELF)[:2] return u + s else: clock = time.perf_counter return clock @lazyobject def clock2(): if _HAVE_RESOURCE: def clock2(): """clock2() -> (t_user,t_system) Similar to clock(), but return a tuple of user/system times.""" return resource.getrusage(resource.RUSAGE_SELF)[:2] else: def clock2(): """Under windows, system CPU time can't be measured. This just returns perf_counter() and zero.""" return time.perf_counter(), 0.0 return clock2 def format_time(timespan, precision=3): """Formats the timespan in a human readable form""" if timespan >= 60.0: # we have more than a minute, format that in a human readable form parts = [("d", 60 * 60 * 24), ("h", 60 * 60), ("min", 60), ("s", 1)] time = [] leftover = timespan for suffix, length in parts: value = int(leftover / length) if value > 0: leftover = leftover % length time.append(f"{str(value)}{suffix}") if leftover < 1: break return " ".join(time) # Unfortunately the unicode 'micro' symbol can cause problems in # certain terminals. # See bug: https://bugs.launchpad.net/ipython/+bug/348466 # Try to prevent crashes by being more secure than it needs to # E.g. eclipse is able to print a mu, but has no sys.stdout.encoding set. units = ["s", "ms", "us", "ns"] # the save value if hasattr(sys.stdout, "encoding") and sys.stdout.encoding: try: "\xb5".encode(sys.stdout.encoding) units = ["s", "ms", "\xb5s", "ns"] except Exception: pass scaling = [1, 1e3, 1e6, 1e9] if timespan > 0.0: order = min(-int(math.floor(math.log10(timespan)) // 3), 3) else: order = 3 return "{1:.{0}g} {2}".format(precision, timespan * scaling[order], units[order]) class Timer(timeit.Timer): """Timer class that explicitly uses self.inner which is an undocumented implementation detail of CPython, not shared by PyPy. """ # Timer.timeit copied from CPython 3.4.2 def timeit(self, number=timeit.default_number): """Time 'number' executions of the main statement. 
To be precise, this executes the setup statement once, and then returns the time it takes to execute the main statement a number of times, as a float measured in seconds. The argument is the number of times through the loop, defaulting to one million. The main statement, the setup statement and the timer function to be used are passed to the constructor. """ it = itertools.repeat(None, number) gcold = gc.isenabled() gc.disable() try: timing = self.inner(it, self.timer) finally: if gcold: gc.enable() return timing INNER_TEMPLATE = """ def inner(_it, _timer): #setup _t0 = _timer() for _i in _it: {stmt} _t1 = _timer() return _t1 - _t0 """ def timeit_alias(args, stdin=None): """Runs timing study on arguments.""" # some real args number = 0 quiet = False repeat = 3 precision = 3 # setup ctx = XSH.ctx timer = Timer(timer=clock) stmt = " ".join(args) innerstr = INNER_TEMPLATE.format(stmt=stmt) # Track compilation time so it can be reported if too long # Minimum time above which compilation time will be reported tc_min = 0.1 t0 = clock() innercode = XSH.builtins.compilex( innerstr, filename="<xonsh-timeit>", mode="exec", glbs=ctx ) tc = clock() - t0 # get inner func ns = {} XSH.builtins.execx(innercode, glbs=ctx, locs=ns, mode="exec") timer.inner = ns["inner"] # Check if there is a huge difference between the best and worst timings. worst_tuning = 0 if number == 0: # determine number so that 0.2 <= total time < 2.0 number = 1 for _ in range(1, 10): time_number = timer.timeit(number) worst_tuning = max(worst_tuning, time_number / number) if time_number >= 0.2: break number *= 10 all_runs = timer.repeat(repeat, number) best = min(all_runs) / number # print some debug info if not quiet: worst = max(all_runs) / number if worst_tuning: worst = max(worst, worst_tuning) # Check best timing is greater than zero to avoid a # ZeroDivisionError. # In cases where the slowest timing is less than 10 microseconds # we assume that it does not really matter if the fastest # timing is 4 times faster than the slowest timing or not. if worst > 4 * best and best > 0 and worst > 1e-5: print( ( "The slowest run took {:0.2f} times longer than the " "fastest. This could mean that an intermediate result " "is being cached." 
).format(worst / best) ) print( "{} loops, best of {}: {} per loop".format( number, repeat, format_time(best, precision) ) ) if tc > tc_min: print(f"Compiler time: {tc:.2f} s") return _timings = {"start": clock()} def setup_timings(argv): global _timings if "--timings" in argv: events.doc( "on_timingprobe", """ on_timingprobe(name: str) -> None Fired to insert some timings into the startuptime list """, ) @events.on_timingprobe def timing_on_timingprobe(name, **kw): global _timings _timings[name] = clock() @events.on_post_cmdloop def timing_on_post_cmdloop(**kw): global _timings _timings["on_post_cmdloop"] = clock() @events.on_post_init def timing_on_post_init(**kw): global _timings _timings["on_post_init"] = clock() @events.on_post_rc def timing_on_post_rc(**kw): global _timings _timings["on_post_rc"] = clock() @events.on_postcommand def timing_on_postcommand(**kw): global _timings _timings["on_postcommand"] = clock() @events.on_pre_cmdloop def timing_on_pre_cmdloop(**kw): global _timings _timings["on_pre_cmdloop"] = clock() @events.on_pre_rc def timing_on_pre_rc(**kw): global _timings _timings["on_pre_rc"] = clock() @events.on_precommand def timing_on_precommand(**kw): global _timings _timings["on_precommand"] = clock() @events.on_ptk_create def timing_on_ptk_create(**kw): global _timings _timings["on_ptk_create"] = clock() @events.on_chdir def timing_on_chdir(**kw): global _timings _timings["on_chdir"] = clock() @events.on_pre_prompt_format def timing_on_pre_prompt_format(**kw): global _timings _timings["on_pre_prompt_format"] = clock() @events.on_post_prompt def timing_on_post_prompt(**kw): global _timings _timings = {"on_post_prompt": clock()} @events.on_pre_prompt def timing_on_pre_prompt(**kw): global _timings _timings["on_pre_prompt"] = clock() times = list(_timings.items()) times = sorted(times, key=lambda x: x[1]) width = max(len(s) for s, _ in times) + 2 header_format = f"|{{:<{width}}}|{{:^11}}|{{:^11}}|" entry_format = f"|{{:<{width}}}|{{:^11.3f}}|{{:^11.3f}}|" sepline = "|{}|{}|{}|".format("-" * width, "-" * 11, "-" * 11) # Print result table print(" Debug level: {}".format(os.getenv("XONSH_DEBUG", "Off"))) print(sepline) print(header_format.format("Event name", "Time (s)", "Delta (s)")) print(sepline) prevtime = tstart = times[0][1] for name, ts in times: print(entry_format.format(name, ts - tstart, ts - prevtime)) prevtime = ts print(sepline)
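format_time above picks its unit by clamping the thousands-grouped base-10 exponent of the timespan: order = min(-int(math.floor(math.log10(timespan)) // 3), 3) maps seconds to index 0, milliseconds to 1, and so on, capped at nanoseconds. A few worked values, assuming the plain-ASCII unit list:

import math

def unit_order(timespan: float) -> int:
    # Same clamped-exponent trick format_time uses to choose s/ms/us/ns.
    if timespan <= 0.0:
        return 3
    return min(-int(math.floor(math.log10(timespan)) // 3), 3)

units = ["s", "ms", "us", "ns"]
scaling = [1, 1e3, 1e6, 1e9]
for span in (1.5, 0.25, 0.00042, 3.2e-7):
    order = unit_order(span)
    print(f"{span} -> {span * scaling[order]:.3g} {units[order]}")
    # 1.5 -> 1.5 s, 0.25 -> 250 ms, 0.00042 -> 420 us, 3.2e-07 -> 320 ns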
[]
[]
[ "XONSH_DEBUG" ]
[]
["XONSH_DEBUG"]
python
1
0
venv/Lib/site-packages/jupyter_client/tests/signalkernel.py
"""Test kernel for signalling subprocesses""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import os from subprocess import Popen, PIPE import sys import time from ipykernel.displayhook import ZMQDisplayHook from ipykernel.kernelbase import Kernel from ipykernel.kernelapp import IPKernelApp class SignalTestKernel(Kernel): """Kernel for testing subprocess signaling""" implementation = 'signaltest' implementation_version = '0.0' banner = '' def __init__(self, **kwargs): kwargs.pop('user_ns', None) super().__init__(**kwargs) self.children = [] def do_execute(self, code, silent, store_history=True, user_expressions=None, allow_stdin=False): code = code.strip() reply = { 'status': 'ok', 'user_expressions': {}, } if code == 'start': child = Popen(['bash', '-i', '-c', 'sleep 30'], stderr=PIPE) self.children.append(child) reply['user_expressions']['pid'] = self.children[-1].pid elif code == 'check': reply['user_expressions']['poll'] = [ child.poll() for child in self.children ] elif code == 'env': reply['user_expressions']['env'] = os.getenv("TEST_VARS", "") elif code == 'sleep': try: time.sleep(10) except KeyboardInterrupt: reply['user_expressions']['interrupted'] = True else: reply['user_expressions']['interrupted'] = False else: reply['status'] = 'error' reply['ename'] = 'Error' reply['evalue'] = code reply['traceback'] = ['no such command: %s' % code] return reply def kernel_info_request(self, *args, **kwargs): """Add delay to kernel_info_request triggers slow-response code in KernelClient.wait_for_ready """ return super().kernel_info_request(*args, **kwargs) class SignalTestApp(IPKernelApp): kernel_class = SignalTestKernel def init_io(self): # Overridden to disable stdout/stderr capture self.displayhook = ZMQDisplayHook(self.session, self.iopub_socket) if __name__ == '__main__': # make startup artificially slow, # so that we exercise client logic for slow-starting kernels time.sleep(2) SignalTestApp.launch_instance()
[]
[]
[ "TEST_VARS" ]
[]
["TEST_VARS"]
python
1
0
test/e2eprovider/server/server.go
//go:build e2e // +build e2e /* Copyright 2021 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package server import ( "context" "encoding/json" "fmt" "net" "net/http" "os" "strings" "sync" "sigs.k8s.io/secrets-store-csi-driver/provider/v1alpha1" "sigs.k8s.io/secrets-store-csi-driver/test/e2eprovider/types" "google.golang.org/grpc" "k8s.io/klog/v2" "sigs.k8s.io/yaml" ) var ( secrets = map[string]string{ "foo": "secret", "fookey": `-----BEGIN PUBLIC KEY----- This is mock key -----END PUBLIC KEY-----`, } // podCache is a map of pod UID to check if secret has been rotated. podCache = map[string]bool{} podUIDAttribute = "csi.storage.k8s.io/pod.uid" serviceAccountTokensAttribute = "csi.storage.k8s.io/serviceAccount.tokens" //nolint // RWMutex is to safely access podCache m sync.RWMutex ) // Server is a mock csi-provider server type Server struct { grpcServer *grpc.Server socketPath string network string } // NewE2EProviderServer returns a mock csi-provider grpc server func NewE2EProviderServer(endpoint string) (*Server, error) { var network, address string if strings.HasPrefix(strings.ToLower(endpoint), "unix://") || strings.HasPrefix(strings.ToLower(endpoint), "tcp://") { s := strings.SplitN(endpoint, "://", 2) if s[1] != "" { network = s[0] address = s[1] } else { return nil, fmt.Errorf("invalid endpoint: %s", endpoint) } } server := grpc.NewServer() s := &Server{ grpcServer: server, socketPath: address, network: network, } v1alpha1.RegisterCSIDriverProviderServer(server, s) return s, nil } // GetSocketPath returns the socket path func (s *Server) GetSocketPath() string { return s.socketPath } // Start starts the mock csi-provider server func (s *Server) Start() error { listener, err := net.Listen(s.network, s.GetSocketPath()) if err != nil { return err } klog.InfoS("Listening for connections", "address", listener.Addr()) go func() { if err := s.grpcServer.Serve(listener); err != nil { return } }() return nil } // Stop stops the mock csi-provider server func (s *Server) Stop() { s.grpcServer.GracefulStop() } // Mount implements provider csi-provider method func (s *Server) Mount(ctx context.Context, req *v1alpha1.MountRequest) (*v1alpha1.MountResponse, error) { var attrib, secret map[string]string var filePermission os.FileMode var err error resp := &v1alpha1.MountResponse{ ObjectVersion: []*v1alpha1.ObjectVersion{}, } if err = json.Unmarshal([]byte(req.GetAttributes()), &attrib); err != nil { return nil, fmt.Errorf("failed to unmarshal attributes, error: %w", err) } if err = json.Unmarshal([]byte(req.GetSecrets()), &secret); err != nil { return nil, fmt.Errorf("failed to unmarshal secrets, error: %w", err) } if err = json.Unmarshal([]byte(req.GetPermission()), &filePermission); err != nil { return nil, fmt.Errorf("failed to unmarshal file permission, error: %w", err) } if len(req.GetTargetPath()) == 0 { return nil, fmt.Errorf("missing target path") } objectsStrings := attrib["objects"] if objectsStrings == "" { return nil, fmt.Errorf("objects is not set") } var objects types.StringArray err = 
yaml.Unmarshal([]byte(objectsStrings), &objects) if err != nil { return nil, fmt.Errorf("failed to yaml unmarshal objects, error: %w", err) } mockSecretsStoreObjects := []types.MockSecretsStoreObject{} for i, object := range objects.Array { var mockSecretsStoreObject types.MockSecretsStoreObject err = yaml.Unmarshal([]byte(object), &mockSecretsStoreObject) if err != nil { return nil, fmt.Errorf("unmarshal failed for keyVaultObjects at index %d, error: %w", i, err) } mockSecretsStoreObjects = append(mockSecretsStoreObjects, mockSecretsStoreObject) } for _, mockSecretsStoreObject := range mockSecretsStoreObjects { secretFile, version, err := getSecret(mockSecretsStoreObject.ObjectName, attrib[podUIDAttribute]) if err != nil { return nil, fmt.Errorf("failed to get secret, error: %w", err) } resp.Files = append(resp.Files, secretFile) resp.ObjectVersion = append(resp.ObjectVersion, version) } // if validate token flag is set, we want to check the service account tokens as passed // as part of the mount attributes. // In case of 1.21+, kubelet will generate the token and pass it as part of the volume context. // The driver will pass this to the provider as part of the mount request. // For 1.20, the driver will generate the token and pass it to the provider as part of the mount request. // Irrespective of the kubernetes version, the rotation handler in the driver will generate the token // and pass it to the provider as part of the mount request. // VALIDATE_TOKENS_AUDIENCE environment variable will be a comma separated list of audiences configured in the csidriver object // If this env var is not set, this could mean we are running an older version of driver. tokenAudiences := os.Getenv("VALIDATE_TOKENS_AUDIENCE") klog.InfoS("tokenAudiences", "tokenAudiences", tokenAudiences) if tokenAudiences != "" { if err := validateTokens(tokenAudiences, attrib[serviceAccountTokensAttribute]); err != nil { return nil, fmt.Errorf("failed to validate token, error: %w", err) } } m.Lock() podCache[attrib[podUIDAttribute]] = true m.Unlock() return resp, nil } func getSecret(secretName, podUID string) (*v1alpha1.File, *v1alpha1.ObjectVersion, error) { secretVersion := "v1" secretContent := secrets[secretName] // If pod found in cache, then it means that pod is being called for the second time for rotation // In this case, we should return the 'rotated' secret. 
m.RLock() if ok := podCache[podUID]; ok { if os.Getenv("ROTATION_ENABLED") == "true" { // ROTATION_ENABLED is set to true only when rotation tests are running secretVersion = "v2" secretContent = "rotated" } } m.RUnlock() secretFile := &v1alpha1.File{ Path: secretName, Contents: []byte(secretContent), } version := &v1alpha1.ObjectVersion{ Id: fmt.Sprintf("secret/%s", secretName), Version: secretVersion, } return secretFile, version, nil } // Version implements provider csi-provider method func (s *Server) Version(ctx context.Context, req *v1alpha1.VersionRequest) (*v1alpha1.VersionResponse, error) { return &v1alpha1.VersionResponse{ Version: "v1alpha1", RuntimeName: "E2EMockProvider", RuntimeVersion: "v0.0.10", }, nil } // RotationHandler enables rotation response for the mock provider func RotationHandler(w http.ResponseWriter, r *http.Request) { // enable rotation response os.Setenv("ROTATION_ENABLED", r.FormValue("rotated")) klog.InfoS("Rotation response enabled") } // ValidateTokenAudienceHandler enables token validation for the mock provider // This is only required because older versions of the driver don't generate a token // TODO(aramase): remove this after the supported driver releases are v1.1.0+ func ValidateTokenAudienceHandler(w http.ResponseWriter, r *http.Request) { // enable token audience validation os.Setenv("VALIDATE_TOKENS_AUDIENCE", r.FormValue("audience")) klog.InfoS("Validation for token requests audience", "audience", os.Getenv("VALIDATE_TOKENS_AUDIENCE")) } // validateTokens checks there are tokens for distinct audiences in the // service account token attribute. func validateTokens(tokenAudiences, saTokens string) error { ta := strings.Split(strings.TrimSpace(tokenAudiences), ",") if saTokens == "" { return fmt.Errorf("service account tokens is not set") } tokens := make(map[string]interface{}) if err := json.Unmarshal([]byte(saTokens), &tokens); err != nil { return fmt.Errorf("failed to unmarshal service account tokens, error: %w", err) } for _, a := range ta { if _, ok := tokens[a]; !ok { return fmt.Errorf("service account token for audience %s is not set", a) } klog.InfoS("Validated service account token", "audience", a) } return nil }
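validateTokens above treats the service-account token attribute as a JSON object keyed by audience and demands an entry for every audience named in the comma-separated VALIDATE_TOKENS_AUDIENCE value. A hedged Python sketch of the same check:

import json

def validate_tokens(token_audiences, sa_tokens):
    # Require a token entry for each configured audience, like the Go helper above.
    if not sa_tokens:
        raise ValueError("service account tokens is not set")
    tokens = json.loads(sa_tokens)
    for audience in token_audiences.strip().split(","):
        if audience not in tokens:
            raise ValueError(f"service account token for audience {audience} is not set")

validate_tokens("aud1,aud2", json.dumps({"aud1": "tok1", "aud2": "tok2"}))  # passes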
[ "\"VALIDATE_TOKENS_AUDIENCE\"", "\"ROTATION_ENABLED\"", "\"VALIDATE_TOKENS_AUDIENCE\"" ]
[]
[ "ROTATION_ENABLED", "VALIDATE_TOKENS_AUDIENCE" ]
[]
["ROTATION_ENABLED", "VALIDATE_TOKENS_AUDIENCE"]
go
2
0
cmd/L/main.go
package main import ( "bytes" "context" "flag" "fmt" "io/ioutil" "log" "net" "os" "path/filepath" "strconv" p9client "9fans.net/go/plan9/client" "github.com/fhs/acme-lsp/internal/golang_org_x_tools/jsonrpc2" "github.com/fhs/acme-lsp/internal/lsp" "github.com/fhs/acme-lsp/internal/lsp/acmelsp" "github.com/fhs/acme-lsp/internal/lsp/acmelsp/config" "github.com/fhs/acme-lsp/internal/lsp/cmd" "github.com/fhs/acme-lsp/internal/lsp/protocol" "github.com/fhs/acme-lsp/internal/lsp/proxy" "github.com/pkg/errors" ) //go:generate ../../scripts/mkdocs.sh const mainDoc = `The program L sends messages to the Language Server Protocol proxy server acme-lsp. L is usually run from within the acme text editor, where the $winid environment variable is set to the ID of the currently focused window. It sends this ID to acme-lsp, which uses it to compute the context for LSP commands. Note: L merely asks acme-lsp to run an LSP command--any output of the command is printed to stdout by acme-lsp, not L. If L is run outside of acme (therefore $winid is not set), L will attempt to find the focused window ID by connecting to acmefocused (https://godoc.org/github.com/fhs/acme-lsp/cmd/acmefocused). Usage: L <sub-command> [args...] List of sub-commands: comp [-e] Print candidate completions at current cursor position. If -e (edit) flag is given and there is only one candidate, the completion is applied instead of being printed. def Find where the identifier at the cursor position is defined and send the location to the plumber. fmt Organize imports and format current window buffer. hov Show more information about the identifier under the cursor ("hover"). refs List locations where the identifier under the cursor is used ("references"). rn <newname> Rename the identifier under the cursor to newname. sig Show signature help for the function, method, etc. under the cursor. syms List symbols in the current file. type Find where the type of the identifier at the cursor position is defined and send the location to the plumber. assist [comp|hov|sig] A new window is created where completion (comp), hover (hov), or signature help (sig) output is shown depending on the cursor position in the focused window and the text surrounding the cursor. If the optional argument is given, the output will be limited to only that command. Note: this is a very experimental feature, and may not be very useful in practice. ws List current set of workspace directories. ws+ [directories...] Add given directories to the set of workspace directories. Current working directory is added if no directory is specified. ws- [directories...] Remove given directories from the set of workspace directories. Current working directory is removed if no directory is specified.
` func usage() { os.Stderr.Write([]byte(mainDoc)) fmt.Fprintf(os.Stderr, "\n") flag.PrintDefaults() os.Exit(2) } func main() { flag.Usage = usage cfg := cmd.Setup(config.ProxyFlags) err := run(cfg, flag.Args()) if err != nil { log.Fatalf("%v", err) } } func run(cfg *config.Config, args []string) error { ctx := context.Background() if len(args) == 0 { usage() } conn, err := net.Dial(cfg.ProxyNetwork, cfg.ProxyAddress) if err != nil { return fmt.Errorf("dial failed: %v", err) } defer conn.Close() stream := jsonrpc2.NewHeaderStream(conn, conn) ctx, rpc, server := proxy.NewClient(ctx, stream, nil) go rpc.Run(ctx) ver, err := server.Version(ctx) if err != nil { return err } if ver != proxy.Version { return fmt.Errorf("acme-lsp speaks protocol version %v but L speaks version %v (make sure they come from the same release)", ver, proxy.Version) } switch args[0] { case "ws": folders, err := server.WorkspaceFolders(ctx) if err != nil { return err } for _, d := range folders { fmt.Printf("%v\n", d.Name) } return nil case "ws+": dirs, err := dirsOrCurrentDir(args[1:]) if err != nil { return err } return server.DidChangeWorkspaceFolders(ctx, &protocol.DidChangeWorkspaceFoldersParams{ Event: protocol.WorkspaceFoldersChangeEvent{ Added: dirs, }, }) case "ws-": dirs, err := dirsOrCurrentDir(args[1:]) if err != nil { return err } return server.DidChangeWorkspaceFolders(ctx, &protocol.DidChangeWorkspaceFoldersParams{ Event: protocol.WorkspaceFoldersChangeEvent{ Removed: dirs, }, }) case "win", "assist": // "win" is deprecated args = args[1:] sm := &acmelsp.UnitServerMatcher{Server: server} if len(args) == 0 { return acmelsp.Assist(sm, "auto") } switch args[0] { case "comp", "sig", "hov", "auto": return acmelsp.Assist(sm, args[0]) } return fmt.Errorf("unknown assist command %q", args[0]) } winid, err := getWinID() if err != nil { return err } rc := acmelsp.NewRemoteCmd(server, winid) // In case the window has unsaved changes (it's dirty), sync changes with LSP server. 
err = rc.DidChange(ctx) if err != nil { return fmt.Errorf("DidChange failed: %v", err) } switch args[0] { case "comp": args = args[1:] return rc.Completion(ctx, len(args) > 0 && args[0] == "-e") case "def": return rc.Definition(ctx) case "fmt": return rc.OrganizeImportsAndFormat(ctx) case "hov": return rc.Hover(ctx) case "refs": return rc.References(ctx) case "rn": args = args[1:] if len(args) < 1 { usage() } return rc.Rename(ctx, args[0]) case "sig": return rc.SignatureHelp(ctx) case "syms": return rc.DocumentSymbol(ctx) case "type": return rc.TypeDefinition(ctx) } return fmt.Errorf("unknown command %q", args[0]) } func getWinID() (int, error) { winid, err := getFocusedWinID(filepath.Join(p9client.Namespace(), "acmefocused")) if err != nil { return 0, errors.Wrap(err, "could not get focused window ID") } n, err := strconv.Atoi(winid) if err != nil { return 0, errors.Wrapf(err, "failed to parse $winid") } return n, nil } func dirsOrCurrentDir(dirs []string) ([]protocol.WorkspaceFolder, error) { if len(dirs) == 0 { d, err := os.Getwd() if err != nil { return nil, err } dirs = []string{d} } return lsp.DirsToWorkspaceFolders(dirs) } func getFocusedWinID(addr string) (string, error) { winid := os.Getenv("winid") if winid == "" { conn, err := net.Dial("unix", addr) if err != nil { return "", errors.Wrap(err, "$winid is empty and could not dial acmefocused") } defer conn.Close() b, err := ioutil.ReadAll(conn) if err != nil { return "", errors.Wrap(err, "$winid is empty and could not read acmefocused") } return string(bytes.TrimSpace(b)), nil } return winid, nil }
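getFocusedWinID above prefers the $winid environment variable and only dials the acmefocused Unix socket when it is empty. The same fallback in Python, assuming the caller supplies the socket path:

import os
import socket

def focused_win_id(addr):
    # Return $winid if acme set it, otherwise ask acmefocused for the focused window ID.
    winid = os.environ.get("winid", "")
    if winid:
        return winid
    with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as conn:
        conn.connect(addr)
        chunks = []
        while True:
            data = conn.recv(4096)
            if not data:
                break
            chunks.append(data)
    return b"".join(chunks).strip().decode()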
[ "\"winid\"" ]
[]
[ "winid" ]
[]
["winid"]
go
1
0
frameworks/Go/go-std/hello_mysql.go
package main import ( "database/sql" "encoding/json" "flag" "fmt" "html/template" "log" "math/rand" "net" "net/http" "os" "os/exec" "runtime" "sort" "strconv" _ "github.com/go-sql-driver/mysql" ) type Message struct { Message string `json:"message"` } type World struct { Id uint16 `json:"id"` RandomNumber uint16 `json:"randomNumber"` } type Fortune struct { Id uint16 `json:"id"` Message string `json:"message"` } const ( // Content helloWorldString = "Hello, World!" fortuneHTML = `<!DOCTYPE html> <html> <head> <title>Fortunes</title> </head> <body> <table> <tr> <th>id</th> <th>message</th> </tr> {{range .}} <tr> <td>{{.Id}}</td> <td>{{.Message}}</td> </tr> {{end}} </table> </body> </html>` // Databases // // `interpolateParams=true` enables client side parameter interpolation. // It reduces round trips without prepared statement. // // We can see difference between prepared statement and interpolation by comparing go-std-mysql and go-std-mysql-interpolate connectionString = "benchmarkdbuser:benchmarkdbpass@tcp(%s:3306)/hello_world?interpolateParams=true" worldSelect = "SELECT id, randomNumber FROM World WHERE id = ?" worldUpdate = "UPDATE World SET randomNumber = ? WHERE id = ?" fortuneSelect = "SELECT id, message FROM Fortune" worldRowCount = 10000 maxConnections = 256 ) var ( helloWorldBytes = []byte(helloWorldString) // Templates tmpl = template.Must(template.New("fortune.html").Parse(fortuneHTML)) // Database db *sql.DB worldSelectPrepared *sql.Stmt worldUpdatePrepared *sql.Stmt fortuneSelectPrepared *sql.Stmt ) var prefork = flag.Bool("prefork", false, "use prefork") var child = flag.Bool("child", false, "is child proc") func initDB() { var err error var dbhost = os.Getenv("DBHOST") if dbhost == "" { dbhost = "localhost" } db, err = sql.Open("mysql", fmt.Sprintf(connectionString, dbhost)) if err != nil { log.Fatalf("Error opening database: %v", err) } db.SetMaxIdleConns(maxConnections) db.SetMaxOpenConns(maxConnections) worldSelectPrepared, err = db.Prepare(worldSelect) if err != nil { log.Fatal(err) } worldUpdatePrepared, err = db.Prepare(worldUpdate) if err != nil { log.Fatal(err) } fortuneSelectPrepared, err = db.Prepare(fortuneSelect) if err != nil { log.Fatal(err) } } func main() { var listener net.Listener flag.Parse() if !*prefork { runtime.GOMAXPROCS(runtime.NumCPU()) } else { listener = doPrefork() } initDB() http.HandleFunc("/json", jsonHandler) http.HandleFunc("/db", dbHandler) http.HandleFunc("/dbInterpolate", dbInterpolateHandler) http.HandleFunc("/queries", queriesHandler) http.HandleFunc("/queriesInterpolate", queriesInterpolateHandler) http.HandleFunc("/fortune", fortuneHandler) http.HandleFunc("/fortuneInterpolate", fortuneInterpolateHandler) http.HandleFunc("/update", updateHandler) http.HandleFunc("/updateInterpolate", updateInterpolateHandler) http.HandleFunc("/plaintext", plaintextHandler) if !*prefork { http.ListenAndServe(":8080", nil) } else { http.Serve(listener, nil) } } func doPrefork() (listener net.Listener) { var err error var fl *os.File var tcplistener *net.TCPListener if !*child { var addr *net.TCPAddr addr, err = net.ResolveTCPAddr("tcp", ":8080") if err != nil { log.Fatal(err) } tcplistener, err = net.ListenTCP("tcp", addr) if err != nil { log.Fatal(err) } fl, err = tcplistener.File() if err != nil { log.Fatal(err) } children := make([]*exec.Cmd, runtime.NumCPU()/2) for i := range children { children[i] = exec.Command(os.Args[0], "-prefork", "-child") children[i].Stdout = os.Stdout children[i].Stderr = os.Stderr children[i].ExtraFiles = []*os.File{fl} 
err = children[i].Start() if err != nil { log.Fatal(err) } } for _, ch := range children { var err error = ch.Wait() if err != nil { log.Print(err) } } os.Exit(0) } else { fl = os.NewFile(3, "") listener, err = net.FileListener(fl) if err != nil { log.Fatal(err) } runtime.GOMAXPROCS(2) } return listener } func getQueriesParam(r *http.Request) int { n := 1 if nStr := r.URL.Query().Get("queries"); len(nStr) > 0 { n, _ = strconv.Atoi(nStr) } if n < 1 { n = 1 } else if n > 500 { n = 500 } return n } // Test 1: JSON serialization func jsonHandler(w http.ResponseWriter, r *http.Request) { w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(&Message{helloWorldString}) } // Test 2: Single database query func dbHandler(w http.ResponseWriter, r *http.Request) { var world World err := worldSelectPrepared.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world.Id, &world.RandomNumber) if err != nil { log.Fatalf("Error scanning world row: %s", err.Error()) } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(&world) } func dbInterpolateHandler(w http.ResponseWriter, r *http.Request) { var world World err := db.QueryRow(worldSelect, rand.Intn(worldRowCount)+1).Scan(&world.Id, &world.RandomNumber) if err != nil { log.Fatalf("Error scanning world row: %s", err.Error()) } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(&world) } // Test 3: Multiple database queries func queriesHandler(w http.ResponseWriter, r *http.Request) { n := getQueriesParam(r) world := make([]World, n) for i := 0; i < n; i++ { err := worldSelectPrepared.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber) if err != nil { log.Fatalf("Error scanning world row: %v", err) } } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(world) } func queriesInterpolateHandler(w http.ResponseWriter, r *http.Request) { n := getQueriesParam(r) world := make([]World, n) for i := 0; i < n; i++ { err := db.QueryRow(worldSelect, rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber) if err != nil { log.Fatalf("Error scanning world row: %v", err) } } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(world) } // Test 4: Fortunes func fortuneHandler(w http.ResponseWriter, r *http.Request) { rows, err := fortuneSelectPrepared.Query() if err != nil { log.Fatalf("Error preparing statement: %v", err) } fortunes := fetchFortunes(rows) fortunes = append(fortunes, &Fortune{Message: "Additional fortune added at request time."}) sort.Sort(ByMessage{fortunes}) w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "text/html; charset=utf-8") if err := tmpl.Execute(w, fortunes); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } } func fortuneInterpolateHandler(w http.ResponseWriter, r *http.Request) { rows, err := db.Query(fortuneSelect) if err != nil { log.Fatalf("Error preparing statement: %v", err) } fortunes := fetchFortunes(rows) fortunes = append(fortunes, &Fortune{Message: "Additional fortune added at request time."}) sort.Sort(ByMessage{fortunes}) w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "text/html; charset=utf-8") if err := tmpl.Execute(w, fortunes); err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } } func fetchFortunes(rows *sql.Rows) Fortunes { defer rows.Close() 
fortunes := make(Fortunes, 0, 16) for rows.Next() { //Fetch rows fortune := Fortune{} if err := rows.Scan(&fortune.Id, &fortune.Message); err != nil { log.Fatalf("Error scanning fortune row: %s", err.Error()) } fortunes = append(fortunes, &fortune) } return fortunes } // Test 5: Database updates func updateHandler(w http.ResponseWriter, r *http.Request) { n := getQueriesParam(r) world := make([]World, n) for i := 0; i < n; i++ { if err := worldSelectPrepared.QueryRow(rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber); err != nil { log.Fatalf("Error scanning world row: %v", err) } world[i].RandomNumber = uint16(rand.Intn(worldRowCount) + 1) if _, err := worldUpdatePrepared.Exec(world[i].RandomNumber, world[i].Id); err != nil { log.Fatalf("Error updating world row: %v", err) } } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") encoder := json.NewEncoder(w) encoder.Encode(world) } func updateInterpolateHandler(w http.ResponseWriter, r *http.Request) { n := getQueriesParam(r) world := make([]World, n) for i := 0; i < n; i++ { if err := db.QueryRow(worldSelect, rand.Intn(worldRowCount)+1).Scan(&world[i].Id, &world[i].RandomNumber); err != nil { log.Fatalf("Error scanning world row: %v", err) } world[i].RandomNumber = uint16(rand.Intn(worldRowCount) + 1) if _, err := db.Exec(worldUpdate, world[i].RandomNumber, world[i].Id); err != nil { log.Fatalf("Error updating world row: %v", err) } } w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "application/json") encoder := json.NewEncoder(w) encoder.Encode(world) } // Test 6: Plaintext func plaintextHandler(w http.ResponseWriter, r *http.Request) { w.Header().Set("Server", "Go") w.Header().Set("Content-Type", "text/plain") w.Write(helloWorldBytes) } type Fortunes []*Fortune func (s Fortunes) Len() int { return len(s) } func (s Fortunes) Swap(i, j int) { s[i], s[j] = s[j], s[i] } type ByMessage struct{ Fortunes } func (s ByMessage) Less(i, j int) bool { return s.Fortunes[i].Message < s.Fortunes[j].Message }
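// A minimal sketch, not part of the original benchmark source: getQueriesParam
// above clamps the ?queries= parameter to the range [1, 500], falling back to 1
// for missing or non-numeric values. A table-driven check of that clamping
// could live in a hypothetical separate file such as main_test.go:

package main

import (
	"net/http/httptest"
	"testing"
)

func TestGetQueriesParam(t *testing.T) {
	cases := map[string]int{
		"/queries":             1,   // missing parameter defaults to 1
		"/queries?queries=0":   1,   // below range, clamped up to 1
		"/queries?queries=7":   7,   // in range, passed through unchanged
		"/queries?queries=999": 500, // above range, clamped down to 500
		"/queries?queries=abc": 1,   // non-numeric: Atoi fails, falls back to 1
	}
	for url, want := range cases {
		r := httptest.NewRequest("GET", url, nil)
		if got := getQueriesParam(r); got != want {
			t.Errorf("%s: got %d, want %d", url, got, want)
		}
	}
}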
[ "\"DBHOST\"" ]
[]
[ "DBHOST" ]
[]
["DBHOST"]
go
1
0
cloudinitd/nosetests/service_unit_tests.py
import os import uuid from cloudinitd.cb_iaas import IaaSTestInstance from cloudinitd.exceptions import APIUsageException from cloudinitd.pollables import InstanceHostnamePollable from cloudinitd.user_api import CloudInitD import unittest class ServiceUnitTests(unittest.TestCase): def test_baddir_name(self): try: cb = CloudInitD("baddir", db_name="badname") fail = True except APIUsageException: fail = False self.assertFalse(fail) def test_bad_opts1(self): try: cb = CloudInitD("/tmp") fail = True except APIUsageException: fail = False self.assertFalse(fail) def test_service_poll(self): x = None if 'CLOUDINITD_TESTENV' in os.environ: x = os.environ['CLOUDINITD_TESTENV'] os.environ['CLOUDINITD_TESTENV'] = "1" h1 = str(uuid.uuid1()) instance = IaaSTestInstance(h1, time_to_hostname=1) p = InstanceHostnamePollable(instance=instance) p.start() rc = False while not rc: rc = p.poll() h2 = p.get_hostname() self.assertEqual(h1, h2) i = p.get_instance() self.assertEqual(instance, i) if x: os.environ['CLOUDINITD_TESTENV'] = x else: del os.environ['CLOUDINITD_TESTENV'] if __name__ == '__main__': unittest.main()
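# A minimal sketch, not part of the original cloudinitd source: test_service_poll
# above drives the pollable with an unbounded "while not rc" loop. A bounded
# variant of the same pattern might look like this; the max_polls cap is an
# assumption of this sketch, not part of the cloudinitd API.

def poll_until_done(pollable, max_polls=10000):
    """Call pollable.poll() until it reports completion or the cap is hit."""
    for _ in range(max_polls):
        if pollable.poll():
            return
    raise RuntimeError("pollable did not complete within %d polls" % max_polls)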
[]
[]
[ "CLOUDINITD_TESTENV" ]
[]
["CLOUDINITD_TESTENV"]
python
1
0
test/test/conftest.py
import os import shutil from pathlib import Path from typing import Iterator import pytest from pyspark.sql import SparkSession from test import test_spark_feature_library def path_to_test_data() -> Path: return Path(os.path.dirname(__file__)).joinpath(os.pardir, "data") def path_department_test() -> Path: return path_to_test_data().joinpath("Department_Information.csv").absolute() def path_employee_test() -> Path: return path_to_test_data().joinpath("Employee_Information.csv").absolute() def path_student_counceling_test() -> Path: return path_to_test_data().joinpath("Student_Counceling_Information.csv").absolute() def path_student_performance_test() -> Path: return path_to_test_data().joinpath("Student_Performance_Data.csv.gz").absolute() @pytest.fixture def path_test_spark_feature_library() -> Path: return Path(os.path.dirname(test_spark_feature_library.__file__)) @pytest.fixture def path_test_feature_lists() -> Path: return Path(os.path.dirname(__file__)).joinpath("test_feature_lists") @pytest.fixture(scope="session") def path_tmp_folder() -> Path: return Path(os.path.dirname(__file__)).joinpath(os.pardir).joinpath("temp") @pytest.fixture(scope="session") def path_conf_folder() -> Path: return Path(os.path.dirname(__file__)).joinpath(os.pardir).joinpath("conf") def get_or_create_spark_session() -> SparkSession: """ Create and/or retrieve an Apache Spark Session. :return: a live Spark Session """ spark = SparkSession.builder.getOrCreate() spark.sparkContext.setLogLevel("ERROR") log4j = spark._jvm.org.apache.log4j logger = log4j.LogManager.getLogger("ERROR") # spark.conf.set("spark.sql.execution.arrow.enabled", "true") return spark @pytest.fixture(scope="session") def spark_session() -> Iterator[SparkSession]: spark = get_or_create_spark_session() yield spark # this will run whenever the last test of the session has # completed spark.stop() @pytest.fixture(scope="session", autouse=True) def clean_tmp_folder(path_tmp_folder: Path) -> Iterator[None]: def _clean_tmp(): if path_tmp_folder.exists(): for subpath in os.listdir(path_tmp_folder.as_posix()): shutil.rmtree( path_tmp_folder.joinpath(subpath).as_posix(), ignore_errors=True ) _clean_tmp() yield _clean_tmp() @pytest.fixture(scope="session", autouse=True) def set_spark_conf_dir(path_conf_folder: Path): os.environ["SPARK_CONF_DIR"] = path_conf_folder.as_posix()
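# A minimal sketch, not part of the original conftest.py: a smoke test consuming
# the session-scoped spark_session fixture defined above. The module name
# (e.g. test_smoke.py) and the toy data are illustrative assumptions.

def test_spark_session_smoke(spark_session):
    # Round-trip a tiny DataFrame through Spark to confirm the fixture
    # yields a live session before heavier feature tests run.
    df = spark_session.createDataFrame([(1, "a"), (2, "b")], ["id", "label"])
    assert df.count() == 2
    assert set(df.columns) == {"id", "label"}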
[]
[]
[ "SPARK_CONF_DIR" ]
[]
["SPARK_CONF_DIR"]
python
1
0
app/pkg/log/log.go
package log import ( "github.com/ansidev/gin-starter-project/constant" "go.uber.org/zap" "go.uber.org/zap/zapcore" "log" "os" "strings" "time" ) var ( l *zap.Logger s *zap.SugaredLogger Debugz func(message string, fields ...zap.Field) Infoz func(message string, fields ...zap.Field) Warnz func(message string, fields ...zap.Field) Errorz func(message string, fields ...zap.Field) Fatalz func(message string, fields ...zap.Field) Panicz func(message string, fields ...zap.Field) Debug func(args ...interface{}) Info func(args ...interface{}) Warn func(args ...interface{}) Error func(args ...interface{}) Fatal func(args ...interface{}) Panic func(args ...interface{}) Sync func() error ) func L() *zap.Logger { return l } // InitLogger init logger for application // encoding: console, json func InitLogger(encoding string) { level := GetLogLevel() config := zap.Config{ Encoding: encoding, Level: level, OutputPaths: []string{"stderr"}, ErrorOutputPaths: []string{"stderr"}, EncoderConfig: zapcore.EncoderConfig{ MessageKey: "message", TimeKey: "time", LevelKey: "level", CallerKey: "caller", EncodeCaller: zapcore.ShortCallerEncoder, EncodeLevel: zapcore.CapitalLevelEncoder, EncodeTime: timeEncoder, }, } var err error l, err = config.Build() if err != nil { log.Fatal("Could not build logger:", err) } s = l.Sugar() injectFunctions() } // GetLogLevel reads LOG_LEVEL from the environment, defaulting to "info" when it is unset func GetLogLevel() zap.AtomicLevel { logLevel := os.Getenv("LOG_LEVEL") if logLevel == "" { logLevel = "info" } level, err := zap.ParseAtomicLevel(strings.ToLower(logLevel)) if err != nil { log.Fatal("Could not parse log level:", err) } return level } func timeEncoder(t time.Time, enc zapcore.PrimitiveArrayEncoder) { enc.AppendString(t.Format(constant.DateTimeFormat)) } func injectFunctions() { Debugz = l.Debug Infoz = l.Info Warnz = l.Warn Errorz = l.Error Fatalz = l.Fatal Panicz = l.Panic Debug = s.Debug Info = s.Info Warn = s.Warn Error = s.Error Fatal = s.Fatal Panic = s.Panic Sync = l.Sync }
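// A minimal sketch, not part of the original package: typical wiring from an
// application entry point. The import path is inferred from the module path
// used above, and the LOG_LEVEL value is illustrative; both are assumptions.
// This would live in its own hypothetical file, e.g. cmd/server/main.go:

package main

import (
	"os"

	"github.com/ansidev/gin-starter-project/app/pkg/log"
	"go.uber.org/zap"
)

func main() {
	os.Setenv("LOG_LEVEL", "debug") // GetLogLevel reads this; unset falls back to "info"
	log.InitLogger("json")          // encoding may be "console" or "json"
	defer log.Sync()                // flush any buffered log entries on exit

	log.Infoz("service started", zap.String("encoding", "json"))
	log.Debug("sugared logging also works: ", 42)
}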
[ "\"LOG_LEVEL\"" ]
[]
[ "LOG_LEVEL" ]
[]
["LOG_LEVEL"]
go
1
0
tools/java/org.hl7.fhir.tools.core/src/org/hl7/fhir/tools/publisher/Publisher.java
package org.hl7.fhir.tools.publisher; /* Copyright (c) 2011+, HL7, Inc All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of HL7 nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.StringReader; import java.io.StringWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.net.URL; import java.net.URLDecoder; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.zip.ZipEntry; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.transform.OutputKeys; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.URIResolver; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.stream.StreamSource; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.hl7.fhir.convertors.VersionConvertor_30_40; import org.hl7.fhir.definitions.Config; import org.hl7.fhir.definitions.generators.specification.DataTypeTableGenerator; import org.hl7.fhir.definitions.generators.specification.DictHTMLGenerator; import org.hl7.fhir.definitions.generators.specification.FhirTurtleGenerator; import org.hl7.fhir.definitions.generators.specification.JsonSpecGenerator; import org.hl7.fhir.definitions.generators.specification.MappingsGenerator; import org.hl7.fhir.definitions.generators.specification.ProfileGenerator; import 
org.hl7.fhir.definitions.generators.specification.ResourceTableGenerator; import org.hl7.fhir.definitions.generators.specification.ReviewSpreadsheetGenerator; import org.hl7.fhir.definitions.generators.specification.SchematronGenerator; import org.hl7.fhir.definitions.generators.specification.SvgGenerator; import org.hl7.fhir.definitions.generators.specification.TerminologyNotesGenerator; import org.hl7.fhir.definitions.generators.specification.ToolResourceUtilities; import org.hl7.fhir.definitions.generators.specification.TurtleSpecGenerator; import org.hl7.fhir.definitions.generators.specification.W5TurtleGenerator; import org.hl7.fhir.definitions.generators.specification.XPathQueryGenerator; import org.hl7.fhir.definitions.generators.specification.XmlSpecGenerator; import org.hl7.fhir.definitions.generators.xsd.SchemaGenerator; import org.hl7.fhir.definitions.model.BindingSpecification; import org.hl7.fhir.definitions.model.Compartment; import org.hl7.fhir.definitions.model.ConstraintStructure; import org.hl7.fhir.definitions.model.DefinedCode; import org.hl7.fhir.definitions.model.DefinedStringPattern; import org.hl7.fhir.definitions.model.Definitions; import org.hl7.fhir.definitions.model.Definitions.NamespacePair; import org.hl7.fhir.definitions.model.Definitions.PageInformation; import org.hl7.fhir.definitions.model.Dictionary; import org.hl7.fhir.definitions.model.ElementDefn; import org.hl7.fhir.definitions.model.Example; import org.hl7.fhir.definitions.model.Example.ExampleType; import org.hl7.fhir.definitions.model.ImplementationGuideDefn; import org.hl7.fhir.definitions.model.LogicalModel; import org.hl7.fhir.definitions.model.Operation; import org.hl7.fhir.definitions.model.PrimitiveType; import org.hl7.fhir.definitions.model.Profile; import org.hl7.fhir.definitions.model.ProfiledType; import org.hl7.fhir.definitions.model.ResourceDefn; import org.hl7.fhir.definitions.model.SearchParameterDefn; import org.hl7.fhir.definitions.model.SearchParameterDefn.SearchType; import org.hl7.fhir.definitions.model.TypeDefn; import org.hl7.fhir.definitions.model.WorkGroup; import org.hl7.fhir.definitions.parsers.IgParser; import org.hl7.fhir.definitions.parsers.IgParser.GuidePageKind; import org.hl7.fhir.definitions.parsers.SourceParser; import org.hl7.fhir.definitions.validation.ConceptMapValidator; import org.hl7.fhir.definitions.validation.FHIRPathUsage; import org.hl7.fhir.definitions.validation.ResourceValidator; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.igtools.publisher.SpecMapManager; import org.hl7.fhir.r5.conformance.ProfileComparer; import org.hl7.fhir.r5.conformance.ProfileComparer.ProfileComparison; import org.hl7.fhir.r5.conformance.ProfileUtilities; import org.hl7.fhir.r5.conformance.ShExGenerator; import org.hl7.fhir.r5.conformance.ShExGenerator.HTMLLinkPolicy; import org.hl7.fhir.r5.elementmodel.Manager; import org.hl7.fhir.r5.elementmodel.Manager.FhirFormat; import org.hl7.fhir.r5.elementmodel.ParserBase.ValidationPolicy; import org.hl7.fhir.r5.formats.FormatUtilities; import org.hl7.fhir.r5.formats.IParser; import org.hl7.fhir.r5.formats.IParser.OutputStyle; import org.hl7.fhir.r5.formats.JsonParser; import org.hl7.fhir.r5.formats.RdfParser; import org.hl7.fhir.r5.formats.XmlParser; import org.hl7.fhir.r5.model.Bundle; import org.hl7.fhir.r5.model.Bundle.BundleEntryComponent; import org.hl7.fhir.r5.model.Bundle.BundleType; import org.hl7.fhir.r5.model.CanonicalType; import org.hl7.fhir.r5.model.CapabilityStatement; import 
org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementKind; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestComponent; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceComponent; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementRestSecurityComponent; import org.hl7.fhir.r5.model.CapabilityStatement.CapabilityStatementSoftwareComponent; import org.hl7.fhir.r5.model.CapabilityStatement.ConditionalDeleteStatus; import org.hl7.fhir.r5.model.CapabilityStatement.ReferenceHandlingPolicy; import org.hl7.fhir.r5.model.CapabilityStatement.ResourceInteractionComponent; import org.hl7.fhir.r5.model.CapabilityStatement.RestfulCapabilityMode; import org.hl7.fhir.r5.model.CapabilityStatement.SystemInteractionComponent; import org.hl7.fhir.r5.model.CapabilityStatement.SystemRestfulInteraction; import org.hl7.fhir.r5.model.CapabilityStatement.TypeRestfulInteraction; import org.hl7.fhir.r5.model.CodeSystem; import org.hl7.fhir.r5.model.CodeSystem.ConceptDefinitionComponent; import org.hl7.fhir.r5.model.CodeType; import org.hl7.fhir.r5.model.CompartmentDefinition; import org.hl7.fhir.r5.model.CompartmentDefinition.CompartmentDefinitionResourceComponent; import org.hl7.fhir.r5.model.CompartmentDefinition.CompartmentType; import org.hl7.fhir.r5.model.ConceptMap; import org.hl7.fhir.r5.model.ConceptMap.ConceptMapGroupComponent; import org.hl7.fhir.r5.model.ConceptMap.SourceElementComponent; import org.hl7.fhir.r5.model.ConceptMap.TargetElementComponent; import org.hl7.fhir.r5.model.Constants; import org.hl7.fhir.r5.model.ContactDetail; import org.hl7.fhir.r5.model.ContactPoint; import org.hl7.fhir.r5.model.ContactPoint.ContactPointSystem; import org.hl7.fhir.r5.model.DomainResource; import org.hl7.fhir.r5.model.ElementDefinition; import org.hl7.fhir.r5.model.ElementDefinition.TypeRefComponent; import org.hl7.fhir.r5.model.Enumerations.BindingStrength; import org.hl7.fhir.r5.model.Enumerations.ConceptMapRelationship; import org.hl7.fhir.r5.model.Enumerations.FHIRVersion; import org.hl7.fhir.r5.model.Enumerations.PublicationStatus; import org.hl7.fhir.r5.model.Enumerations.SearchParamType; import org.hl7.fhir.r5.model.Factory; import org.hl7.fhir.r5.model.ImplementationGuide.ImplementationGuideDefinitionPageComponent; import org.hl7.fhir.r5.model.Meta; import org.hl7.fhir.r5.model.MetadataResource; import org.hl7.fhir.r5.model.NamingSystem; import org.hl7.fhir.r5.model.NamingSystem.NamingSystemIdentifierType; import org.hl7.fhir.r5.model.NamingSystem.NamingSystemType; import org.hl7.fhir.r5.model.NamingSystem.NamingSystemUniqueIdComponent; import org.hl7.fhir.r5.model.Narrative; import org.hl7.fhir.r5.model.Narrative.NarrativeStatus; import org.hl7.fhir.r5.model.OperationDefinition; import org.hl7.fhir.r5.model.Questionnaire; import org.hl7.fhir.r5.model.Resource; import org.hl7.fhir.r5.model.ResourceType; import org.hl7.fhir.r5.model.SearchParameter; import org.hl7.fhir.r5.model.StringType; import org.hl7.fhir.r5.model.StructureDefinition; import org.hl7.fhir.r5.model.StructureDefinition.StructureDefinitionKind; import org.hl7.fhir.r5.model.StructureDefinition.TypeDerivationRule; import org.hl7.fhir.r5.model.UriType; import org.hl7.fhir.r5.model.ValueSet; import org.hl7.fhir.r5.terminologies.CodeSystemUtilities; import org.hl7.fhir.r5.terminologies.LoincToDEConvertor; import 
org.hl7.fhir.r5.terminologies.ValueSetExpander.ValueSetExpansionOutcome; import org.hl7.fhir.r5.terminologies.ValueSetUtilities; import org.hl7.fhir.r5.utils.EOperationOutcome; import org.hl7.fhir.r5.utils.FHIRPathEngine; import org.hl7.fhir.r5.utils.GraphQLSchemaGenerator; import org.hl7.fhir.r5.utils.GraphQLSchemaGenerator.FHIROperationType; import org.hl7.fhir.r5.utils.NarrativeGenerator; import org.hl7.fhir.r5.utils.QuestionnaireBuilder; import org.hl7.fhir.r5.utils.ResourceUtilities; import org.hl7.fhir.r5.utils.StructureMapUtilities; import org.hl7.fhir.r5.utils.ToolingExtensions; import org.hl7.fhir.r5.validation.BaseValidator; import org.hl7.fhir.r5.validation.ProfileValidator; import org.hl7.fhir.r5.validation.XmlValidator; import org.hl7.fhir.rdf.RDFValidator; import org.hl7.fhir.tools.converters.CDAGenerator; import org.hl7.fhir.tools.converters.DSTU3ValidationConvertor; import org.hl7.fhir.tools.converters.SpecNPMPackageGenerator; import org.hl7.fhir.tools.converters.ValueSetImporterV2; import org.hl7.fhir.tools.converters.ValueSetImporterV3; import org.hl7.fhir.tools.implementations.XMLToolsGenerator; import org.hl7.fhir.tools.implementations.java.JavaGenerator; import org.hl7.fhir.tools.publisher.ExampleInspector.EValidationFailed; import org.hl7.fhir.utilities.CSFile; import org.hl7.fhir.utilities.CSFileInputStream; import org.hl7.fhir.utilities.CloseProtectedZipInputStream; import org.hl7.fhir.utilities.CommaSeparatedStringBuilder; import org.hl7.fhir.utilities.IniFile; import org.hl7.fhir.utilities.Logger.LogMessageType; import org.hl7.fhir.utilities.NDJsonWriter; import org.hl7.fhir.utilities.TextFile; import org.hl7.fhir.utilities.Utilities; import org.hl7.fhir.utilities.XsltUtilities; import org.hl7.fhir.utilities.ZipGenerator; import org.hl7.fhir.utilities.json.JSONUtil; import org.hl7.fhir.utilities.validation.ValidationMessage; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueSeverity; import org.hl7.fhir.utilities.validation.ValidationMessage.IssueType; import org.hl7.fhir.utilities.validation.ValidationMessage.Source; import org.hl7.fhir.utilities.xhtml.NodeType; import org.hl7.fhir.utilities.xhtml.XhtmlComposer; import org.hl7.fhir.utilities.xhtml.XhtmlDocument; import org.hl7.fhir.utilities.xhtml.XhtmlNode; import org.hl7.fhir.utilities.xhtml.XhtmlParser; import org.hl7.fhir.utilities.xml.XMLUtil; import org.hl7.fhir.utilities.xml.XhtmlGenerator; import org.hl7.fhir.utilities.xml.XmlGenerator; import org.junit.runner.JUnitCore; import org.junit.runner.Result; import org.junit.runner.notification.Failure; import org.stringtemplate.v4.ST; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.xml.sax.InputSource; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserFactory; import com.google.common.base.Charsets; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonObject; /** * This is the entry point for the publication method for FHIR The general order * of publishing is Check that everything we expect to find is found Load the * page.getDefinitions() Produce the specification 1. reference implementations * 2. schemas 4. 
final specification Validate the XML * * @author Grahame * */ public class Publisher implements URIResolver, SectionNumberer { public static final String CANONICAL_BASE = "http://build.fhir.org/"; public class DocumentHolder { public XhtmlDocument doc; } public static class Fragment { private String type; private String xml; private String page; private String id; private boolean json; public String getType() { return type; } public void setType(String type) { this.type = type; } public String getXml() { return xml; } public void setXml(String xml) { this.xml = xml; } public String getPage() { return page; } public void setPage(String page) { this.page = page; } public boolean isJson() { return json; } public void setJson(boolean json) { this.json = json; } public void setId(String id2) { this.id = id2; } } public static class ExampleReference { private final String ref; private final String path; private boolean exempt; private String id; private String type; public ExampleReference(String ref, String path) { super(); this.ref = ref; this.path = path; exempt = false; if (ref.startsWith("#")) { type = null; id = ref; exempt = true; } else if (isExemptUrl(ref)) { type = null; id = null; exempt = true; } else { String[] parts = ref.split("\\/"); if (ref.contains("_history") && parts.length >= 4) { type = parts[parts.length-4]; id = parts[parts.length-3]; } else if (parts.length >= 2) { type = parts[parts.length-2]; id = parts[parts.length-1]; } } } private boolean isExemptUrl(String url) { if (url.startsWith("urn:")) return true; if (url.startsWith("http:") && !url.startsWith("http://hl7.org/fhir")) return true; return false; } public String getPath() { return path; } public boolean hasType() { return type != null; } public String getType() { return type; } public String getId() { return id; } public boolean isExempt() { return exempt; } public String getRef() { return ref; } } private static final String HTTP_separator = "/"; private String outputdir; private SourceParser prsr; private PageProcessor page; // private BookMaker book; private JavaGenerator javaReferencePlatform; private boolean isGenerate; private boolean noArchive; private boolean web; private String diffProgram; private Bundle profileBundle; private Bundle valueSetsFeed; private Bundle conceptMapsFeed; private Bundle dataElements; private Bundle externals; private boolean noPartialBuild; private List<Fragment> fragments = new ArrayList<Publisher.Fragment>(); private Map<String, String> xmls = new HashMap<String, String>(); private Map<String, String> jsons = new HashMap<String, String>(); private Map<String, String> ttls = new HashMap<String, String>(); private Map<String, Long> dates = new HashMap<String, Long>(); private Map<String, Boolean> buildFlags = new HashMap<String, Boolean>(); private IniFile cache; private String singleResource; private String singlePage; private PublisherTestSuites tester; private List<FHIRPathUsage> fpUsages = new ArrayList<FHIRPathUsage>(); private List<ConceptMap> statusCodeConceptMaps = new ArrayList<ConceptMap>(); private int cscounter = 0; private int vscounter = 0; private int cmcounter = 0; private ProfileGenerator pgen; private boolean noSound; private boolean doValidate; private boolean isCIBuild; private boolean isPostPR; private String validateId; public static void main(String[] args) throws Exception { // Publisher pub = new Publisher(); pub.page = new PageProcessor(PageProcessor.DEF_TS_SERVER); pub.isGenerate = !(args.length > 1 && hasParam(args, "-nogen")); pub.doValidate = true; 
pub.noArchive = (args.length > 1 && hasParam(args, "-noarchive")); pub.web = (args.length > 1 && hasParam(args, "-web")); pub.page.setForPublication(pub.web); pub.diffProgram = getNamedParam(args, "-diff"); pub.noSound = (args.length > 1 && hasParam(args, "-nosound")); pub.noPartialBuild = (args.length > 1 && hasParam(args, "-nopartial")); pub.isPostPR = (args.length > 1 && hasParam(args, "-post-pr")); if (hasParam(args, "-resource")) pub.singleResource = getNamedParam(args, "-resource"); if (hasParam(args, "-page")) pub.singlePage = getNamedParam(args, "-page"); if (hasParam(args, "-name")) pub.page.setPublicationType(getNamedParam(args, "-name")); if (hasParam(args, "-url")) pub.page.setBaseURL(getNamedParam(args, "-url")); if (hasParam(args, "-svn")) pub.page.setBuildId(getNamedParam(args, "-svn")); // if (hasParam("args", "-noref")) // pub.setNoReferenceImplementations(getNamedParam(args, "-noref")); // if (hasParam(args, "-langfolder")) // pub.setAlternativeLangFolder(getNamedParam(args, "-langfolder")); if (pub.web) { pub.page.setPublicationType(PageProcessor.WEB_PUB_NAME); pub.page.setPublicationNotice(PageProcessor.WEB_PUB_NOTICE); } else { pub.page.setPublicationType(PageProcessor.CI_PUB_NAME); pub.page.setPublicationNotice(PageProcessor.CI_PUB_NOTICE); } pub.validateId = getNamedParam(args, "-validate"); String dir = hasParam(args, "-folder") ? getNamedParam(args, "-folder") : System.getProperty("user.dir"); pub.outputdir = hasParam(args, "-output") ? getNamedParam(args, "-output") : null; pub.isCIBuild = dir.contains("/ubuntu/agents/"); pub.execute(dir); } private static boolean hasParam(String[] args, String param) { for (String a : args) if (a.equals(param)) return true; return false; } private static String getNamedParam(String[] args, String param) { boolean found = false; for (String a : args) { if (found) return a; if (a.equals(param)) { found = true; } } return null; } /** * Entry point to the publisher. 
This classes Java Main() calls this function * to actually produce the specification * * @param folder * @throws IOException * @throws Exception */ public void execute(String folder) throws IOException { tester = new PublisherTestSuites(); page.log("Publish FHIR in folder " + folder + " @ " + Config.DATE_FORMAT().format(page.getGenDate().getTime()), LogMessageType.Process); if (web) page.log("Build final copy for HL7 web site", LogMessageType.Process); else page.log("Build local copy", LogMessageType.Process); if (outputdir != null) { page.log("Create output in "+outputdir, LogMessageType.Process); } page.log("Detected Java version: " + System.getProperty("java.version")+" from "+System.getProperty("java.home")+" on "+System.getProperty("os.arch"), LogMessageType.Process); try { tester.initialTests(); page.setFolders(new FolderManager(folder, outputdir)); registerReferencePlatforms(); if (!initialize(folder)) throw new Exception("Unable to publish as preconditions aren't met"); cache = new IniFile(page.getFolders().rootDir + "temp" + File.separator + "build.cache"); loadSuppressedMessages(page.getFolders().rootDir); boolean doAny = false; for (String n : dates.keySet()) { Long d = cache.getLongProperty("dates", n); boolean b = d == null || (dates.get(n) > d); cache.setLongProperty("dates", n, dates.get(n).longValue(), null); buildFlags.put(n.toLowerCase(), b); doAny = doAny || b; } cache.save(); // overriding build if (noPartialBuild || !doAny || !(new File(page.getFolders().dstDir + "qa.html").exists())) buildFlags.put("all", true); // nothing - build all if (singlePage != null) { for (String n : buildFlags.keySet()) buildFlags.put(n, false); buildFlags.put("page-"+singlePage.toLowerCase(), true); } else if (singleResource != null) { for (String n : buildFlags.keySet()) buildFlags.put(n, false); buildFlags.put(singleResource.toLowerCase(), true); } if (!buildFlags.get("all")) { if (!noSound) { AudioUtilities.tone(1000, 10); AudioUtilities.tone(1400, 10); AudioUtilities.tone(1800, 10); AudioUtilities.tone(1000, 10); AudioUtilities.tone(1400, 10); AudioUtilities.tone(1800, 10); } page.log("Partial Build (if you want a full build, just run the build again)", LogMessageType.Process); CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder(); for (String n : buildFlags.keySet()) if (buildFlags.get(n)) b.append(n); page.log(" Build: "+b.toString(), LogMessageType.Process); } else { if (!noSound) AudioUtilities.tone(1200, 30); page.log("Full Build", LogMessageType.Process); } if (isGenerate && page.getBuildId() == null) page.setBuildId(getGitBuildId()); page.log("Version " + page.getVersion() + "-" + page.getBuildId(), LogMessageType.Hint); Utilities.createDirectory(page.getFolders().dstDir); Utilities.deleteTempFiles(); page.getBreadCrumbManager().parse(page.getFolders().srcDir + "hierarchy.xml"); page.loadSnomed(); page.loadLoinc(); page.loadUcum(); buildFeedsAndMaps(); prsr.setExternals(externals); prsr.parse(page.getGenDate(), page.getValidationErrors()); for (String n : page.getDefinitions().sortedResourceNames()) if (!page.getBreadCrumbManager().knowsResource(n)) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "hierarchy.xml", "Resource not found: "+n,IssueSeverity.ERROR)); for (String n : prsr.getErrors()) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "source spreadsheets", n, IssueSeverity.ERROR)); if (web) { page.log("Clear Directory", LogMessageType.Process); 
Utilities.clearDirectory(page.getFolders().dstDir); } if (web || (isGenerate && buildFlags.get("all"))) { Utilities.createDirectory(page.getFolders().dstDir + "html"); Utilities.createDirectory(page.getFolders().dstDir + "examples"); } for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) if (!ig.isCore()) Utilities.createDirectory(page.getFolders().dstDir + ig.getCode()); if (buildFlags.get("all")) { copyStaticContent(); } loadValueSets1(); prsr.getRegistry().commit(); generateSCMaps(); validate(); processProfiles(); checkAllOk(); if (isGenerate) { produceSpecification(); checkAllOk(); } if (doValidate) validationProcess(); page.saveSnomed(); page.getWorkerContext().saveCache(); processWarnings(false); if (isGenerate && buildFlags.get("all")) produceQA(); if (!buildFlags.get("all")) { page.log("This was a Partial Build", LogMessageType.Process); CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder(); for (String n : buildFlags.keySet()) if (buildFlags.get(n)) b.append(n); page.log(" Build: "+b.toString(), LogMessageType.Process); } else page.log("This was a Full Build", LogMessageType.Process); if (!noSound) { AudioUtilities.tone(800, 10); AudioUtilities.tone(1000, 10); AudioUtilities.tone(1200, 10); AudioUtilities.tone(1000, 10); AudioUtilities.tone(800, 10); } page.log("Finished publishing FHIR @ " + Config.DATE_FORMAT().format(Calendar.getInstance().getTime()), LogMessageType.Process); } catch (Exception e) { if (!(e instanceof NullPointerException)) { // because NullPointerException is unexpected... try { processWarnings(e instanceof EValidationFailed); } catch (Exception e2) { page.log(" ERROR: Unable to process warnings: " + e2.getMessage(), LogMessageType.Error); e2.printStackTrace(); } } if (buildFlags.containsKey("all") && !buildFlags.get("all")) { page.log("This was a Partial Build", LogMessageType.Process); CommaSeparatedStringBuilder b = new CommaSeparatedStringBuilder(); for (String n : buildFlags.keySet()) if (buildFlags.get(n)) b.append(n); page.log(" Build: "+b.toString(), LogMessageType.Process); } else page.log("This was a Full Build", LogMessageType.Process); if (!noSound) { AudioUtilities.tone(800, 20); AudioUtilities.tone(1000, 20); AudioUtilities.tone(1200, 20); } try { Thread.sleep(50); } catch (InterruptedException e1) { } if (!noSound) { AudioUtilities.tone(800, 20); AudioUtilities.tone(1000, 20); AudioUtilities.tone(1200, 20); } try { Thread.sleep(50); } catch (InterruptedException e1) { } if (!noSound) { AudioUtilities.tone(800, 20); AudioUtilities.tone(1000, 20); AudioUtilities.tone(1200, 20); } page.log("FHIR build failure @ " + Config.DATE_FORMAT().format(Calendar.getInstance().getTime()), LogMessageType.Process); System.out.println("Error: " + e.getMessage()); e.printStackTrace(); TextFile.stringToFile(StringUtils.defaultString(e.getMessage()), Utilities.path(folder, "publish", "simple-error.txt")); System.exit(1); } } private String getGitBuildId() { String version = ""; try { String[] cmd = { "git", "describe", "--tags", "--always" }; Process p = Runtime.getRuntime().exec(cmd); p.waitFor(); InputStreamReader isr = new InputStreamReader(p.getInputStream()); BufferedReader br = new BufferedReader(isr); String line; while ((line = br.readLine()) != null) { version += line; } } catch (Exception e) { System.out.println("Warning @ Unable to read the git commit: " + e.getMessage() ); version = "????"; } return version; } private void generateSCMaps() throws Exception { page.log("Generate Status Code Concept Maps", 
LogMessageType.Process); for (ResourceDefn rd : page.getDefinitions().getResources().values()) { generateSCMaps(rd.getRoot().getName(), rd.getRoot(), rd); } } private void generateSCMaps(String path, ElementDefn element, ResourceDefn rd) throws Exception { if (elementHasSCMapping(path)) { ValueSet vs = element.getBinding().getValueSet(); if (vs == null) throw new Exception("Element has a Status Code binding, but no ValueSet"); ConceptMap cm = (ConceptMap) vs.getUserData("build.statuscodes.map"); if (cm == null) { cm = buildConceptMap(path, vs, rd); if (cm != null) vs.setUserData("build.statuscodes.map", cm); } } for (ElementDefn child : element.getElements()) { generateSCMaps(path+"."+child.getName(), child, rd); } } private ConceptMap buildConceptMap(String path, ValueSet vs, ResourceDefn rd) throws EOperationOutcome, FHIRException, IOException { ConceptMap cm = new ConceptMap(); cm.setUserData("path", "sc-"+vs.getUserString("path")); cm.setUserData("resource-definition", rd); cm.setId("sc-"+vs.getId()); cm.setUrl("http://hl7.org/fhir/ConceptMap/"+cm.getId()); cm.setVersion(page.getVersion().toCode()); cm.setName(vs.getName()+"CanonicalMap"); cm.setTitle("Canonical Mapping for \""+vs.present()+"\""); cm.setStatus(PublicationStatus.DRAFT); cm.setDate(vs.getDate()); cm.setPublisher(vs.getPublisher()); cm.addContact(vs.getContactFirstRep()); cm.setDescription("Canonical Mapping for \""+vs.getDescription()+"\""); cm.setSource(new CanonicalType(vs.getUrl())); cm.setTarget(new CanonicalType("http://hl7.org/fhir/ValueSet/resource-status")); List<String> canonical = page.getDefinitions().getStatusCodes().get("@code"); List<String> self = page.getDefinitions().getStatusCodes().get(path); ConceptMapGroupComponent grp = cm.addGroup(); grp.setTarget("http://hl7.org/fhir/resource-status"); grp.setSource(vs.getCompose().getIncludeFirstRep().getSystem()); for (int i =0; i < self.size(); i++) { if (!Utilities.noString(self.get(i))) { String cc = canonical.get(i); String sc = self.get(i); SourceElementComponent e = grp.addElement(); e.setCode(sc); TargetElementComponent t = e.addTarget(); t.setCode(cc); t.setRelationship(ConceptMapRelationship.EQUIVALENT); } } if (!grp.hasElement()) return null; page.getConceptMaps().put(cm.getUrl(), cm); statusCodeConceptMaps.add(cm); return cm; } private boolean elementHasSCMapping(String path) { return page.getDefinitions().getStatusCodes().containsKey(path); } private void generateRedirects() throws Exception { page.log("Produce "+Integer.toString(page.getDefinitions().getRedirectList().size())+" Redirects", LogMessageType.Process); for (String n : page.getDefinitions().getRedirectList().keySet()) { NamespacePair nsp = page.getDefinitions().getRedirectList().get(n); generateRedirect(n, nsp.desc, nsp.page); } } private void generateRedirect(String n, String desc, String pn) throws Exception { if (!n.startsWith("http://hl7.org/fhir/")) throw new Error("wrong path"); n = n.substring(20); String level = "../"; for (char c : n.toCharArray()) if (c == '/') level = level +"../"; String fullFileName = Utilities.path(page.getFolders().dstDir, n.replace("/", File.separator)); Utilities.createDirectory(fullFileName); // simple html version // String pagecnt = "<html>\r\n<head>\r\n<title>Redirect Page for "+Utilities.escapeXml(desc)+" </title>\r\n<meta http-equiv=\"REFRESH\" content=\"0;url="+ // level+pn+"\"></HEAD>\r\n</head>\r\n<body>\r\nThis page is a redirect to "+level+pn+"\r\n</body>\r\n</html>\r\n"; // asp redirection version String pagecnt = 
TextFile.fileToString(Utilities.path(page.getFolders().rootDir, "tools", "html", "redirect.asp")); pagecnt = pagecnt.replace("<%filename%>", Utilities.changeFileExt(pn, "")); String fn = Utilities.path(fullFileName, "index.asp"); if (!(new File(fn).exists())) TextFile.stringToFile(pagecnt, fn); } @SuppressWarnings("unchecked") private List<StructureDefinition> listProfiles(Map<String, Resource> igResources) throws Exception { List<StructureDefinition> list = new ArrayList<StructureDefinition>(); for (Resource ae : igResources.values()) if (ae instanceof StructureDefinition) { processProfile((StructureDefinition) ae); list.add((StructureDefinition) ae); } return list; } @SuppressWarnings("unchecked") private void loadIgReference(Resource ae) throws Exception { page.getIgResources().put(ae.getId(), ae); if (ae instanceof ValueSet) { ValueSet vs = (ValueSet) ae; page.getValueSets().put(ae.getId(), vs); } if (ae instanceof CodeSystem) page.getCodeSystems().put(ae.getId(), (CodeSystem) ae); if (ae instanceof ConceptMap) page.getConceptMaps().put(ae.getId(), (ConceptMap) ae); if (ae instanceof StructureDefinition) { StructureDefinition sd = (StructureDefinition) ae; if (page.getProfiles().containsKey(sd.getUrl())) throw new Exception("Duplicate Profile URL "+sd.getUrl()); page.getProfiles().put(sd.getUrl(), sd); } } @SuppressWarnings("unchecked") private void processProfiles() throws Exception { page.log(" ...process profiles (base)", LogMessageType.Process); // first, for each type and resource, we build it's master profile for (DefinedCode t : page.getDefinitions().getPrimitives().values()) { if (t instanceof PrimitiveType) genPrimitiveTypeProfile((PrimitiveType) t); else genPrimitiveTypeProfile((DefinedStringPattern) t); } genXhtmlProfile(); for (TypeDefn t : page.getDefinitions().getTypes().values()) genTypeProfile(t); for (TypeDefn t : page.getDefinitions().getStructures().values()) genTypeProfile(t); for (TypeDefn t : page.getDefinitions().getInfrastructure().values()) genTypeProfile(t); page.log(" ...process profiles (resources)", LogMessageType.Process); for (ResourceDefn r : page.getDefinitions().getBaseResources().values()) { r.setConformancePack(makeConformancePack(r)); r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(r.getConformancePack(), r, "core", false)); if (page.getProfiles().containsKey(r.getProfile().getUrl())) throw new Exception("Duplicate Profile URL "+r.getProfile().getUrl()); page.getProfiles().put(r.getProfile().getUrl(), r.getProfile()); ResourceTableGenerator rtg = new ResourceTableGenerator(page.getFolders().dstDir, page, null, true); r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "")); } for (String rn : page.getDefinitions().sortedResourceNames()) { ResourceDefn r = page.getDefinitions().getResourceByName(rn); r.setConformancePack(makeConformancePack(r)); r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(r.getConformancePack(), r, "core", false)); if (page.getProfiles().containsKey(r.getProfile().getUrl())) throw new Exception("Duplicate Profile URL "+r.getProfile().getUrl()); page.getProfiles().put(r.getProfile().getUrl(), r.getProfile()); ResourceTableGenerator rtg = new 
ResourceTableGenerator(page.getFolders().dstDir, page, null, true); r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "")); } for (ResourceDefn r : page.getDefinitions().getResourceTemplates().values()) { r.setConformancePack(makeConformancePack(r)); r.setProfile(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(r.getConformancePack(), r, "core", true)); ResourceTableGenerator rtg = new ResourceTableGenerator(page.getFolders().dstDir, page, null, true); r.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); r.getProfile().getText().getDiv().getChildNodes().add(rtg.generate(r, "")); if (page.getProfiles().containsKey(r.getProfile().getUrl())) throw new Exception("Duplicate Profile URL "+r.getProfile().getUrl()); page.getProfiles().put(r.getProfile().getUrl(), r.getProfile()); } for (ProfiledType pt : page.getDefinitions().getConstraints().values()) { genProfiledTypeProfile(pt); } page.log(" ...process profiles (extensions)", LogMessageType.Process); for (StructureDefinition ex : page.getWorkerContext().getExtensionDefinitions()) processExtension(ex); for (ResourceDefn r : page.getDefinitions().getResources().values()) { // boolean logged = false; for (Profile ap : r.getConformancePackages()) { // if (!logged) // page.log(" ... resource "+r.getName(), LogMessageType.Process); // logged = true; for (ConstraintStructure p : ap.getProfiles()) processProfile(ap, p, ap.getId(), r); } } page.log(" ...process profiles (packs)", LogMessageType.Process); // we have profiles scoped by resources, and stand alone profiles for (Profile ap : page.getDefinitions().getPackList()) { // page.log(" ... 
pack "+ap.getId(), LogMessageType.Process); for (ConstraintStructure p : ap.getProfiles()) processProfile(ap, p, ap.getId(), null); } page.log(" ...process logical models", LogMessageType.Process); for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) { for (LogicalModel lm : ig.getLogicalModels()) { page.log(" ...process logical model " + lm.getId(), LogMessageType.Process); if (lm.getDefinition() == null) lm.setDefinition(new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generateLogicalModel(ig, lm.getResource())); } } // now, validate the profiles for (Profile ap : page.getDefinitions().getPackList()) for (ConstraintStructure p : ap.getProfiles()) validateProfile(p); for (ResourceDefn r : page.getDefinitions().getResources().values()) for (Profile ap : r.getConformancePackages()) for (ConstraintStructure p : ap.getProfiles()) validateProfile(p); page.log(" ...Check FHIR Path Expressions", LogMessageType.Process); StringBuilder b = new StringBuilder(); FHIRPathEngine fp = new FHIRPathEngine(page.getWorkerContext()); fp.setHostServices(page.getExpressionResolver()); for (FHIRPathUsage p : fpUsages) { checkExpression(b, fp, p); } TextFile.stringToFile(b.toString(), Utilities.path(page.getFolders().dstDir, "fhirpaths.txt")); checkAllOk(); } private void checkExpression(StringBuilder b, FHIRPathEngine fp, FHIRPathUsage p) { b.append(p.getResource() + " (" + p.getContext() + "): " + p.getExpression()+"\r\n"); try { if (!"n/a".equals(p.getExpression())) { fp.check(null, p.getResource(), p.getContext(), p.getExpression()); } } catch (Exception e) { ValidationMessage validationMessage = new ValidationMessage(Source.Publisher, IssueType.STRUCTURE, -1, -1, p.getLocation(), "Expression '"+p.getExpression()+"' has illegal path ("+e.getMessage()+")", IssueSeverity.ERROR); page.getValidationErrors().add(validationMessage); } } private void processExtension(StructureDefinition ex) throws Exception { StructureDefinition bd = page.getDefinitions().getSnapShotForBase(ex.getBaseDefinition()); new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSnapshot(bd, ex, ex.getUrl(), null, ex.getName()); } private Profile makeConformancePack(ResourceDefn r) { Profile result = new Profile("core"); result.setTitle("Base Profile for "+r.getName()); return result; } private void validateProfile(ConstraintStructure p) throws Exception { ProfileValidator pv = new ProfileValidator(); pv.setContext(page.getWorkerContext()); page.getValidationErrors().addAll(pv.validate(p.getResource(), true)); } private void genProfiledTypeProfile(ProfiledType pt) throws Exception { StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(pt, page.getValidationErrors()); if (page.getProfiles().containsKey(profile.getUrl())) throw new Exception("Duplicate Profile URL "+profile.getUrl()); page.getProfiles().put(profile.getUrl(), profile); pt.setProfile(profile); page.getProfiles().put(profile.getUrl(), profile); page.getProfiles().put(profile.getName(), profile); // todo: what to do in the narrative? 
} private void genXhtmlProfile() throws Exception { StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generateXhtml(); if (page.getProfiles().containsKey(profile.getUrl())) throw new Exception("Duplicate Profile URL "+profile.getUrl()); page.getProfiles().put(profile.getUrl(), profile); page.getProfiles().put(profile.getName(), profile); // DataTypeTableGenerator dtg = new DataTypeTableGenerator(page.getFolders().dstDir, page, t.getCode(), true); // t.setProfile(profile); // t.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); // t.getProfile().getText().getDiv().getChildNodes().add(dtg.generate(t)); } private void genPrimitiveTypeProfile(PrimitiveType t) throws Exception { StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(t); if (page.getProfiles().containsKey(profile.getUrl())) throw new Exception("Duplicate Profile URL "+profile.getUrl()); page.getProfiles().put(profile.getUrl(), profile); page.getProfiles().put(profile.getName(), profile); t.setProfile(profile); // DataTypeTableGenerator dtg = new DataTypeTableGenerator(page.getFolders().dstDir, page, t.getCode(), true); // t.setProfile(profile); // t.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); // t.getProfile().getText().getDiv().getChildNodes().add(dtg.generate(t)); } private void genPrimitiveTypeProfile(DefinedStringPattern t) throws Exception { StructureDefinition profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(t); if (page.getProfiles().containsKey(profile.getUrl())) throw new Exception("Duplicate Profile URL "+profile.getUrl()); page.getProfiles().put(profile.getUrl(), profile); page.getProfiles().put(profile.getName(), profile); t.setProfile(profile); // DataTypeTableGenerator dtg = new DataTypeTableGenerator(page.getFolders().dstDir, page, t.getCode(), true); // t.setProfile(profile); // t.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); // t.getProfile().getText().getDiv().getChildNodes().add(dtg.generate(t)); } private void genTypeProfile(TypeDefn t) throws Exception { StructureDefinition profile; try { profile = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(t); page.getProfiles().put(profile.getUrl(), profile); t.setProfile(profile); DataTypeTableGenerator dtg = new DataTypeTableGenerator(page.getFolders().dstDir, page, t.getName(), true); t.getProfile().getText().setDiv(new XhtmlNode(NodeType.Element, "div")); t.getProfile().getText().getDiv().getChildNodes().add(dtg.generate(t, null)); } catch (Exception e) { throw new Exception("Error generating profile for '"+t.getName()+"': "+e.getMessage(), e); } } private void processProfile(Profile ap, ConstraintStructure profile, String filename, ResourceDefn baseResource) throws Exception { // page.log(" ... 
profile "+profile.getId(), LogMessageType.Process); // they've either been loaded from spreadsheets, or from profile declarations // what we're going to do: // create StructureDefinition structures if needed (create differential definitions from spreadsheets) if (profile.getResource() == null) { StructureDefinition p = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir) .generate(ap, profile, profile.getDefn(), profile.getId(), profile.getUsage(), page.getValidationErrors(), baseResource); p.setUserData("pack", ap); profile.setResource(p); if (profile.getResourceInfo() != null) { profile.getResourceInfo().setUserData(ToolResourceUtilities.RES_ACTUAL_RESOURCE, p); } if (page.getProfiles().containsKey(p.getUrl())) throw new Exception("Duplicate Profile URL "+p.getUrl()); page.getProfiles().put(p.getUrl(), p); } else { profile.getResource().setUserData("pack", ap); sortProfile(profile.getResource()); for (ElementDefinition ed : profile.getResource().getDifferential().getElement()) if (!ed.hasId()) throw new Exception("Missing ID"); // special case: if the profile itself doesn't claim a date, it's date is the date of this publication if (!profile.getResource().hasDate()) profile.getResource().setDate(page.getGenDate().getTime()); if (profile.getResource().hasBaseDefinition() && !profile.getResource().hasSnapshot()) { // cause it probably doesn't, coming from the profile directly StructureDefinition base = getSnapShotForProfile(profile.getResource().getBaseDefinition()); new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSnapshot(base, profile.getResource(), profile.getResource().getBaseDefinition().split("#")[0], "http://hl7.org/fhir", profile.getResource().getName()); } if (page.getProfiles().containsKey(profile.getResource().getUrl())) throw new Exception("Duplicate Profile URL "+profile.getResource().getUrl()); page.getProfiles().put(profile.getResource().getUrl(), profile.getResource()); } if (!Utilities.noString(filename)) profile.getResource().setUserData("filename", filename+".html"); if (Utilities.noString(profile.getResource().getUserString("path"))) { String path = ""; ImplementationGuideDefn ig = page.getDefinitions().getUsageIG(ap.getCategory(), "processProfile"); if (ig!=null && !ig.isCore()) path = ig.getCode() + File.separator; profile.getResource().setUserData("path", path + filename+".html"); } } private void sortProfile(StructureDefinition diff) throws Exception { StructureDefinition base = page.getWorkerContext().fetchResource(StructureDefinition.class, diff.getBaseDefinition()); if (base == null) throw new Exception("unable to find base profile "+diff.getUrl()); List<String> errors = new ArrayList<String>(); new ProfileUtilities(page.getWorkerContext(), null, page).sortDifferential(base, diff, diff.getName(), errors); // if (errors.size() > 0) // throw new Exception("Error sorting profile "+diff.getName()+": "+errors.toString()); } public StructureDefinition getSnapShotForProfile(String base) throws Exception { String[] parts = base.split("#"); if (parts[0].startsWith("http://hl7.org/fhir/StructureDefinition/") && parts.length == 1) { String name = base.substring(40); if (page.getDefinitions().hasResource(name)) return page.getDefinitions().getSnapShotForType(name); else if (page.getDefinitions().hasType(name)) { TypeDefn t = page.getDefinitions().getElementDefn(name); if (t.getProfile().hasSnapshot()) return t.getProfile(); 
throw new Exception("unable to find snapshot for "+name); } //else // throw new Exception("unable to find base definition for "+name); } StructureDefinition p = new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).getProfile(null, parts[0]); if (p == null) throw new Exception("unable to find base definition for "+base); if (parts.length == 1) { if (p.getSnapshot() == null) throw new Exception("StructureDefinition "+base+" has no snapshot"); // or else we could fill it in? return p; } for (Resource r : p.getContained()) { if (r instanceof StructureDefinition && r.getId().equals(parts[1])) { StructureDefinition pc = (StructureDefinition) r; if (pc.getSnapshot() == null) { StructureDefinition ps = getSnapShotForProfile(pc.getBaseDefinition()); processProfile(pc); } return pc; } } throw new Exception("Unable to find snapshot for "+base); } private void processProfile(StructureDefinition ae) throws Exception { if (ae.getDate() == null) ae.setDate(page.getGenDate().getTime()); if (ae.hasBaseDefinition() && ae.hasSnapshot()) { // cause it probably doesn't, coming from the profile directly StructureDefinition base = getIgProfile(ae.getBaseDefinition()); if (base == null) base = new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).getProfile(null, ae.getBaseDefinition()); new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSnapshot(base, ae, ae.getBaseDefinition().split("#")[0], "http://hl7.org/fhir", ae.getName()); if (page.getProfiles().containsKey(ae.getUrl())) throw new Exception("Duplicate Profile URL "+ae.getUrl()); page.getProfiles().put(ae.getUrl(), ae); } } public StructureDefinition getIgProfile(String base) throws Exception { String[] parts = base.split("#"); StructureDefinition p = getIGProfileByURL(parts[0]); if (p == null) return null; processProfile(p); // this is recursive, but will terminate at the root if (parts.length == 1) { if (p.getSnapshot() == null) throw new Exception("StructureDefinition "+base+" has no snapshot"); // or else we could fill it in? 
      return p;
    }
    for (Resource r : p.getContained()) {
      if (r instanceof StructureDefinition && r.getId().equals(parts[1])) {
        StructureDefinition pc = (StructureDefinition) r;
        if (pc.getSnapshot() == null) {
          StructureDefinition ps = getSnapShotForProfile(pc.getBaseDefinition());
          processProfile(pc);
        }
        return pc;
      }
    }
    throw new Exception("Unable to find snapshot for "+base);
  }

  @SuppressWarnings("unchecked")
  private StructureDefinition getIGProfileByURL(String url) {
    if (url.contains("#"))
      url = url.substring(0, url.indexOf('#'));
    for (Resource ae : page.getIgResources().values()) {
      if (ae instanceof StructureDefinition) {
        StructureDefinition p = (StructureDefinition) ae;
        if (p.getUrl().equals(url))
          return (StructureDefinition) ae;
      }
    }
    return null;
  }

  private void loadSuppressedMessages(String rootDir) throws Exception {
    InputStreamReader r = new InputStreamReader(new FileInputStream(rootDir + "suppressed-messages.txt"));
    StringBuilder b = new StringBuilder();
    while (r.ready()) {
      char c = (char) r.read();
      if (c == '\r' || c == '\n') {
        if (b.length() > 0)
          page.getSuppressedMessages().add(b.toString());
        b = new StringBuilder();
      } else
        b.append(c);
    }
    if (b.length() > 0)
      page.getSuppressedMessages().add(b.toString());
    r.close();
  }

  private void loadValueSets1() throws Exception {
    page.log(" ...vocab #1", LogMessageType.Process);
    new ValueSetImporterV3(page, page.getValidationErrors()).execute();
    new ValueSetImporterV2(page, page.getValidationErrors()).execute();
    generateCodeSystemsPart1();
    generateValueSetsPart1();
    for (BindingSpecification cd : page.getDefinitions().getUnresolvedBindings()) {
      String ref = cd.getReference();
      if (ref.startsWith("http://hl7.org/fhir")) {
        // we expect to be able to resolve this
        ValueSet vs = page.getDefinitions().getValuesets().get(ref);
        if (vs == null)
          vs = page.getDefinitions().getExtraValuesets().get(ref);
        if (vs == null)
          vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
        if (vs == null) {
          if (page.getDefinitions().getBoundValueSets().containsKey(ref))
            throw new Exception("Unable to resolve the value set reference "+ref+" but found it in load list");
          throw new Exception("Unable to resolve the value set reference "+ref);
        }
        cd.setValueSet(vs);
      } else {
        ValueSet vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
        if (vs != null)
          cd.setValueSet(vs);
        else if (!ref.startsWith("http://loinc.org/vs/LL"))
          System.out.println("Unresolved value set reference: "+ref);
      }
    }
    for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) {
      for (BindingSpecification cd : ig.getUnresolvedBindings()) {
        String ref = cd.getReference();
        if (ref.contains("|"))
          ref = ref.substring(0, ref.indexOf("|"));
        ValueSet vs = page.getDefinitions().getValuesets().get(ref);
        if (vs == null)
          vs = ig.getValueSet(ref);
        if (vs == null)
          vs = page.getWorkerContext().fetchResource(ValueSet.class, ref);
        if (vs == null)
          throw new Exception("unable to resolve value set "+ref);
        cd.setValueSet(vs);
      }
    }
  }

  private void loadValueSets2() throws Exception {
    page.log(" ...default Capability Statements", LogMessageType.Process);
    if (isGenerate) {
      generateConformanceStatement(true, "base", false);
      generateConformanceStatement(false, "base2", false);
      generateCompartmentDefinitions();
    }
    page.log(" ...resource CodeSystem", LogMessageType.Process);
    ResourceDefn r = page.getDefinitions().getResources().get("CodeSystem");
    if (isGenerate && wantBuild("CodeSystem")) {
      produceResource1(r, false);
      produceResource2(r, false, null, false);
    }
    generateCodeSystemsPart2();
    page.log(" ...resource ValueSet", LogMessageType.Process);
    r = page.getDefinitions().getResources().get("ValueSet");
    if (isGenerate && wantBuild("ValueSet")) {
      produceResource1(r, false);
      produceResource2(r, false, null, false);
    }
    page.log(" ...value sets", LogMessageType.Process);
    generateValueSetsPart2();
    generateConceptMaps();
    page.saveSnomed();
    if (isGenerate) {
      /// regenerate. TODO: this is silly - need to generate before so that xpaths are populated, but need to generate now to fill them properly
      generateConformanceStatement(true, "base", true);
      generateConformanceStatement(false, "base2", true);
    }
    generateCodeSystemRegistry();
//    copyTerminologyToVocabPoC();
  }

//  private void copyTerminologyToVocabPoC() throws FileNotFoundException, IOException {
//    Map<String, ValueSet> hardBoundVS = new HashMap<String, ValueSet>();
//    for (ResourceDefn rd : page.getDefinitions().getResources().values()) {
//      listBoundValueSets(rd.getRoot(), hardBoundVS);
//    }
//    Map<String, CodeSystem> hardBoundCS = new HashMap<String, CodeSystem>();
//    for (ValueSet vs : hardBoundVS.values()) {
//      for (ConceptSetComponent cset : vs.getCompose().getInclude()) {
//        if (cset.hasSystem()) {
//          CodeSystem cs = page.getCodeSystems().get(cset.getSystem());
//          if (cs != null)
//            hardBoundCS.put(cs.getUrl(), cs);
//        }
//      }
//    }
//    for (ValueSet vs : page.getValueSets().values()) {
//      if (!hardBoundVS.containsValue(vs) && !vs.getId().startsWith("v2-") && !vs.getId().startsWith("v3-"))
//        new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(Utilities.path("C:\\work\\org.hl7.fhir.intl\\vocab-poc\\fhir", "valueSets", vs.getId()+".xml")), vs);
//    }
//    for (CodeSystem cs : page.getCodeSystems().values()) {
//      if (!hardBoundCS.containsValue(cs) && !cs.getId().startsWith("v2-") && !cs.getId().startsWith("v3-"))
//        new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(Utilities.path("C:\\work\\org.hl7.fhir.intl\\vocab-poc\\fhir", "codeSystems", cs.getId()+".xml")), cs);
//    }
//  }
//
  private void listBoundValueSets(ElementDefn element, Map<String, ValueSet> list) {
    if (element.hasBinding() && element.typeCode().equals("code") && element.getBinding().getStrength() == BindingStrength.REQUIRED && element.getBinding().getValueSet() != null)
      list.put(element.getBinding().getValueSet().getUrl(), element.getBinding().getValueSet());
    for (ElementDefn child : element.getElements())
      listBoundValueSets(child, list);
  }

  private void generateCodeSystemRegistry() throws FileNotFoundException, IOException, Exception {
    XmlParser xml = new XmlParser();
    xml.setOutputStyle(OutputStyle.PRETTY);
    Bundle bnd = (Bundle) xml.parse(new CSFileInputStream(Utilities.path(page.getFolders().srcDir, "namingsystem", "namingsystem-terminologies.xml")));
    for (BundleEntryComponent entry : bnd.getEntry()) {
      NamingSystem ns = (NamingSystem) entry.getResource();
      entry.setFullUrl("http://hl7.org/fhir/NamingSystem/"+ns.getId());
      String url = null;
      for (NamingSystemUniqueIdComponent t : ns.getUniqueId()) {
        if (t.getType() == NamingSystemIdentifierType.URI)
          url = t.getValue();
      }
      if (url != null) {
        if (url.startsWith("http://hl7.org/fhir"))
          page.getDefinitions().addNs(url, "System "+ns.getName(), "terminologies-systems.html#"+url);
        page.getDefinitions().addNs(entry.getFullUrl(), ns.getId(), "terminologies-systems.html#"+url);
      }
    }
    List<String> names = new ArrayList<String>();
    Set<String> urls = new HashSet<>();
    names.addAll(page.getCodeSystems().keySet());
    Collections.sort(names);
    for (String n : names) {
      CodeSystem cs = page.getCodeSystems().get(n);
      if (cs != null && !urls.contains(cs.getUrl())) {
        urls.add(cs.getUrl());
        if (cs.hasName()) {
          NamingSystem ns = new NamingSystem();
          ns.setId(cs.getId());
          ns.setName(cs.getName());
          ns.setStatus(cs.getStatus());
          if (!ns.hasStatus())
            ns.setStatus(PublicationStatus.DRAFT);
          ns.setKind(NamingSystemType.CODESYSTEM);
          ns.setPublisher(cs.getPublisher());
          for (ContactDetail c : cs.getContact()) {
            ContactDetail nc = ns.addContact();
            nc.setName(c.getName());
            for (ContactPoint cc : c.getTelecom()) {
              nc.getTelecom().add(cc);
            }
          }
          ns.setDate(cs.getDate());
          if (!ns.hasDate())
            ns.setDate(page.getGenDate().getTime());
          ns.setDescription(cs.getDescription());
          ns.addUniqueId().setType(NamingSystemIdentifierType.URI).setValue(cs.getUrl()).setPreferred(true);
          String oid = CodeSystemUtilities.getOID(cs);
          if (oid != null) {
            if (oid.startsWith("urn:oid:"))
              oid = oid.substring(8);
            ns.addUniqueId().setType(NamingSystemIdentifierType.OID).setValue(oid).setPreferred(false);
          }
          ns.setUserData("path", cs.getUserData("path"));
          bnd.addEntry().setResource(ns).setFullUrl("http://hl7.org/fhir/"+ns.fhirType()+"/"+ns.getId());
        }
      }
    }
    xml.compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.xml")), bnd);
    cloneToXhtml("namingsystem-terminologies", "Terminology Registry", false, "resource-instance:NamingSystem", "Terminology Registry", null, wg("vocab"));
    xml.setOutputStyle(OutputStyle.CANONICAL);
    xml.compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.canonical.xml")), bnd);
    JsonParser json = new JsonParser();
    json.setOutputStyle(OutputStyle.PRETTY);
    json.compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.json")), bnd);
    jsonToXhtml("namingsystem-terminologies", "Terminology Registry", TextFile.fileToString(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.json")), "resource-instance:NamingSystem", "Terminology Registry", null, wg("vocab"));
    json.setOutputStyle(OutputStyle.CANONICAL);
    json.compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.canonical.json")), bnd);
    RdfParser rdf = new RdfParser();
    rdf.setOutputStyle(OutputStyle.PRETTY);
    rdf.compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.ttl")), bnd);
    ttlToXhtml("namingsystem-terminologies", "Terminology Registry", TextFile.fileToString(Utilities.path(page.getFolders().dstDir, "namingsystem-terminologies.ttl")), "resource-instance:NamingSystem", "Terminology Registry", null, wg("vocab"));
    StringBuilder b = new StringBuilder();
    b.append("<table class=\"grid\">\r\n");
    b.append(" <tr>");
    b.append("<td><b>Name</b></td>");
    b.append("<td><b>Uri</b></td>");
    b.append("<td><b>OID</b></td>");
    b.append("</tr>\r\n");
    for (BundleEntryComponent entry : bnd.getEntry()) {
      NamingSystem ns = (NamingSystem) entry.getResource();
      String uri = "";
      String oid = "";
      for (NamingSystemUniqueIdComponent id : ns.getUniqueId()) {
        if (id.getType() == NamingSystemIdentifierType.URI)
          uri = id.getValue();
        if (id.getType() == NamingSystemIdentifierType.OID)
          oid = id.getValue();
      }
      String link = "terminologies-systems.html#"+uri;
      if (ns.getUserData("path") != null)
        link = ns.getUserString("path");
      b.append(" <tr>");
      b.append("<td><a href=\""+link+"\">"+Utilities.escapeXml(ns.getName())+"</a></td>");
      b.append("<td>"+Utilities.escapeXml(uri)+"</td>");
      b.append("<td>"+Utilities.escapeXml(oid)+"</td>");
      b.append("</tr>\r\n");
    }
    b.append("</table>\r\n");
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example.html").replace("<%example%>", b.toString()).replace("<%example-usage%>", "");
    html = page.processPageIncludes("namingsystem-terminologies.html", html, "resource-instance:NamingSystem", null, bnd, null, "Example", null, null, page.getDefinitions().getWorkgroups().get("fhir"));
    TextFile.stringToFile(html, page.getFolders().dstDir + "namingsystem-terminologies.html");
    cachePage("namingsystem-terminologies.html", html, "Registered Code Systems", false);
  }

  private WorkGroup wg(String code) {
    return page.getDefinitions().getWorkgroups().get(code);
  }

  private void buildFeedsAndMaps() {
    page.setResourceBundle(new Bundle());
    page.getResourceBundle().setId("resources");
    page.getResourceBundle().setType(BundleType.COLLECTION);
    page.getResourceBundle().setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    profileBundle = new Bundle();
    profileBundle.setId("profiles-others");
    profileBundle.setType(BundleType.COLLECTION);
    profileBundle.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    page.setTypeBundle(new Bundle());
    page.getTypeBundle().setId("types");
    page.getTypeBundle().setType(BundleType.COLLECTION);
    page.getTypeBundle().setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    valueSetsFeed = new Bundle();
    valueSetsFeed.setId("valuesets");
    valueSetsFeed.setType(BundleType.COLLECTION);
    valueSetsFeed.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    dataElements = new Bundle();
    dataElements.setId("dataelements");
    dataElements.setType(BundleType.COLLECTION);
    dataElements.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    conceptMapsFeed = new Bundle();
    conceptMapsFeed.setId("conceptmaps");
    conceptMapsFeed.setType(BundleType.COLLECTION);
    conceptMapsFeed.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
    externals = new Bundle();
    externals.setId("externals");
    externals.setType(BundleType.COLLECTION);
    externals.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime()));
  }

  private void generateCompartmentDefinitions() throws Exception {
    for (Compartment c : page.getDefinitions().getCompartments())
      generateCompartmentDefinition(c);
  }

  private void generateCompartmentDefinition(Compartment c) throws Exception {
    CompartmentDefinition cpd = new CompartmentDefinition();
    cpd.setId(c.getName());
    cpd.setUrl("http://hl7.org/fhir/CompartmentDefinition/" + c.getName());
    cpd.setName("Base FHIR compartment definition for " + c.getTitle());
    cpd.setStatus(PublicationStatus.DRAFT);
    cpd.setDescription(c.getIdentity()+". "+c.getDescription());
    cpd.setExperimental(true);
    cpd.setVersion(Constants.VERSION);
    cpd.setDate(page.getGenDate().getTime());
    cpd.setPublisher("FHIR Project Team");
    cpd.addContact().getTelecom().add(Factory.newContactPoint(ContactPointSystem.URL, "http://hl7.org/fhir"));
    cpd.setCode(CompartmentType.fromCode(c.getTitle()));
    cpd.setSearch(true);
    for (String rn : page.getDefinitions().sortedResourceNames()) {
      ResourceDefn rd = page.getDefinitions().getResourceByName(rn);
      String rules = c.getResources().get(rd);
      CompartmentDefinitionResourceComponent cc = cpd.addResource().setCode(rd.getName());
      if (!Utilities.noString(rules)) {
        for (String p : rules.split("\\|"))
          cc.addParam(p.trim());
      }
    }
    NarrativeGenerator gen = new NarrativeGenerator("", "", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY);
    gen.generate(cpd, null);
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".xml");
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpd);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".canonical.xml");
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cpd);
    s.close();
    cloneToXhtml("compartmentdefinition-" + c.getName().toLowerCase(), "Compartment Definition for "+c.getName(), true, "resource-instance:CompartmentDefinition", "Compartment Definition for "+c.getName(), null, wg("fhir"));
    s = new FileOutputStream(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".json");
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpd);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".canonical.json");
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cpd);
    s.close();
    jsonToXhtml("compartmentdefinition-" + c.getName().toLowerCase(), "Compartment Definition for "+c.getName(), resource2Json(cpd), "resource-instance:CompartmentDefinition", "Compartment Definition for "+c.getName(), null, wg("fhir"));
    s = new FileOutputStream(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".ttl");
    new RdfParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpd);
    s.close();
    ttlToXhtml("compartmentdefinition-" + c.getName().toLowerCase(), "Compartment Definition for "+c.getName(), resource2Ttl(cpd), "resource-instance:CompartmentDefinition", "Compartment Definition for "+c.getName(), null, wg("fhir"));
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + "compartmentdefinition-" + c.getName().toLowerCase() + ".xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + "compartmentdefinition-" + c.getName().toLowerCase() + ".xml"));
    addToResourceFeed(cpd, page.getResourceBundle());
  }

  private void generateConformanceStatement(boolean full, String name, boolean register) throws Exception {
    pgen = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir);
    CapabilityStatement cpbs = new CapabilityStatement();
    cpbs.setId(FormatUtilities.makeId(name));
    cpbs.setUrl("http://hl7.org/fhir/CapabilityStatement/" + name);
    cpbs.setVersion(page.getVersion().toCode());
    cpbs.setName("Base FHIR Capability Statement " + (full ? "(Full)" : "(Empty)"));
    cpbs.setStatus(PublicationStatus.DRAFT);
    cpbs.setExperimental(true);
    cpbs.setDate(page.getGenDate().getTime());
    cpbs.setPublisher("FHIR Project Team");
    cpbs.addContact().getTelecom().add(Factory.newContactPoint(ContactPointSystem.URL, "http://hl7.org/fhir"));
    cpbs.setKind(CapabilityStatementKind.CAPABILITY);
    cpbs.setSoftware(new CapabilityStatementSoftwareComponent());
    cpbs.getSoftware().setName("Insert your software name here...");
    cpbs.setFhirVersion(page.getVersion());
    cpbs.getFormat().add(Factory.newCode("xml"));
    cpbs.getFormat().add(Factory.newCode("json"));
    CapabilityStatementRestComponent rest = new CapabilityStatement.CapabilityStatementRestComponent();
    cpbs.getRest().add(rest);
    rest.setMode(RestfulCapabilityMode.SERVER);
    if (full) {
      rest.setDocumentation("All the functionality defined in FHIR");
      cpbs.setDescription("This is the base Capability Statement for FHIR. It represents a server that provides the full set of functionality defined by FHIR. It is provided to use as a template for system designers to build their own Capability Statements from");
    } else {
      rest.setDocumentation("An empty Capability Statement");
      cpbs.setDescription("This is the base Capability Statement for FHIR. It represents a server that provides none of the functionality defined by FHIR. It is provided to use as a template for system designers to build their own Capability Statements from. A capability statement has to contain something, so this contains a read of a Capability Statement");
    }
    rest.setSecurity(new CapabilityStatementRestSecurityComponent());
    rest.getSecurity().setCors(true);
    rest.getSecurity().addService().setText("See http://docs.smarthealthit.org/").addCoding().setSystem("http://terminology.hl7.org/CodeSystem/restful-security-service").setCode("SMART-on-FHIR").setDisplay("SMART-on-FHIR");
    rest.getSecurity().setDescription("This is the Capability Statement to declare that the server supports SMART-on-FHIR. See the SMART-on-FHIR docs for the extension that would go with such a server");
    if (full) {
      for (String rn : page.getDefinitions().sortedResourceNames()) {
        ResourceDefn rd = page.getDefinitions().getResourceByName(rn);
        CapabilityStatementRestResourceComponent res = new CapabilityStatement.CapabilityStatementRestResourceComponent();
        rest.getResource().add(res);
        res.setType(rn);
        res.setProfile("http://hl7.org/fhir/StructureDefinition/" + rn);
        genConfInteraction(cpbs, res, TypeRestfulInteraction.READ, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.VREAD, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.UPDATE, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.DELETE, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.HISTORYINSTANCE, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.HISTORYTYPE, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.CREATE, "Implemented per the specification (or Insert other doco here)");
        genConfInteraction(cpbs, res, TypeRestfulInteraction.SEARCHTYPE, "Implemented per the specification (or Insert other doco here)");
        res.setConditionalCreate(true);
        res.setConditionalUpdate(true);
        res.setConditionalDelete(ConditionalDeleteStatus.MULTIPLE);
        res.addReferencePolicy(ReferenceHandlingPolicy.LITERAL);
        res.addReferencePolicy(ReferenceHandlingPolicy.LOGICAL);
        for (SearchParameterDefn i : rd.getSearchParams().values()) {
          res.getSearchParam().add(makeSearchParam(rn, i));
          if (i.getType().equals(SearchType.reference))
            res.getSearchInclude().add(new StringType(rn+"."+i.getCode()));
        }
        for (String rni : page.getDefinitions().sortedResourceNames()) {
          ResourceDefn rdi = page.getDefinitions().getResourceByName(rni);
          for (SearchParameterDefn ii : rdi.getSearchParams().values()) {
            if (ii.getType().equals(SearchType.reference) && ii.getTargets().contains(rn))
              res.getSearchRevInclude().add(new StringType(rni+"."+ii.getCode()));
          }
        }
      }
      genConfInteraction(cpbs, rest, SystemRestfulInteraction.TRANSACTION, "Implemented per the specification (or Insert other doco here)");
      genConfInteraction(cpbs, rest, SystemRestfulInteraction.BATCH, "Implemented per the specification (or Insert other doco here)");
      genConfInteraction(cpbs, rest, SystemRestfulInteraction.HISTORYSYSTEM, "Implemented per the specification (or Insert other doco here)");
      genConfInteraction(cpbs, rest, SystemRestfulInteraction.SEARCHSYSTEM, "Implemented per the specification (or Insert other doco here)");
      for (ResourceDefn rd : page.getDefinitions().getBaseResources().values()) {
        for (SearchParameterDefn i : rd.getSearchParams().values())
          rest.getSearchParam().add(makeSearchParam(rd.getName(), i));
        rest.getSearchParam().add(makeSearchParam("something", SearchParamType.STRING, "id", "some doco"));
        rest.getSearchParam().add(makeSearchParam("_list", SearchParamType.TOKEN, "Resource-list", "Retrieval of resources that are referenced by a List resource"));
        rest.getSearchParam().add(makeSearchParam("_has", SearchParamType.COMPOSITE, "Resource-has", "Provides support for reverse chaining"));
        rest.getSearchParam().add(makeSearchParam("_type", SearchParamType.TOKEN, "Resource-type", "Type of resource (when doing cross-resource search)"));
        rest.getSearchParam().add(makeSearchParam("_sort", SearchParamType.TOKEN, "Resource-source", "How to sort the resources when returning"));
        rest.getSearchParam().add(makeSearchParam("_count", SearchParamType.NUMBER, "Resource-count", "How many resources to return"));
        rest.getSearchParam().add(makeSearchParam("_include", SearchParamType.TOKEN, "Resource-include", "Control over returning additional resources (see spec)"));
        rest.getSearchParam().add(makeSearchParam("_revinclude", SearchParamType.TOKEN, "Resource-revinclude", "Control over returning additional resources (see spec)"));
        rest.getSearchParam().add(makeSearchParam("_summary", SearchParamType.TOKEN, "Resource-summary", "What kind of information to return"));
        rest.getSearchParam().add(makeSearchParam("_elements", SearchParamType.STRING, "Resource-elements", "What kind of information to return"));
        rest.getSearchParam().add(makeSearchParam("_contained", SearchParamType.TOKEN, "Resource-contained", "Managing search into contained resources"));
        rest.getSearchParam().add(makeSearchParam("_containedType", SearchParamType.TOKEN, "Resource-containedType", "Managing search into contained resources"));
        for (Operation op : rd.getOperations())
          rest.addOperation().setName(op.getName()).setDefinition("http://hl7.org/fhir/OperationDefinition/"+rd.getName().toLowerCase()+"-"+op.getName());
      }
      for (String rn : page.getDefinitions().sortedResourceNames()) {
        ResourceDefn r = page.getDefinitions().getResourceByName(rn);
        for (Operation op : r.getOperations())
          rest.addOperation().setName(op.getName()).setDefinition("http://hl7.org/fhir/OperationDefinition/"+r.getName().toLowerCase()+"-"+op.getName());
      }
    } else {
      // don't add anything - the metadata operation is implicit
//      CapabilityStatementRestResourceComponent res = new CapabilityStatement.CapabilityStatementRestResourceComponent();
//      rest.getResource().add(res);
//      res.setType("CapabilityStatement");
//      genConfInteraction(cpbs, res, TypeRestfulInteraction.READ, "Read CapabilityStatement Resource");
    }
    if (register) {
      NarrativeGenerator gen = new NarrativeGenerator("", "", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY);
      gen.generate(cpbs, null);
      FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + "capabilitystatement-" + name + ".xml");
      new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpbs);
      s.close();
      s = new FileOutputStream(page.getFolders().dstDir + "capabilitystatement-" + name + ".canonical.xml");
      new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cpbs);
      s.close();
      cloneToXhtml("capabilitystatement-" + name + "", "Basic Capability Statement", true, "resource-instance:CapabilityStatement", "Capability Statement", null, wg("fhir"));
      s = new FileOutputStream(page.getFolders().dstDir + "capabilitystatement-" + name + ".json");
      new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpbs);
      s.close();
      s = new FileOutputStream(page.getFolders().dstDir + "capabilitystatement-" + name + ".canonical.json");
      new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cpbs);
      s.close();
      jsonToXhtml("capabilitystatement-" + name, "Base Capability Statement", resource2Json(cpbs), "resource-instance:CapabilityStatement", "Capability Statement", null, wg("fhir"));
      s = new FileOutputStream(page.getFolders().dstDir + "capabilitystatement-" + name + ".ttl");
      new RdfParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cpbs);
      s.close();
ttlToXhtml("capabilitystatement-" + name, "Base Capability Statement", resource2Ttl(cpbs), "resource-instance:CapabilityStatement", "Capability Statement", null, wg("fhir")); Utilities.copyFile(new CSFile(page.getFolders().dstDir + "capabilitystatement-" + name + ".xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + "capabilitystatement-" + name + ".xml")); } if (buildFlags.get("all")) { new NarrativeGenerator("", "", page.getWorkerContext()).generate(null, cpbs); deletefromFeed(ResourceType.CapabilityStatement, name, page.getResourceBundle()); addToResourceFeed(cpbs, page.getResourceBundle()); } } private CapabilityStatementRestResourceSearchParamComponent makeSearchParam(String name, SearchParamType type, String id, String doco) throws Exception { CapabilityStatementRestResourceSearchParamComponent result = new CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent(); result.setName(name); result.setDefinition("http://hl7.org/fhir/SearchParameter/"+id); result.setType(type); result.setDocumentation(doco); return result; } private CapabilityStatementRestResourceSearchParamComponent makeSearchParam(String rn, SearchParameterDefn i) throws Exception { CapabilityStatementRestResourceSearchParamComponent result = new CapabilityStatement.CapabilityStatementRestResourceSearchParamComponent(); result.setName(i.getCode()); result.setDefinition("http://hl7.org/fhir/SearchParameter/"+i.getCommonId()); result.setType(getSearchParamType(i.getType())); result.setDocumentation(i.getDescription()); if (Utilities.noString(i.getXPath())) i.setXPath(new XPathQueryGenerator(page.getDefinitions(), page, page.getQa()).generateXpath(i.getPaths())); // used elsewhere later return result; } private SearchParamType getSearchParamType(SearchType type) { switch (type) { case number: return SearchParamType.NUMBER; case string: return SearchParamType.STRING; case date: return SearchParamType.DATE; case reference: return SearchParamType.REFERENCE; case token: return SearchParamType.TOKEN; case uri: return SearchParamType.URI; case composite: return SearchParamType.COMPOSITE; case quantity: return SearchParamType.QUANTITY; case special: return SearchParamType.SPECIAL; } return null; } private void genConfInteraction(CapabilityStatement conf, CapabilityStatementRestResourceComponent res, TypeRestfulInteraction op, String doco) { ResourceInteractionComponent t = new ResourceInteractionComponent(); t.setCode(op); t.setDocumentation(doco); res.getInteraction().add(t); } private void genConfInteraction(CapabilityStatement conf, CapabilityStatementRestComponent res, SystemRestfulInteraction op, String doco) { SystemInteractionComponent t = new SystemInteractionComponent(); t.setCode(op); t.setDocumentation(doco); res.getInteraction().add(t); } private void registerReferencePlatforms() throws FileNotFoundException, IOException { javaReferencePlatform = new JavaGenerator(page.getFolders()); page.getReferenceImplementations().add(javaReferencePlatform); page.getReferenceImplementations().add(new XMLToolsGenerator()); } public boolean checkFile(String purpose, String dir, String file, List<String> errors, String category) throws IOException { CSFile f = new CSFile(dir + file); if (file.contains("*")) return true; if (!f.exists()) { errors.add("Unable to find " + purpose + " file " + file + " in " + dir); return false; } else if (category != null) { long d = f.lastModified(); if ((!dates.containsKey(category) || d > dates.get(category)) && !f.getAbsolutePath().endsWith(".gen.svg") && 
!f.getName().contains("please-close-this-in-excel-and-return-the-build-prior-to-committing") ) dates.put(category, d); return true; } else return true; } private boolean initialize(String folder) throws Exception { page.setDefinitions(new Definitions()); page.getWorkerContext().setCanRunWithoutTerminology(!web); page.log("Checking Source for " + folder, LogMessageType.Process); List<String> errors = new ArrayList<String>(); Utilities.checkFolder(page.getFolders().rootDir, errors); if (checkFile("required", page.getFolders().rootDir, "publish.ini", errors, "all")) { checkFile("required", page.getFolders().srcDir, "navigation.xml", errors, "all"); page.setIni(new IniFile(page.getFolders().rootDir + "publish.ini")); page.setVersion(FHIRVersion.fromCode(page.getIni().getStringProperty("FHIR", "version"))); prsr = new SourceParser(page, folder, page.getDefinitions(), web, page.getVersion(), page.getWorkerContext(), page.getGenDate(), page, fpUsages, isCIBuild); prsr.checkConditions(errors, dates); page.setRegistry(prsr.getRegistry()); page.getDiffEngine().loadFromIni(prsr.getIni()); for (String s : page.getIni().getPropertyNames("special-pages")) page.getDefinitions().getStructuralPages().add(s); Utilities.checkFolder(page.getFolders().xsdDir, errors); for (PlatformGenerator gen : page.getReferenceImplementations()) Utilities.checkFolder(page.getFolders().implDir(gen.getName()), errors); checkFile("required", page.getFolders().srcDir, "hierarchy.xml", errors, "all"); checkFile("required", page.getFolders().srcDir, "fhir-all.xsd", errors, "all"); checkFile("required", page.getFolders().srcDir, "template.html", errors, "all"); checkFile("required", page.getFolders().srcDir, "template-book.html", errors, "all"); checkFile("required", page.getFolders().srcDir, "mappingSpaces.xml", errors, "all"); // Utilities.checkFolder(page.getFolders().dstDir, errors); if (page.getIni().getPropertyNames("support") != null) for (String n : page.getIni().getPropertyNames("support")) checkFile("support", page.getFolders().srcDir, n, errors, "all"); for (String n : page.getIni().getPropertyNames("images")) checkFile("image", page.getFolders().imgDir, n, errors, "all"); for (String n : page.getIni().getPropertyNames("schema")) checkFile("schema", page.getFolders().srcDir, n, errors, "all"); for (String n : page.getIni().getPropertyNames("pages")) checkFile("page", page.getFolders().srcDir, n, errors, "page-" + n); for (String n : page.getIni().getPropertyNames("files")) checkFile("file", page.getFolders().rootDir, n, errors, "page-" + n); } if (checkFile("translations", page.getFolders().rootDir + "implementations" + File.separator, "translations.xml", errors, null)) { // schema check XmlValidator xv = new XmlValidator(page.getValidationErrors(), page.getFolders().rootDir + "implementations", Utilities.path(page.getFolders().rootDir, "tools", "schematron"), new String[] {"translations.xsd"}); xv.checkBySchema(Utilities.path(page.getFolders().rootDir, "implementations", "translations.xml"), true); Utilities.copyFile(page.getFolders().rootDir + "implementations" + File.separator + "translations.xml", page.getFolders().dstDir + "translations.xml"); page.getTranslations().setLang("en"); page.getTranslations().load(page.getFolders().rootDir + "implementations" + File.separator + "translations.xml"); } if (errors.size() > 0) page.log("Unable to publish FHIR specification:", LogMessageType.Error); for (String e : errors) { page.log(e, LogMessageType.Error); } return errors.size() == 0; } private void validate() throws 
Exception { page.log("Validating", LogMessageType.Process); ResourceValidator val = new ResourceValidator(page.getDefinitions(), page.getTranslations(), page.getCodeSystems(), page.getFolders().srcDir, fpUsages, page.getSuppressedMessages(), page.getWorkerContext()); val.resolvePatterns(); ProfileValidator valp = new ProfileValidator(); valp.setContext(page.getWorkerContext()); for (String n : page.getDefinitions().getTypes().keySet()) page.getValidationErrors().addAll(val.checkStucture(n, page.getDefinitions().getTypes().get(n))); for (String n : page.getDefinitions().getStructures().keySet()) page.getValidationErrors().addAll(val.checkStucture(n, page.getDefinitions().getStructures().get(n))); val.checkSearchParams(page.getValidationErrors(), page.getDefinitions().getResourceByName("Resource")); val.checkSearchParams(page.getValidationErrors(), page.getDefinitions().getResourceByName("DomainResource")); for (String n : page.getDefinitions().sortedResourceNames()) if (hasBuildFlag("page-" + n.toLowerCase())) page.getValidationErrors().addAll(val.check(n, page.getDefinitions().getResources().get(n))); page.getValidationErrors().addAll(val.check("Parameters", page.getDefinitions().getResourceByName("Parameters"))); for (String rname : page.getDefinitions().sortedResourceNames()) { ResourceDefn r = page.getDefinitions().getResources().get(rname); checkExampleLinks(page.getValidationErrors(), r); } for (Compartment cmp : page.getDefinitions().getCompartments()) page.getValidationErrors().addAll(val.check(cmp)); page.setPatternFinder(val.getPatternFinder()); val.report(); val.summariseSearchTypes(page.getSearchTypeUsage()); val.dumpParams(); val.close(); checkAllOk(); } private void checkAllOk() throws Exception { // page.getCollectedValidationErrors().addAll(page.getValidationErrors()); boolean mustDie = false; for (ValidationMessage e : page.getValidationErrors()) { if (e.getLevel() == IssueSeverity.ERROR || e.getLevel() == IssueSeverity.FATAL) { page.log(e.summary(), LogMessageType.Error); mustDie = true; } } if (mustDie) { page.log("Didn't publish FHIR due to errors @ " + Config.DATE_FORMAT().format(Calendar.getInstance().getTime()), LogMessageType.Process); throw new Exception("Errors executing build. 
Details logged."); } } private void processWarnings(boolean showOnlyErrors) throws Exception { String xslt = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "OwnerResources.xslt"); OutputStreamWriter s = new OutputStreamWriter(new FileOutputStream(page.getFolders().dstDir + "warnings.xml"), "UTF-8"); s.write("<warnings>"); for (WorkGroup wg : page.getDefinitions().getWorkgroups().values()) { s.write("<wg code=\""+wg.getCode()+"\" name=\""+wg.getName()+"\" url=\""+wg.getUrl()+"\"/>\r\n"); } for (PageInformation pn : page.getDefinitions().getPageInfo().values()) { s.write("<page name=\""+pn.getName()+"\" wg=\""+pn.getWgCode()+"\" fmm=\""+pn.getFmm()+"\"/>\r\n"); } try { s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-resources.xml", xslt))); s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-types.xml", xslt))); s.write(new String(XsltUtilities.saxonTransform(page.getFolders().dstDir + "profiles-others.xml", xslt))); } catch (Exception e) { for (ValidationMessage err : page.getValidationErrors()) { if (!page.getSuppressedMessages().contains(err.getDisplay())) System.out.println(err.summary()); } System.out.println("WARNING: Unable to create warnings file - one or more profiles-* files unavailable or invalid"); System.out.println("To determine the cause of the build failure, look in the log prior to the warning and information messages immediately above"); } for (ValidationMessage e : page.getValidationErrors()) { if (!page.getSuppressedMessages().contains(e.getDisplay())) s.write(e.toXML()); } s.write("</warnings>"); s.flush(); s.close(); String xslt2 = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "CategorizeWarnings.xslt"); FileOutputStream s2 = new FileOutputStream(page.getFolders().dstDir + "work-group-warnings.xml"); try { s2.write(XsltUtilities.saxonTransform(page.getFolders().dstDir + "warnings.xml", xslt2).getBytes("UTF8")); } catch (Exception e) { // nothing - do not want to know. } s2.flush(); s2.close(); String xslt3 = Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "RenderWarnings.xslt"); try { String hw = XsltUtilities.saxonTransform(page.getFolders().dstDir + "work-group-warnings.xml", xslt3); if (!showOnlyErrors) page.log(hw, LogMessageType.Process); } catch (Exception e) { // nothing - do not want to know. 
    }
    int i = 0;
    int w = 0;
    int ee = 0;
    for (ValidationMessage e : page.getValidationErrors()) {
      if (e.getLevel() == IssueSeverity.ERROR || e.getLevel() == IssueSeverity.FATAL) {
        ee++;
        page.log(e.summary(), LogMessageType.Hint);
      } else if (e.getLevel() == IssueSeverity.WARNING) {
        w++;
      } else if (e.getLevel() == IssueSeverity.INFORMATION) {
        i++;
      }
    }
    page.getQa().setCounts(ee, w, i);
  }

  private boolean hasBuildFlag(String n) {
    return (buildFlags.containsKey("all") && buildFlags.get("all")) || (buildFlags.containsKey(n) && buildFlags.get(n));
  }

  private boolean wantBuild(String rname) {
    rname = rname.toLowerCase();
    return buildFlags.get("all") || (!buildFlags.containsKey(rname) || buildFlags.get(rname));
  }

  private void checkExampleLinks(List<ValidationMessage> errors, ResourceDefn r) throws Exception {
    for (Example e : r.getExamples()) {
      try {
        if (e.getXml() != null) {
          List<ExampleReference> refs = new ArrayList<ExampleReference>();
          listLinks(e.getXml().getDocumentElement(), refs);
          for (ExampleReference ref : refs) {
            if (!ref.isExempt() && !resolveLink(ref, e)) {
              String path = ref.getPath().replace("/f:", ".").substring(1)+" (example "+e.getTitle()+")";
              if (ref.hasType() && page.getDefinitions().hasResource(ref.getType())) {
                errors.add(new ValidationMessage(Source.ExampleValidator, IssueType.BUSINESSRULE, -1, -1, path,
                    "Unable to resolve example reference to " + ref.getRef() + " in " + e.getTitle() + " (Possible Ids: " + listTargetIds(ref.getType())+")",
                    "Unable to resolve example reference to " + ref.getRef() + " in <a href=\""+e.getTitle() + ".html"+"\">" + e.getTitle() + "</a> (Possible Ids: " + listTargetIds(ref.getType())+")",
                    IssueSeverity.INFORMATION/*WARNING*/));
              } else {
                String regex = "((http|https)://([A-Za-z0-9\\\\\\/\\.\\:\\%\\$])*)?("+page.pipeResources()+")\\/"+FormatUtilities.ID_REGEX+"(\\/_history\\/"+FormatUtilities.ID_REGEX+")?";
                if (ref.getRef().matches(regex)) {
                  errors.add(new ValidationMessage(Source.ExampleValidator, IssueType.BUSINESSRULE, -1, -1, path,
                      "Unable to resolve example reference " + ref.getRef() + " in " + e.getTitle(),
                      "Unable to resolve example reference " + ref.getRef() + " in <a href=\""+e.getTitle() + ".html"+"\">" + e.getTitle() + "</a>",
                      IssueSeverity.INFORMATION/*WARNING*/));
                } else {
                  errors.add(new ValidationMessage(Source.ExampleValidator, IssueType.BUSINESSRULE, -1, -1, path,
                      "Unable to resolve invalid example reference " + ref.getRef() + " in " + e.getTitle(),
                      "Unable to resolve invalid example reference " + ref.getRef() + " in <a href=\""+e.getTitle() + ".html"+"\">" + e.getTitle() + "</a>",
                      IssueSeverity.WARNING));
                }
              }
//              System.out.println("unresolved reference "+ref.getRef()+" at "+path);
            }
          }
        }
      } catch (Exception ex) {
        throw new Exception("Error checking example " + e.getTitle() + ":" + ex.getMessage(), ex);
      }
    }
  }

  private String listTargetIds(String type) throws Exception {
    StringBuilder b = new StringBuilder();
    ResourceDefn r = page.getDefinitions().getResourceByName(type);
    if (r != null) {
      for (Example e : r.getExamples()) {
        if (!Utilities.noString(e.getId()))
          b.append(e.getId()).append(", ");
        if (e.getXml() != null) {
          if (e.getXml().getDocumentElement().getLocalName().equals("feed")) {
            List<Element> entries = new ArrayList<Element>();
            XMLUtil.getNamedChildren(e.getXml().getDocumentElement(), "entry", entries);
            for (Element c : entries) {
              String id = XMLUtil.getNamedChild(c, "id").getTextContent();
              if (id.startsWith("http://hl7.org/fhir/") && id.contains("@"))
                b.append(id.substring(id.indexOf("@") + 1)).append(", ");
              else
                b.append(id).append(", ");
            }
          }
        }
      }
    } else
      b.append("(unknown resource type)");
    return b.toString();
  }

  private boolean resolveLink(ExampleReference ref, Example src) throws Exception {
    if (!ref.hasType() && ref.getId() == null)
      return false;
    if (!ref.hasType() && ref.getId().startsWith("#"))
      return true;
    if (!ref.hasType() || !page.getDefinitions().hasResource(ref.getType()))
      return false;
    if (ref.getId().startsWith("#"))
      return false;
    String id = ref.getId();
    ResourceDefn r = page.getDefinitions().getResourceByName(ref.getType());
    for (Example e : r.getExamples()) {
      if (id.equals(e.getId())) {
        e.getInbounds().add(src);
        return true;
      }
      if (e.getXml() != null) {
        if (resolveLinkInBundle(ref, src, e, id))
          return true;
      }
    }
    // didn't find it? well, we'll look through all the other examples looking for bundles that contain it
    for (ResourceDefn rt : page.getDefinitions().getResources().values()) {
      for (Example e : rt.getExamples()) {
        if (e.getXml() != null) {
          if (resolveLinkInBundle(ref, src, e, id))
            return true;
        }
      }
    }
    // still not found?
    if (ref.type.equals("ConceptMap"))
      return page.getConceptMaps().containsKey("http://hl7.org/fhir/"+ref.type+"/"+ref.getId());
    if (ref.type.equals("StructureDefinition")) {
      if (page.getDefinitions().hasResource(ref.getId()))
        return true;
      if (page.getProfiles().containsKey("http://hl7.org/fhir/"+ref.type+"/"+ref.getId()) || page.getWorkerContext().hasResource(StructureDefinition.class, "http://hl7.org/fhir/"+ref.type+"/"+ref.getId()))
        return true;
      for (Profile cp : page.getDefinitions().getPackList())
        for (ConstraintStructure p : cp.getProfiles())
          if (p.getId().equals(id))
            return true;
      for (ResourceDefn rd : page.getDefinitions().getResources().values())
        for (Profile cp : rd.getConformancePackages())
          for (ConstraintStructure p : cp.getProfiles())
            if (p.getId().equals(id))
              return true;
    }
    return false;
  }

  private boolean resolveLinkInBundle(ExampleReference ref, Example src, Example e, String id) {
    if (e.getXml().getDocumentElement().getLocalName().equals("Bundle")) {
      List<Element> entries = new ArrayList<Element>();
      XMLUtil.getNamedChildren(e.getXml().getDocumentElement(), "entry", entries);
      for (Element c : entries) {
        Element resh = XMLUtil.getNamedChild(c, "resource");
        if (resh != null) {
          Element res = XMLUtil.getFirstChild(resh);
          String _id = XMLUtil.getNamedChildValue(res, "id");
          if (id.equals(_id) && ref.getType().equals(res.getLocalName())) {
            e.getInbounds().add(src);
            return true;
          }
        }
      }
    }
    return false;
  }

  private void listLinks(Element xml, List<ExampleReference> refs) throws Exception {
    if (xml.getLocalName().equals("feed")) {
      Element n = XMLUtil.getFirstChild(xml);
      while (n != null) {
        if (n.getLocalName().equals("entry")) {
          Element c = XMLUtil.getNamedChild(n, "content");
          listLinks(XMLUtil.getFirstChild(c), refs);
        }
        n = XMLUtil.getNextSibling(n);
      }
    } else {
      String n = xml.getLocalName();
      if (!n.equals("Binary")) {
        ResourceDefn r = page.getDefinitions().getResourceByName(n);
        if (r == null)
          throw new Exception("Unable to find resource definition for " + n);
        List<Element> nodes = new ArrayList<Element>();
        nodes.add(xml);
        listLinks("/f:" + n, r.getRoot(), nodes, refs);
        Element e = XMLUtil.getFirstChild(xml);
        while (e != null) {
          if (e.getNodeName().equals("contained")) {
            listLinks(XMLUtil.getFirstChild(e), refs);
          }
          e = XMLUtil.getNextSibling(e);
        }
      }
    }
  }

  private void listLinks(String path, org.hl7.fhir.definitions.model.ElementDefn d, List<Element> set, List<ExampleReference> refs) throws Exception {
    if (d.typeCode().startsWith("Reference")) {
      for (Element m : set) {
        if (XMLUtil.getNamedChild(m, "reference") != null) {
          refs.add(new ExampleReference(XMLUtil.getNamedChildValue(m, "reference"), path));
        }
      }
    }
    if (d.typeCode().startsWith("canonical")) {
      for (Element m : set) {
        if (!Utilities.noString(m.getAttribute("value"))) {
          refs.add(new ExampleReference(m.getAttribute("value"), path));
        }
      }
    }
    for (org.hl7.fhir.definitions.model.ElementDefn c : d.getElements()) {
      List<Element> cset = new ArrayList<Element>();
      for (Element p : set)
        XMLUtil.getNamedChildren(p, c.getName(), cset);
      listLinks(path + "/f:" + c.getName(), c, cset, refs);
    }
  }

//  private List<Element> xPathQuery(String path, Element e) throws Exception {
//    NamespaceContext context = new NamespaceContextMap("f",
//        "http://hl7.org/fhir", "h", "http://www.w3.org/1999/xhtml", "a", );
//
//    XPathFactory factory = XPathFactory.newInstance();
//    XPath xpath = factory.newXPath();
//    xpath.setNamespaceContext(context);
//    XPathExpression expression = xpath.compile(path);
//    NodeList resultNodes = (NodeList) expression.evaluate(e, XPathConstants.NODESET);
//    List<Element> result = new ArrayList<Element>();
//    for (int i = 0; i < resultNodes.getLength(); i++) {
//      result.add((Element) resultNodes.item(i));
//    }
//    return result;
//  }

  private void produceSpecification() throws Exception {
    page.setNavigation(new Navigation());
    page.getNavigation().parse(page.getFolders().srcDir + "navigation.xml");
    processCDA();
    page.log("Generate RDF", LogMessageType.Process);
    processRDF();
    page.log("Produce Schemas", LogMessageType.Process);
    new SchemaGenerator().generate(page.getDefinitions(), page.getIni(), page.getFolders().tmpResDir, page.getFolders().xsdDir+"codegen"+File.separator, page.getFolders().dstDir, page.getFolders().srcDir, page.getVersion().toCode(), Config.DATE_FORMAT().format(page.getGenDate().getTime()), true, page.getWorkerContext());
    new SchemaGenerator().generate(page.getDefinitions(), page.getIni(), page.getFolders().tmpResDir, page.getFolders().xsdDir, page.getFolders().dstDir, page.getFolders().srcDir, page.getVersion().toCode(), Config.DATE_FORMAT().format(page.getGenDate().getTime()), false, page.getWorkerContext());
    new org.hl7.fhir.definitions.generators.specification.json.SchemaGenerator().generate(page.getDefinitions(), page.getIni(), page.getFolders().tmpResDir, page.getFolders().xsdDir, page.getFolders().dstDir, page.getFolders().srcDir, page.getVersion().toCode(), Config.DATE_FORMAT().format(page.getGenDate().getTime()), page.getWorkerContext());
    new org.hl7.fhir.definitions.generators.specification.json.JsonLDDefinitionsGenerator().generate(page.getDefinitions(), page.getIni(), page.getFolders().tmpResDir, page.getFolders().dstDir, page.getFolders().srcDir, page.getVersion().toCode(), Config.DATE_FORMAT().format(page.getGenDate().getTime()), page.getWorkerContext());
    List<StructureDefinition> list = new ArrayList<StructureDefinition>();
    for (StructureDefinition sd : page.getWorkerContext().allStructures()) {
      if (sd.getDerivation() == TypeDerivationRule.SPECIALIZATION)
        list.add(sd);
    }
    ShExGenerator shgen = new ShExGenerator(page.getWorkerContext());
    shgen.completeModel = true;
    shgen.withComments = false;
    TextFile.stringToFile(shgen.generate(HTMLLinkPolicy.NONE, list), page.getFolders().dstDir+"fhir.shex", false);
    GraphQLSchemaGenerator gql = new GraphQLSchemaGenerator(page.getWorkerContext());
    gql.generateTypes(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "types.graphql")));
    Set<String> names = new HashSet<String>();
    for (StructureDefinition sd : page.getWorkerContext().allStructures()) {
      if (sd.getKind() == StructureDefinitionKind.RESOURCE && sd.getAbstract() == false && sd.getDerivation() == TypeDerivationRule.SPECIALIZATION && !names.contains(sd.getUrl())) {
        String filename = Utilities.path(page.getFolders().dstDir, sd.getName().toLowerCase() + ".graphql");
        names.add(sd.getUrl());
        List<SearchParameter> splist = new ArrayList<SearchParameter>();
        ResourceDefn rd = page.getDefinitions().getResourceByName(sd.getName());
        while (rd != null) {
          for (String n : sorted(rd.getSearchParams().keySet())) {
            SearchParameterDefn spd = rd.getSearchParams().get(n);
            if (spd.getResource() == null)
              buildSearchDefinition(rd, spd);
            splist.add(spd.getResource());
          }
          rd = Utilities.noString(rd.getRoot().typeCode()) ? null : page.getDefinitions().getResourceByName(rd.getRoot().typeCode());
        }
        EnumSet<FHIROperationType> ops = EnumSet.of(FHIROperationType.READ, FHIROperationType.SEARCH, FHIROperationType.CREATE, FHIROperationType.UPDATE, FHIROperationType.DELETE);
        gql.generateResource(new FileOutputStream(filename), sd, splist, ops);
      }
    }
    TextFile.stringToFile(page.genBackboneElementsJson(), Utilities.path(page.getFolders().dstDir, "backbone-elements.json"));
    TextFile.stringToFile(page.genChoiceElementsJson(), Utilities.path(page.getFolders().dstDir, "choice-elements.json"));
    if (buildFlags.get("all")) {
      for (PlatformGenerator gen : page.getReferenceImplementations()) {
        page.log("Produce " + gen.getName() + " Reference Implementation", LogMessageType.Process);
        String destDir = page.getFolders().dstDir;
        String tmpImplDir = Utilities.path(page.getFolders().tmpDir, gen.getName(), "");
        String actualImplDir = Utilities.path(page.getFolders().implDir(gen.getName()), "");
        gen.generate(page.getDefinitions(), destDir, actualImplDir, tmpImplDir, page.getVersion().toCode(), page.getGenDate().getTime(), page, page.getBuildId());
      }
      for (PlatformGenerator gen : page.getReferenceImplementations()) {
        if (gen.doesCompile()) {
          page.log("Compile " + gen.getName() + " Reference Implementation", LogMessageType.Process);
          gen.setBuildId(page.getBuildId());
          if (!gen.compile(page.getFolders().rootDir, new ArrayList<String>(), page, page.getValidationErrors(), page.isForPublication())) {
            // Must always be able to compile Java to go on. Also, if we're building
            // the web build, all generators that can compile, must compile without error.
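            // (Illustratively: a failure in a non-Java generator is logged below and
            // the run continues; only a Java compile failure aborts the build,
            // presumably because later packaging steps depend on the Java artifacts.)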
if (gen.getName().equals("java")) // || web) throw new Exception("Compile " + gen.getName() + " failed"); else page.log("Compile " + gen.getName() + " failed, still going on.", LogMessageType.Error); } } } } page.log("Produce Schematrons", LogMessageType.Process); for (String rname : page.getDefinitions().sortedResourceNames()) { ResourceDefn r = page.getDefinitions().getResources().get(rname); String n = r.getName().toLowerCase(); SchematronGenerator sch = new SchematronGenerator(page); sch.generate(new FileOutputStream(page.getFolders().dstDir + n + ".sch"), r, page.getDefinitions()); } ResourceDefn r = page.getDefinitions().getBaseResources().get("Parameters"); String n = r.getName().toLowerCase(); SchematronGenerator sch = new SchematronGenerator(page); sch.generate(new FileOutputStream(page.getFolders().dstDir + n + ".sch"), r, page.getDefinitions()); SchematronGenerator sg = new SchematronGenerator(page); sg.generate(new FileOutputStream(page.getFolders().dstDir + "fhir-invariants.sch"), page.getDefinitions()); produceSchemaZip(); page.log("Load R4 Definitions", LogMessageType.Process); loadR4Definitions(); page.log("Produce Content", LogMessageType.Process); produceSpec(); if (buildFlags.get("all")) { if (web) { generateRedirects(); } } page.clean(); } private List<String> sorted(Set<String> keys) { List<String> sl = new ArrayList<String>(); sl.addAll(keys); Collections.sort(sl); return sl; } private void loadR4Definitions() throws FileNotFoundException, FHIRException, IOException { loadR4DefinitionBundle(page.getDiffEngine().getOriginal().getTypes(), Utilities.path(page.getFolders().srcDir, "release4", "profiles-types.xml")); loadR4DefinitionBundle(page.getDiffEngine().getOriginal().getResources(), Utilities.path(page.getFolders().srcDir, "release4", "profiles-resources.xml")); loadR4DefinitionBundle(page.getDiffEngine().getOriginal().getExtensions(), Utilities.path(page.getFolders().srcDir, "release4", "extension-definitions.xml")); loadR4DefinitionBundle(page.getDiffEngine().getOriginal().getProfiles(), Utilities.path(page.getFolders().srcDir, "release4", "profiles-others.xml")); loadValueSetBundle(page.getDiffEngine().getOriginal().getExpansions(), Utilities.path(page.getFolders().srcDir, "release4", "expansions.xml")); loadValueSetBundle(page.getDiffEngine().getOriginal().getValuesets(), Utilities.path(page.getFolders().srcDir, "release4", "valuesets.xml")); } private void loadR4DefinitionBundle(Map<String, StructureDefinition> map, String fn) throws FHIRException, FileNotFoundException, IOException { org.hl7.fhir.r4.model.Bundle bundle = (org.hl7.fhir.r4.model.Bundle) new org.hl7.fhir.r4.formats.XmlParser().parse(new FileInputStream(fn)); for (org.hl7.fhir.r4.model.Bundle.BundleEntryComponent be : bundle.getEntry()) { if (be.getResource() instanceof org.hl7.fhir.r4.model.StructureDefinition) { org.hl7.fhir.r4.model.StructureDefinition sd = (org.hl7.fhir.r4.model.StructureDefinition) be.getResource(); map.put(sd.getName(), org.hl7.fhir.convertors.conv40_50.StructureDefinition.convertStructureDefinition(sd)); } } } private static void loadValueSetBundle(Map<String, ValueSet> map, String fn) throws FHIRException, FileNotFoundException, IOException { org.hl7.fhir.r4.model.Bundle bundle = (org.hl7.fhir.r4.model.Bundle) new org.hl7.fhir.r4.formats.XmlParser().parse(new FileInputStream(fn)); for (org.hl7.fhir.r4.model.Bundle.BundleEntryComponent be : bundle.getEntry()) { if (be.getResource() instanceof org.hl7.fhir.r4.model.ValueSet) { org.hl7.fhir.r4.model.ValueSet sd = 
(org.hl7.fhir.r4.model.ValueSet) be.getResource(); sd.setUserData("old", "r4"); map.put(sd.getName(), org.hl7.fhir.convertors.conv40_50.ValueSet.convertValueSet(sd)); } } } private void processCDA() { CDAGenerator gen = new CDAGenerator(); // gen.execute(src, dst); } private void processRDF() throws Exception, FileNotFoundException { // first, process the RIM file String rim = TextFile.fileToString(Utilities.path(page.getFolders().srcDir, "v3", "rim.ttl")); ByteArrayOutputStream tmp = new ByteArrayOutputStream(); FhirTurtleGenerator ttl = new FhirTurtleGenerator(tmp, page.getDefinitions(), page.getWorkerContext(), page.getValidationErrors()); ttl.executeV3(page.getValueSets(), page.getCodeSystems()); rim = rim + tmp.toString(); TextFile.stringToFile(rim, Utilities.path(page.getFolders().dstDir, "rim.ttl")); ttl = new FhirTurtleGenerator(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "fhir.ttl")), page.getDefinitions(), page.getWorkerContext(), page.getValidationErrors()); ttl.executeMain(); W5TurtleGenerator w5 = new W5TurtleGenerator(new FileOutputStream(Utilities.path(page.getFolders().dstDir, "w5.ttl")), page.getDefinitions(), page.getWorkerContext(), page.getValidationErrors()); w5.executeMain(); RDFValidator val = new RDFValidator(); val.validate(Utilities.path(page.getFolders().dstDir, "fhir.ttl")); val.validate(Utilities.path(page.getFolders().dstDir, "rim.ttl")); val.validate(Utilities.path(page.getFolders().dstDir, "w5.ttl")); ZipGenerator zip = new ZipGenerator(Utilities.path(page.getFolders().dstDir, "fhir.rdf.ttl.zip")); zip.addFileName("fhir.ttl", Utilities.path(page.getFolders().dstDir, "fhir.ttl"), false); zip.addFileName("rim.ttl", Utilities.path(page.getFolders().dstDir, "rim.ttl"), false); zip.addFileName("w5.ttl", Utilities.path(page.getFolders().dstDir, "w5.ttl"), false); zip.close(); // now that the RDF is generated, run any sparql rules that have been defined Element test = loadDom(new FileInputStream(Utilities.path(page.getFolders().srcDir, "sparql-rules.xml")), false).getDocumentElement(); test = XMLUtil.getFirstChild(test); while (test != null) { if (test.getNodeName().equals("assertion")) { String sparql = test.getTextContent(); page.getValidationErrors().addAll(val.assertion(sparql, test.getAttribute("id"), test.getAttribute("rowtype"), test.getAttribute("message"), test.getAttribute("description"), IssueSeverity.fromCode(test.getAttribute("level")))); } test = XMLUtil.getNextSibling(test); } checkAllOk(); } private void produceArchive() throws Exception { String target = page.getFolders().archiveDir + "v" + page.getVersion() + ".zip"; File tf = new CSFile(target); if (tf.exists()) tf.delete(); ZipGenerator zip = new ZipGenerator(target); int c = 0; String[] files = new CSFile(page.getFolders().dstDir).list(); for (String f : files) { File fn = new CSFile(page.getFolders().dstDir + f); if (!fn.isDirectory()) { if (f.endsWith(".html")) { String src = TextFile.fileToString(fn.getAbsolutePath()); String srcn = src.replace("<!-- achive note -->", "This is an old version of FHIR retained for archive purposes. 
Do not use for anything else"); if (!srcn.equals(src)) c++; srcn = srcn.replace("<body>", "<body><div class=\"watermark\"/>").replace("<body class=\"book\">", "<body class=\"book\"><div class=\"watermark\"/>"); zip.addFileSource(f, srcn, false); // Utilities.stringToFile(srcn, target+File.separator+f); } else if (f.endsWith(".css")) { String src = TextFile.fileToString(fn.getAbsolutePath()); src = src.replace("#fff", "lightcyan"); zip.addFileSource(f, src, false); // Utilities.stringToFile(srcn, target+File.separator+f); } else zip.addFileName(f, fn.getAbsolutePath(), false); } else if (!fn.getAbsolutePath().endsWith("v2") && !fn.getAbsolutePath().endsWith("v3")) { // used to put stuff in sub-directories. clean them out if they // still exist // Utilities.clearDirectory(fn.getAbsolutePath()); // fn.delete(); } } if (c < 3) throw new Exception("header note replacement in archive failed"); // so // check // the // syntax // of // the // string // constant // above zip.close(); } private void produceSpec() throws Exception { for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) { for (LogicalModel lm : ig.getLogicalModels()) { page.log(" ...logical model " + lm.getId(), LogMessageType.Process); produceLogicalModel(lm, ig); } } for (StructureDefinition ed : page.getWorkerContext().getExtensionDefinitions()) { String filename = "extension-"+(ed.getUrl().startsWith("http://fhir-registry.smarthealthit.org/StructureDefinition/") ? ed.getUrl().substring(59).toLowerCase() : ed.getUrl().substring(40).toLowerCase()); ed.setUserData("filename", filename); ImplementationGuideDefn ig = page.getDefinitions().getIgs().get(ed.getUserString(ToolResourceUtilities.NAME_RES_IG)); ed.setUserData("path", (ig.isCore() ? "" : ig.getCode()+File.separator) + filename+".html"); } page.updateDiffEngineDefinitions(); loadValueSets2(); page.log(" ...extensions", LogMessageType.Process); for (StructureDefinition ae : page.getWorkerContext().getExtensionDefinitions()) produceExtensionDefinition(ae); checkAllOk(); page.log(" ...resource identities", LogMessageType.Process); for (String rname : page.getDefinitions().getBaseResources().keySet()) { ResourceDefn r = page.getDefinitions().getBaseResources().get(rname); produceResource1(r, r.isAbstract()); } for (String rname : page.getDefinitions().sortedResourceNames()) { if (!rname.equals("ValueSet") && !rname.equals("CodeSystem") && wantBuild(rname)) { ResourceDefn r = page.getDefinitions().getResources().get(rname); produceResource1(r, false); } } if (buildFlags.get("all")) { page.log(" ...base profiles", LogMessageType.Process); produceBaseProfile(); } for (String rname : page.getDefinitions().getBaseResources().keySet()) { ResourceDefn r = page.getDefinitions().getBaseResources().get(rname); page.log(" ...resource " + r.getName(), LogMessageType.Process); produceResource2(r, !rname.equals("Parameters"), rname.equals("Resource") ? 
"Meta" : null, false); } for (String rname : page.getDefinitions().sortedResourceNames()) { if (!rname.equals("ValueSet") && !rname.equals("CodeSystem") && wantBuild(rname)) { ResourceDefn r = page.getDefinitions().getResources().get(rname); page.log(" ...resource " + r.getName(), LogMessageType.Process); produceResource2(r, false, null, false); } } for (String rname : page.getDefinitions().getResourceTemplates().keySet()) { ResourceDefn r = page.getDefinitions().getResourceTemplates().get(rname); produceResource2(r, false, null, true); } for (Compartment c : page.getDefinitions().getCompartments()) { if (buildFlags.get("all")) { page.log(" ...compartment " + c.getName(), LogMessageType.Process); produceCompartment(c); } } Bundle searchParamsFeed = new Bundle(); searchParamsFeed.setId("searchParams"); searchParamsFeed.setType(BundleType.COLLECTION); searchParamsFeed.setMeta(new Meta().setLastUpdated(page.getResourceBundle().getMeta().getLastUpdated())); Set<String> uris = new HashSet<String>(); for (ResourceDefn rd : page.getDefinitions().getBaseResources().values()) addSearchParams(uris, searchParamsFeed, rd); for (String n : page.getDefinitions().sortedResourceNames()) { ResourceDefn rd = page.getDefinitions().getResources().get(n); addSearchParams(uris, searchParamsFeed, rd); } for (Profile cp : page.getDefinitions().getPackList()) { addSearchParams(uris, searchParamsFeed, cp); } checkBundleURLs(searchParamsFeed); for (String n : page.getIni().getPropertyNames("pages")) { if (buildFlags.get("all") || buildFlags.get("page-" + n.toLowerCase())) { page.log(" ...page " + n, LogMessageType.Process); producePage(n, page.getIni().getStringProperty("pages", n)); } } for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) { for (String n : ig.getPageList()) { page.log(" ...ig page " + n, LogMessageType.Process); produceIgPage(n, ig); } for (ImplementationGuideDefinitionPageComponent page : ig.getSpecialPages()) { produceIgPage(ig, page); } for (Profile p : ig.getProfiles()) { if (!p.getOperations().isEmpty()) { produceIgOperations(ig, p); } } } if (page.getIni().getPropertyNames("ig-pages") != null) { for (String n : page.getIni().getPropertyNames("ig-pages")) { page.log(" ...page " + n, LogMessageType.Process); for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) { if (!ig.isCore()) produceIgPage(n, ig, page.getIni().getStringProperty("ig-pages", n)); } } } for (String n : page.getDefinitions().getDictionaries().keySet()) { if (buildFlags.get("all")) { // || buildFlags.get("dict-" + n.toLowerCase())) { page.log(" ...dictionary " + n, LogMessageType.Process); produceDictionary(page.getDefinitions().getDictionaries().get(n)); } } int i = 0; for (String n : page.getIni().getPropertyNames("sid")) { page.log(" ...sid " + n, LogMessageType.Process); produceSid(i, n, page.getIni().getStringProperty("sid", n)); i++; } if (buildFlags.get("all")) { page.log(" ...check Fragments", LogMessageType.Process); checkFragments(); for (Profile p : page.getDefinitions().getPackList()) { // if (!n.startsWith("http://")) { page.log(" ...Profile " + p.getId(), LogMessageType.Process); produceConformancePackage(null, p, null); //} } produceV2(); produceV3(); page.getVsValidator().checkDuplicates(page.getValidationErrors()); if (buildFlags.get("all")) { // if (page.getToc().containsKey("1.1")) // throw new Exception("Duplicate DOC Entry "+"1.1"); page.getToc().put("1.1", new TocEntry("1.1", "Table Of Contents", "toc.html", false)); page.log(" ...page toc.html", 
LogMessageType.Process); producePage("toc.html", null); } checkAllOk(); page.log(" ...collections ", LogMessageType.Process); com.google.gson.JsonObject diff = new com.google.gson.JsonObject(); page.getDiffEngine().getDiffAsJson(diff); Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(diff); TextFile.stringToFile(json, Utilities.path(page.getFolders().dstDir, "fhir.diff.json")); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = dbf.newDocumentBuilder(); Document doc = builder.newDocument(); Element element = doc.createElement("difference"); doc.appendChild(element); page.getDiffEngine().getDiffAsXml(doc, element); prettyPrint(doc, Utilities.path(page.getFolders().dstDir, "fhir.diff.xml")); checkBundleURLs(page.getResourceBundle()); checkStructureDefinitions(page.getResourceBundle()); page.getResourceBundle().getEntry().sort(new ProfileBundleSorter()); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + "profiles-resources.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, page.getResourceBundle()); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "profiles-resources.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, page.getResourceBundle()); s.close(); checkBundleURLs(page.getTypeBundle()); checkStructureDefinitions(page.getTypeBundle()); page.getTypeBundle().getEntry().sort(new ProfileBundleSorter()); s = new FileOutputStream(page.getFolders().dstDir + "profiles-types.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, page.getTypeBundle()); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "profiles-types.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, page.getTypeBundle()); s.close(); Bundle extensionsFeed = new Bundle(); extensionsFeed.setId("extensions"); extensionsFeed.setType(BundleType.COLLECTION); extensionsFeed.setMeta(new Meta().setLastUpdated(page.getResourceBundle().getMeta().getLastUpdated())); Set<String> urls = new HashSet<String>(); for (StructureDefinition ed : page.getWorkerContext().getExtensionDefinitions()) { if (!urls.contains(ed.getUrl())) { urls.add(ed.getUrl()); extensionsFeed.getEntry().add(new BundleEntryComponent().setResource(ed).setFullUrl("http://hl7.org/fhir/"+ed.fhirType()+"/"+ed.getId())); } } checkBundleURLs(extensionsFeed); checkStructureDefinitions(extensionsFeed); s = new FileOutputStream(page.getFolders().dstDir + "extension-definitions.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, extensionsFeed); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "extension-definitions.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, extensionsFeed); s.close(); Utilities.copyFile(page.getFolders().dstDir + "extension-definitions.xml", page.getFolders().dstDir + "examples" + File.separator + "extension-definitions.xml"); s = new FileOutputStream(page.getFolders().dstDir + "search-parameters.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, searchParamsFeed); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "search-parameters.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, searchParamsFeed); s.close(); Utilities.copyFile(page.getFolders().dstDir + "search-parameters.xml", page.getFolders().dstDir + "examples" + File.separator + "search-parameters.xml"); for (ResourceDefn rd : page.getDefinitions().getResources().values()) addOtherProfiles(profileBundle, rd); 
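      // Note: the pattern just above - compose a Bundle to both pretty XML and pretty
      // JSON under dstDir - recurs for every conformance bundle in this method. A
      // minimal sketch of a helper that could capture it (hypothetical; 'writeBundle'
      // is an illustrative name, not an existing method in this class):
      //
      //   private void writeBundle(Bundle b, String stem) throws Exception {
      //     try (FileOutputStream x = new FileOutputStream(page.getFolders().dstDir + stem + ".xml")) {
      //       new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(x, b);
      //     }
      //     try (FileOutputStream j = new FileOutputStream(page.getFolders().dstDir + stem + ".json")) {
      //       new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(j, b);
      //     }
      //   }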
for (Profile cp : page.getDefinitions().getPackList()) { addOtherProfiles(profileBundle, cp); } checkBundleURLs(profileBundle); checkStructureDefinitions(profileBundle); s = new FileOutputStream(page.getFolders().dstDir + "profiles-others.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, profileBundle); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "profiles-others.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, profileBundle); s.close(); Utilities.copyFile(page.getFolders().dstDir + "profiles-others.xml", page.getFolders().dstDir + "examples" + File.separator + "profiles-others.xml"); // todo-bundle - should this be checked? // int ec = 0; // for (Resource e : valueSetsFeed.getItem()) { // ValueSet vs = (ValueSet) e; // if (!vs.getUrl().equals(e.getId())) { // ec++; // page.log("Valueset id mismatch: atom entry has '"+e.getId()+"', but value set is '"+vs.getUrl()+"'", LogMessageType.Error); // } // } // if (ec > 0) // throw new Exception("Cannot continue due to value set mis-identification"); checkBundleURLs(dataElements); s = new FileOutputStream(page.getFolders().dstDir + "dataelements.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, dataElements); s.close(); Utilities.copyFile(page.getFolders().dstDir + "dataelements.xml", page.getFolders().dstDir + "examples" + File.separator + "dataelements.xml"); s = new FileOutputStream(page.getFolders().dstDir + "dataelements.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, dataElements); s.close(); checkBundleURLs(valueSetsFeed); s = new FileOutputStream(page.getFolders().dstDir + "valuesets.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, valueSetsFeed); s.close(); Utilities.copyFile(page.getFolders().dstDir + "valuesets.xml", page.getFolders().dstDir + "examples" + File.separator + "valuesets.xml"); s = new FileOutputStream(page.getFolders().dstDir + "valuesets.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, valueSetsFeed); s.close(); checkBundleURLs(conceptMapsFeed); s = new FileOutputStream(page.getFolders().dstDir + "conceptmaps.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, conceptMapsFeed); s.close(); Utilities.copyFile(page.getFolders().dstDir + "conceptmaps.xml", page.getFolders().dstDir + "examples" + File.separator + "conceptmaps.xml"); s = new FileOutputStream(page.getFolders().dstDir + "conceptmaps.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, conceptMapsFeed); s.close(); checkBundleURLs(externals); s = new FileOutputStream(page.getFolders().dstDir + "external-resources.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, externals); s.close(); Utilities.copyFile(page.getFolders().dstDir + "external-resources.xml", page.getFolders().dstDir + "examples" + File.separator + "external-resources.xml"); s = new FileOutputStream(page.getFolders().dstDir + "external-resources.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, externals); s.close(); Bundle v2Valuesets = new Bundle(); v2Valuesets.setType(BundleType.COLLECTION); v2Valuesets.setId("v2-valuesets"); v2Valuesets.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime())); for (ValueSet vs : page.getValueSets().values()) if (vs.getUrl().contains("/v2")) v2Valuesets.addEntry().setFullUrl("http://hl7.org/fhir/"+vs.fhirType()+"/"+vs.getId()).setResource(vs); for (CodeSystem cs : page.getCodeSystems().values()) if (cs!= null && cs.getUrl().contains("/v2")) 
v2Valuesets.addEntry().setFullUrl("http://hl7.org/fhir/"+cs.fhirType()+"/"+cs.getId()).setResource(cs); checkBundleURLs(v2Valuesets); s = new FileOutputStream(page.getFolders().dstDir + "v2-tables.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, v2Valuesets); s.close(); Utilities.copyFile(page.getFolders().dstDir + "v2-tables.xml", page.getFolders().dstDir + "examples" + File.separator + "v2-tables.xml"); s = new FileOutputStream(page.getFolders().dstDir + "v2-tables.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, v2Valuesets); s.close(); v2Valuesets = null; Bundle v3Valuesets = new Bundle(); v3Valuesets.setType(BundleType.COLLECTION); v3Valuesets.setId("v3-valuesets"); v3Valuesets.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime())); for (ValueSet vs : page.getValueSets().values()) if (vs.getUrl().contains("/v3")) v3Valuesets.addEntry().setFullUrl("http://hl7.org/fhir/"+vs.fhirType()+"/"+vs.getId()).setResource(vs); for (CodeSystem cs : page.getCodeSystems().values()) if (cs != null && cs.getUrl().contains("/v3")) v3Valuesets.addEntry().setFullUrl("http://hl7.org/fhir/"+cs.fhirType()+"/"+cs.getId()).setResource(cs); checkBundleURLs(v3Valuesets); s = new FileOutputStream(page.getFolders().dstDir + "v3-codesystems.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, v3Valuesets); s.close(); Utilities.copyFile(page.getFolders().dstDir + "v3-codesystems.xml", page.getFolders().dstDir + "examples" + File.separator + "v3-codesystems.xml"); s = new FileOutputStream(page.getFolders().dstDir + "v3-codesystems.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, v3Valuesets); s.close(); Bundle expansionFeed = new Bundle(); Set<String> urlset = new HashSet<>(); expansionFeed.setId("valueset-expansions"); expansionFeed.setType(BundleType.COLLECTION); expansionFeed.setMeta(new Meta().setLastUpdated(page.getGenDate().getTime())); expansionFeed.getFormatCommentsPre().add( "This collection contains expansions for all the value sets that are used on an element of type \r\n" + "'code', to help with code generation (saves the code generator having to figure out how to \r\n" + "do the expansions or find a terminology server that supports the same version of the value sets"); for (ValueSet vs : page.getValueSets().values()) { if (!urlset.contains(vs.getUrl())) { urlset.add(vs.getUrl()); if (vs.getUserData(ToolResourceUtilities.NAME_VS_USE_MARKER) != null) { ValueSet evs = null; if (vs.hasUserData("expansion")) evs = (ValueSet) vs.getUserData("expansion"); else { ValueSetExpansionOutcome vse = page.getWorkerContext().expandVS(vs, true, false); if (vse.getValueset() != null) { evs = vse.getValueset(); vs.setUserData("expansion", evs); } } if (evs != null) { ValueSet vsc = vs.copy(); vsc.setText(null); vsc.setExpansion(evs.getExpansion()); expansionFeed.addEntry().setFullUrl("http://hl7.org/fhir/"+vsc.fhirType()+"/"+vsc.getId()).setResource(vsc); } } } } s = new FileOutputStream(page.getFolders().dstDir + "expansions.xml"); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, expansionFeed); s.close(); s = new FileOutputStream(page.getFolders().dstDir + "expansions.json"); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, expansionFeed); s.close(); produceComparisons(); produceSpecMap(); processRDF(); page.log("....version maps", LogMessageType.Process); ZipGenerator zip = new ZipGenerator(page.getFolders().dstDir + "r3r4maps.zip"); zip.addFiles(Utilities.path(page.getFolders().rootDir, 
"implementations", "r3maps", "R3toR4", ""), "r3/", null, null); zip.addFiles(Utilities.path(page.getFolders().rootDir, "implementations", "r3maps", "R4toR3", ""), "r4/", null, null); zip.close(); page.log("....definitions", LogMessageType.Process); zip = new ZipGenerator(page.getFolders().dstDir + "definitions.xml.zip"); zip.addFileName("version.info", page.getFolders().dstDir + "version.info", false); zip.addFileName("profiles-types.xml", page.getFolders().dstDir + "profiles-types.xml", false); zip.addFileName("profiles-resources.xml", page.getFolders().dstDir + "profiles-resources.xml", false); zip.addFileName("profiles-others.xml", page.getFolders().dstDir + "profiles-others.xml", false); zip.addFileName("extension-definitions.xml", page.getFolders().dstDir + "extension-definitions.xml", false); zip.addFileName("search-parameters.xml", page.getFolders().dstDir + "search-parameters.xml", false); zip.addFileName("valuesets.xml", page.getFolders().dstDir + "valuesets.xml", false); zip.addFileName("v2-tables.xml", page.getFolders().dstDir + "v2-tables.xml", false); zip.addFileName("v3-codesystems.xml", page.getFolders().dstDir + "v3-codesystems.xml", false); zip.addFileName("conceptmaps.xml", page.getFolders().dstDir + "conceptmaps.xml", false); zip.addFileName("dataelements.xml", page.getFolders().dstDir + "dataelements.xml", false); zip.addFileName("fhir-all-xsd.zip", page.getFolders().dstDir + "fhir-all-xsd.zip", false); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "definitions.json.zip"); zip.addFileName("version.info", page.getFolders().dstDir + "version.info", false); zip.addFileName("profiles-types.json", page.getFolders().dstDir + "profiles-types.json", false); zip.addFileName("profiles-resources.json", page.getFolders().dstDir + "profiles-resources.json", false); zip.addFileName("profiles-others.json", page.getFolders().dstDir + "profiles-others.json", false); zip.addFileName("extension-definitions.json", page.getFolders().dstDir + "extension-definitions.json", false); zip.addFileName("search-parameters.json", page.getFolders().dstDir + "search-parameters.json", false); zip.addFileName("valuesets.json", page.getFolders().dstDir + "valuesets.json", false); zip.addFileName("v2-tables.json", page.getFolders().dstDir + "v2-tables.json", false); zip.addFileName("v3-codesystems.json", page.getFolders().dstDir + "v3-codesystems.json", false); zip.addFileName("conceptmaps.json", page.getFolders().dstDir + "conceptmaps.json", false); zip.addFileName("dataelements.json", page.getFolders().dstDir + "dataelements.json", false); zip.addFileName("fhir.schema.json.zip", page.getFolders().dstDir + "fhir.schema.json.zip", false); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "definitions.xlsx.zip"); for (String rn : page.getDefinitions().sortedResourceNames()) { zip.addFileName(rn.toLowerCase()+".xlsx", page.getFolders().dstDir + rn.toLowerCase()+".xlsx", false); } zip.close(); // this is the actual package used by the validator. 
zip = new ZipGenerator(page.getFolders().dstDir + "validator.pack"); // conformance resources zip.addFileName("profiles-types.json", page.getFolders().dstDir + "profiles-types.json", false); zip.addFileName("profiles-resources.json", page.getFolders().dstDir + "profiles-resources.json", false); zip.addFileName("profiles-others.json", page.getFolders().dstDir + "profiles-others.json", false); zip.addFileName("extension-definitions.json", page.getFolders().dstDir + "extension-definitions.json", false); zip.addFileName("valuesets.json", page.getFolders().dstDir + "valuesets.json", false); zip.addFileName("v2-tables.json", page.getFolders().dstDir + "v2-tables.json", false); zip.addFileName("v3-codesystems.json", page.getFolders().dstDir + "v3-codesystems.json", false); zip.addFileName("conceptmaps.json", page.getFolders().dstDir + "conceptmaps.json", false); // native schema zip.addFileName("fhir-all-xsd.zip", page.getFolders().dstDir + "fhir-all-xsd.zip", false); zip.addFileName("fhir.schema.json.zip", page.getFolders().dstDir + "fhir.schema.json.zip", false); zip.addFileName("fhir.shex", page.getFolders().dstDir + "fhir.shex", false); zip.close(); // page.log("....dstu2 format", LogMessageType.Process); // DSTU2ValidationConvertor dstu2 = new DSTU2ValidationConvertor(); // dstu2.convert(page.getFolders().dstDir + "profiles-types.xml", page.getFolders().tmpDir + "profiles-types-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "profiles-resources.xml", page.getFolders().tmpDir + "profiles-resources-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "profiles-others.xml", page.getFolders().tmpDir + "profiles-others-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "extension-definitions.xml", page.getFolders().tmpDir + "extension-definitions-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "search-parameters.xml", page.getFolders().tmpDir + "search-parameters-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "valuesets.xml", page.getFolders().tmpDir + "valuesets-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "v2-tables.xml", page.getFolders().tmpDir + "v2-tables-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "v3-codesystems.xml", page.getFolders().tmpDir + "v3-codesystems-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "conceptmaps.xml", page.getFolders().tmpDir + "conceptmaps-r2.xml"); // dstu2.convert(page.getFolders().dstDir + "dataelements.xml", page.getFolders().tmpDir + "dataelements-r2.xml"); // // zip = new ZipGenerator(page.getFolders().dstDir + "definitions-r2.xml.zip"); // zip.addFileName("profiles-types.xml", page.getFolders().tmpDir + "profiles-types-r2.xml", false); // zip.addFileName("profiles-resources.xml", page.getFolders().tmpDir + "profiles-resources-r2.xml", false); // zip.addFileName("profiles-others.xml", page.getFolders().tmpDir + "profiles-others-r2.xml", false); // zip.addFileName("extension-definitions.xml", page.getFolders().tmpDir + "extension-definitions-r2.xml", false); // zip.addFileName("search-parameters.xml", page.getFolders().tmpDir + "search-parameters-r2.xml", false); // zip.addFileName("valuesets.xml", page.getFolders().tmpDir + "valuesets-r2.xml", false); // zip.addFileName("v2-tables.xml", page.getFolders().tmpDir + "v2-tables-r2.xml", false); // zip.addFileName("v3-codesystems.xml", page.getFolders().tmpDir + "v3-codesystems-r2.xml", false); // zip.addFileName("conceptmaps.xml", page.getFolders().tmpDir + "conceptmaps-r2.xml", false); // zip.addFileName("dataelements.xml", page.getFolders().tmpDir + 
"dataelements-r2.xml", false); // zip.close(); page.log("....dstu3 format (xml)", LogMessageType.Process); DSTU3ValidationConvertor dstu3 = new DSTU3ValidationConvertor(); dstu3.convert(page.getFolders().dstDir + "profiles-types.xml", page.getFolders().tmpDir + "profiles-types-r3.xml"); dstu3.convert(page.getFolders().dstDir + "profiles-resources.xml", page.getFolders().tmpDir + "profiles-resources-r3.xml"); dstu3.convert(page.getFolders().dstDir + "profiles-others.xml", page.getFolders().tmpDir + "profiles-others-r3.xml"); dstu3.convert(page.getFolders().dstDir + "extension-definitions.xml", page.getFolders().tmpDir + "extension-definitions-r3.xml"); dstu3.convert(page.getFolders().dstDir + "search-parameters.xml", page.getFolders().tmpDir + "search-parameters-r3.xml"); dstu3.convert(page.getFolders().dstDir + "valuesets.xml", page.getFolders().tmpDir + "valuesets-r3.xml"); dstu3.convert(page.getFolders().dstDir + "v2-tables.xml", page.getFolders().tmpDir + "v2-tables-r3.xml"); dstu3.convert(page.getFolders().dstDir + "v3-codesystems.xml", page.getFolders().tmpDir + "v3-codesystems-r3.xml"); dstu3.convert(page.getFolders().dstDir + "conceptmaps.xml", page.getFolders().tmpDir + "conceptmaps-r3.xml"); dstu3.convert(page.getFolders().dstDir + "dataelements.xml", page.getFolders().tmpDir + "dataelements-r3.xml"); zip = new ZipGenerator(page.getFolders().dstDir + "definitions-r3.xml.zip"); zip.addFileName("profiles-types.xml", page.getFolders().tmpDir + "profiles-types-r3.xml", false); zip.addFileName("profiles-resources.xml", page.getFolders().tmpDir + "profiles-resources-r3.xml", false); zip.addFileName("profiles-others.xml", page.getFolders().tmpDir + "profiles-others-r3.xml", false); zip.addFileName("extension-definitions.xml", page.getFolders().tmpDir + "extension-definitions-r3.xml", false); zip.addFileName("search-parameters.xml", page.getFolders().tmpDir + "search-parameters-r3.xml", false); zip.addFileName("valuesets.xml", page.getFolders().tmpDir + "valuesets-r3.xml", false); zip.addFileName("v2-tables.xml", page.getFolders().tmpDir + "v2-tables-r3.xml", false); zip.addFileName("v3-codesystems.xml", page.getFolders().tmpDir + "v3-codesystems-r3.xml", false); zip.addFileName("conceptmaps.xml", page.getFolders().tmpDir + "conceptmaps-r3.xml", false); zip.addFileName("dataelements.xml", page.getFolders().tmpDir + "dataelements-r3.xml", false); zip.close(); page.log("....dstu3 format (json)", LogMessageType.Process); dstu3.convertJ(page.getFolders().dstDir + "profiles-types.xml", page.getFolders().tmpDir + "profiles-types-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "profiles-resources.xml", page.getFolders().tmpDir + "profiles-resources-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "profiles-others.xml", page.getFolders().tmpDir + "profiles-others-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "extension-definitions.xml", page.getFolders().tmpDir + "extension-definitions-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "search-parameters.xml", page.getFolders().tmpDir + "search-parameters-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "valuesets.xml", page.getFolders().tmpDir + "valuesets-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "v2-tables.xml", page.getFolders().tmpDir + "v2-tables-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "v3-codesystems.xml", page.getFolders().tmpDir + "v3-codesystems-r3.json"); dstu3.convertJ(page.getFolders().dstDir + "conceptmaps.xml", page.getFolders().tmpDir + "conceptmaps-r3.json"); 
dstu3.convertJ(page.getFolders().dstDir + "dataelements.xml", page.getFolders().tmpDir + "dataelements-r3.json"); zip = new ZipGenerator(page.getFolders().dstDir + "definitions-r3.json.zip"); zip.addFileName("profiles-types.json", page.getFolders().tmpDir + "profiles-types-r3.json", false); zip.addFileName("profiles-resources.json", page.getFolders().tmpDir + "profiles-resources-r3.json", false); zip.addFileName("profiles-others.json", page.getFolders().tmpDir + "profiles-others-r3.json", false); zip.addFileName("extension-definitions.json", page.getFolders().tmpDir + "extension-definitions-r3.json", false); zip.addFileName("search-parameters.json", page.getFolders().tmpDir + "search-parameters-r3.json", false); zip.addFileName("valuesets.json", page.getFolders().tmpDir + "valuesets-r3.json", false); zip.addFileName("v2-tables.json", page.getFolders().tmpDir + "v2-tables-r3.json", false); zip.addFileName("v3-codesystems.json", page.getFolders().tmpDir + "v3-codesystems-r3.json", false); zip.addFileName("conceptmaps.json", page.getFolders().tmpDir + "conceptmaps-r3.json", false); zip.addFileName("dataelements.json", page.getFolders().tmpDir + "dataelements-r3.json", false); zip.close(); System.gc(); page.log("....r4 in r5 format", LogMessageType.Process); zip = new ZipGenerator(page.getFolders().dstDir + "definitions-r2asr3.xml.zip"); page.getDiffEngine().saveR4AsR5(zip, FhirFormat.XML); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "definitions-r2asr3.json.zip"); page.getDiffEngine().saveR4AsR5(zip, FhirFormat.JSON); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "all-valuesets.zip"); zip.addFileName("valuesets.xml", page.getFolders().dstDir + "valuesets.xml", false); zip.addFileName("valuesets.json", page.getFolders().dstDir + "valuesets.json", false); zip.addFileName("conceptmaps.xml", page.getFolders().dstDir + "conceptmaps.xml", false); zip.addFileName("conceptmaps.json", page.getFolders().dstDir + "conceptmaps.json", false); zip.addFileName("v2-tables.xml", page.getFolders().dstDir + "v2-tables.xml", false); zip.addFileName("v2-tables.json", page.getFolders().dstDir + "v2-tables.json", false); zip.addFileName("v3-codesystems.xml", page.getFolders().dstDir + "v3-codesystems.xml", false); zip.addFileName("v3-codesystems.json", page.getFolders().dstDir + "v3-codesystems.json", false); zip.close(); page.log("....IG Builder Resources", LogMessageType.Process); zip = new ZipGenerator(page.getFolders().tmpDir + "ig-template.zip"); zip.addFolder(Utilities.path(page.getFolders().rootDir, "tools", "ig"), "", false, null); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "igpack.zip"); zip.addFileName("fhir.css", page.getFolders().dstDir + "fhir.css", false); zip.addFileName("spec.internals", page.getFolders().dstDir + "spec.internals", false); zip.addFileName("profiles-types.xml", page.getFolders().dstDir + "profiles-types.xml", false); zip.addFileName("profiles-resources.xml", page.getFolders().dstDir + "profiles-resources.xml", false); zip.addFileName("profiles-others.xml", page.getFolders().dstDir + "profiles-others.xml", false); zip.addFileName("extension-definitions.xml", page.getFolders().dstDir + "extension-definitions.xml", false); zip.addFileName("search-parameters.xml", page.getFolders().dstDir + "search-parameters.xml", false); zip.addFileName("valuesets.xml", page.getFolders().dstDir + "valuesets.xml", false); zip.addFileName("v2-tables.xml", page.getFolders().dstDir + "v2-tables.xml", false); zip.addFileName("v3-codesystems.xml", 
page.getFolders().dstDir + "v3-codesystems.xml", false); zip.addFileName("conceptmaps.xml", page.getFolders().dstDir + "conceptmaps.xml", false); zip.addFileName("dataelements.xml", page.getFolders().dstDir + "dataelements.xml", false); zip.addFileName("version.info", page.getFolders().dstDir + "version.info", false); zip.addFileName("mappingSpaces.details", page.getFolders().srcDir + "mappingSpaces.xml", false); zip.addFileName("redirect.asp.template", page.getFolders().srcDir + "redirect.asp", false); zip.addFileName("redirect.cgi.template", page.getFolders().srcDir + "redirect.cgi", false); zip.addFileName("redirect.php.template", page.getFolders().srcDir + "redirect.php", false); zip.addFileName("ig-template.zip", Utilities.path(page.getFolders().tmpDir, "ig-template.zip"), false); zip.addFiles(Utilities.path(page.getFolders().rootDir, "publish", ""), "", ".png", null); zip.addFiles(Utilities.path(page.getFolders().rootDir, "publish", ""), "", ".gif", null); zip.close(); page.log("....IG Builder (2)", LogMessageType.Process); SpecNPMPackageGenerator self = new SpecNPMPackageGenerator(); self.generate(page.getFolders().dstDir, page.getBaseURL(), false, page.getGenDate().getTime()); page.log(" ...zips", LogMessageType.Process); zip = new ZipGenerator(page.getFolders().dstDir + "examples.zip"); zip.addFiles(page.getFolders().dstDir + "examples" + File.separator, "", null, "expansions.xml"); zip.close(); zip = new ZipGenerator(page.getFolders().dstDir + "examples-json.zip"); gson = new GsonBuilder().setPrettyPrinting().create(); File f = new CSFile(page.getFolders().dstDir); File[] files = f.listFiles(); String[] noExt = new String[] {".schema.json", ".canonical.json", ".diff.json", "expansions.json", "package.json", "choice-elements.json", "backbone-elements.json"}; for (int fi = 0; fi < files.length; fi++) { if (files[fi].isFile() && (files[fi].getName().endsWith(".json"))) { boolean ok = true; for (String n : noExt) { ok = ok && !files[fi].getName().endsWith(n); } if (ok) { JsonObject jr = JSONUtil.parse(TextFile.fileToString(files[fi])); if (!jr.has("url")) { JsonObject meta = JSONUtil.forceObject(jr, "meta"); JsonArray labels = JSONUtil.forceArray(meta, "tag"); JsonObject label = JSONUtil.addObj(labels); label.addProperty("system", "http://terminology.hl7.org/CodeSystem/v3-ActReason"); label.addProperty("code", "HTEST"); label.addProperty("display", "test health data"); } String jrs = gson.toJson(jr); zip.addBytes(files[fi].getName(), jrs.getBytes(Charsets.UTF_8), true); } } } zip.close(); NDJsonWriter ndjson = new NDJsonWriter(page.getFolders().dstDir + "examples-ndjson.zip", page.getFolders().tmpDir); ndjson.addFilesFiltered(page.getFolders().dstDir, ".json", new String[] {".schema.json", ".canonical.json", ".diff.json", "expansions.json", "package.json"}); ndjson.close(); zip = new ZipGenerator(page.getFolders().dstDir + "examples-ttl.zip"); zip.addFilesFiltered(page.getFolders().dstDir, "", ".ttl", new String[0]); zip.close(); page.log("Check HTML Links", LogMessageType.Process); page.getHTMLChecker().produce(); checkAllOk(); } else page.log("Partial Build - terminating now", LogMessageType.Error); } private void produceConceptMap(ConceptMap cm, ResourceDefn rd, SectionTracker st) throws Exception { new NarrativeGenerator("", "", page.getWorkerContext()).generate(cm, null); String n = cm.getUserString("path"); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(n, ".xml")); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cm); 
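  // Each concept map is written out four ways below: pretty and canonical form, in
  // both XML and JSON. As a reading aid (my understanding of the parser API, not a
  // statement from this codebase): OutputStyle.PRETTY indents for human readers,
  // while OutputStyle.CANONICAL suppresses insignificant whitespace so the output
  // is byte-stable, e.g. for hashing or digital signatures.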
  s.close();
  s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(n, ".canonical.xml"));
  new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cm);
  s.close();
  s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(n, ".json"));
  new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, cm);
  s.close();
  s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(n, ".canonical.json"));
  new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, cm);
  s.close();
  Utilities.copyFile(new CSFile(page.getFolders().dstDir + Utilities.changeFileExt(n, ".xml")), new CSFile(page.getFolders().dstDir + "examples" + File.separator + Utilities.changeFileExt(n, ".xml")));
  // saveAsPureHtml(cm, new FileOutputStream(Utilities.path(page.getFolders().dstDir, "html", n)), true);
  String src = TextFile.fileToString(page.getFolders().srcDir + "template-status-map.html");
  Map<String, String> others = new HashMap<String, String>();
  others.put("status-map", new XhtmlComposer(XhtmlComposer.HTML).compose(cm.getText().getDiv()));
  TextFile.stringToFile(insertSectionNumbers(page.processPageIncludes(n, src, "conceptmap-instance", others, null, null, "Profile", null, rd, rd.getWg()), st, n, 0, null), page.getFolders().dstDir + n);
  page.getHTMLChecker().registerFile(n, cm.getTitle(), HTMLLinkChecker.XHTML_TYPE, true);
  cloneToXhtml(Utilities.changeFileExt(n, ""), cm.getTitle(), true, "conceptmap-instance", "Profile", null, ((ResourceDefn) cm.getUserData("resource-definition")).getWg());
  jsonToXhtml(Utilities.changeFileExt(n, ""), cm.getTitle(), resource2Json(cm), "conceptmap-instance", "Profile", null, ((ResourceDefn) cm.getUserData("resource-definition")).getWg());
  ttlToXhtml(Utilities.changeFileExt(n, ""), cm.getTitle(), resource2Ttl(cm), "conceptmap-instance", "Profile", null, ((ResourceDefn) cm.getUserData("resource-definition")).getWg());
}

public class ProfileBundleSorter implements Comparator<BundleEntryComponent> {

  @Override
  public int compare(BundleEntryComponent o1, BundleEntryComponent o2) {
    String s1 = typeScore(o1.getResource());
    String s2 = typeScore(o2.getResource());
    return s1.compareTo(s2);
  }

  private String typeScore(Resource r) {
    if (!(r instanceof StructureDefinition))
      return r.fhirType()+"."+r.getId();
    StructureDefinition sd = (StructureDefinition) r;
    String p = sd.getDerivation() == TypeDerivationRule.CONSTRAINT ? "1" : "0";
    if (sd.getId().equals("Element"))
      return "aaStructureDefinition.00."+p+".Element";
    if (sd.getId().equals("BackboneElement"))
      return "aaStructureDefinition.01."+p+".BackboneElement";
    if (sd.getId().equals("Resource"))
      return "aaStructureDefinition.03."+p+".Resource";
    if (sd.getId().equals("DomainResource")) // was a duplicate test for "BackboneElement", which made this branch unreachable
      return "aaStructureDefinition.04."+p+".DomainResource";
    if (sd.getKind() == StructureDefinitionKind.PRIMITIVETYPE)
      return "aaStructureDefinition.05."+p+"."+r.getId();
    if (sd.getKind() == StructureDefinitionKind.COMPLEXTYPE)
      return "aaStructureDefinition.06."+p+"."+r.getId();
    if (sd.getKind() == StructureDefinitionKind.RESOURCE)
      return "aaStructureDefinition.07."+p+"."+r.getId();
    // (r1.getKind() == StructureDefinitionKind.LOGICAL)
    return "aaStructureDefinition.08."+p+"."+r.getId();
  }
}

private void produceMap(String name, SectionTracker st, ResourceDefn res) throws Exception {
  File f = new File(Utilities.path(page.getFolders().rootDir, "implementations", "r3maps", "R4toR3", name+".map"));
  if (!f.exists())
    return;
  String n = name.toLowerCase();
  Map<String, String> values = new HashMap<String, String>();
  values.put("conv-status", page.r3r4StatusForResource(name));
  String fwds = TextFile.fileToString(Utilities.path(page.getFolders().rootDir, "implementations", "r3maps", "R3toR4", page.r3nameForResource(name)+".map"));
  String bcks = TextFile.fileToString(Utilities.path(page.getFolders().rootDir, "implementations", "r3maps", "R4toR3", name+".map"));
  values.put("fwds", Utilities.escapeXml(fwds));
  values.put("bcks", Utilities.escapeXml(bcks));
  values.put("fwds-status", "");
  values.put("bcks-status", "");
  values.put("r3errs", Utilities.escapeXml(page.getR3R4ValidationErrors(name)));
  try {
    new StructureMapUtilities(page.getWorkerContext()).parse(fwds, page.r3nameForResource(name)+".map");
  } catch (FHIRException e) {
    values.put("fwds-status", "<p style=\"background-color: #ffb3b3; border:1px solid maroon; padding: 5px;\">This script does not compile: "+e.getMessage()+"</p>\r\n");
  }
  try {
    new StructureMapUtilities(page.getWorkerContext()).parse(bcks, name+".map");
  } catch (FHIRException e) {
    values.put("bcks-status", "<p style=\"background-color: #ffb3b3; border:1px solid maroon; padding: 5px;\">This script does not compile: "+e.getMessage()+"</p>\r\n");
  }
  if (page.getDefinitions().hasResource(name) || (page.getDefinitions().getBaseResources().containsKey(name) && !name.equals("Parameters"))) {
    String src = TextFile.fileToString(page.getFolders().srcDir + "template-version-maps.html");
    TextFile.stringToFile(
        insertSectionNumbers(page.processResourceIncludes(n, page.getDefinitions().getResourceByName(name), null, null, null, null, null, src, null, null, "res-R3/R4 Conversions", n + "-version-maps.html", null, values, res.getWg(), null), st, n + "-version-maps.html", 0, null),
        page.getFolders().dstDir + n + "-version-maps.html");
    page.getHTMLChecker().registerFile(n + "-version-maps.html", "Version Maps for " + name, HTMLLinkChecker.XHTML_TYPE, true);
  }
}

private void produceSpecMap() throws IOException {
  SpecMapManager spm = new SpecMapManager("hl7.fhir.core", page.getVersion().toCode(), page.getVersion().toCode(), page.getBuildId(), page.getGenDate(), CANONICAL_BASE);
  for (StructureDefinition sd : page.getWorkerContext().allStructures()) {
    if (sd.hasUserData("path")) {
      spm.path(sd.getUrl(), sd.getUserString("path").replace("\\", "/"));
      spm.target(sd.getUserString("path").replace("\\", "/"));
    }
  }
  for (StructureDefinition sd : page.getWorkerContext().getExtensionDefinitions()) {
    if
(sd.hasUserData("path")) { spm.path(sd.getUrl(), sd.getUserString("path").replace("\\", "/")); spm.target(sd.getUserString("path").replace("\\", "/")); } } for (String s : page.getCodeSystems().keySet()) { CodeSystem cs = page.getCodeSystems().get(s); if (cs == null && !Utilities.existsInList(s, "http://unitsofmeasure.org", "http://hl7.org/fhir/sid/cvx", "http://loinc.org", "http://fdasis.nlm.nih.gov", "http://www.nlm.nih.gov/research/umls/rxnorm", "urn:oid:1.2.36.1.2001.1005.17")) System.out.println("No code system resource found for "+s); } for (CodeSystem cs : page.getCodeSystems().values()) { if (cs != null && cs.hasUserData("path")) { spm.path(cs.getUrl(), cs.getUserString("path").replace("\\", "/")); spm.target(cs.getUserString("path").replace("\\", "/")); } } for (ValueSet vs : page.getValueSets().values()) { if (vs.hasUserData("path")) { spm.path(vs.getUrl(), vs.getUserString("path").replace("\\", "/")); spm.target(vs.getUserString("path").replace("\\", "/")); } } for (ConceptMap cm : page.getConceptMaps().values()) { if (cm.hasUserData("path")) { spm.path(cm.getUrl(), cm.getUserString("path").replace("\\", "/")); spm.target(cm.getUserString("path").replace("\\", "/")); } } for (String s : page.getDefinitions().getPageTitles().keySet()) { spm.page(s, page.getDefinitions().getPageTitles().get(s)); } for (String n : page.getIni().getPropertyNames("pages")) { spm.target(n); } for (ResourceDefn rd : page.getDefinitions().getResources().values()) { spm.target(rd.getName().toLowerCase()+".html"); spm.target(rd.getName().toLowerCase()+"-definitions.html"); spm.target(rd.getName().toLowerCase()+"-mappings.html"); spm.target(rd.getName().toLowerCase()+"-examples.html"); spm.target(rd.getName().toLowerCase()+"-profiles.html"); if (!rd.getOperations().isEmpty()) spm.target(rd.getName().toLowerCase()+"-operations.html"); for (Example ex : rd.getExamples()) { ImplementationGuideDefn ig = ex.getIg() == null ? null : page.getDefinitions().getIgs().get(ex.getIg()); String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode() + "/"; spm.target(prefix+ex.getTitle()+".html"); } } for (Profile p : page.getDefinitions().getPackList()) { spm.target(p.getId()+".html"); } // for (String url : page.getDefinitions().getMapTypes().keySet()) { // spm.map(url, page.getDefinitions().getMapTypes().get(url).getPreamble()); // } scanForImages(spm, page.getFolders().dstDir, page.getFolders().dstDir); scanForPages(spm, page.getFolders().dstDir, page.getFolders().dstDir); for (String url : page.getDefinitions().getRedirectList().keySet()) { spm.target(url.substring(20)); // http://hl7.org/fhir/ = 20 chars } spm.save(page.getFolders().dstDir + "spec.internals"); } private void scanForPages(SpecMapManager spm, String base, String folder) { for (File f : new File(folder).listFiles()) { if (f.isDirectory()) { scanForPages(spm, base, f.getAbsolutePath()); } else if (f.getName().equals("redirect.asp")) { String s = folder.substring(0, folder.length()-1); if (s.length() > base.length()) { s = s.substring(base.length()).replace(File.separator, "/"); if (!Utilities.noString(s)) { spm.target(s); spm.target(s+"/"); } } } else { String ext = f.getName().contains(".") ? 
f.getName().substring(f.getName().lastIndexOf(".")) : ""; if (Utilities.existsInList(ext, ".html", ".zip", ".jar")) spm.target(f.getAbsolutePath().substring(base.length()).replace(File.separator, "/")); } } } private void scanForImages(SpecMapManager spm, String base, String folder) { for (File f : new File(folder).listFiles()) { if (f.isDirectory()) { scanForImages(spm, base, f.getAbsolutePath()); } else { String ext = f.getName().contains(".") ? f.getName().substring(f.getName().lastIndexOf(".")) : ""; if (Utilities.existsInList(ext, ".png", ".jpg")) spm.image(f.getAbsolutePath().substring(base.length()).replace(File.separator, "/")); } } } private void checkStructureDefinitions(Bundle bnd) { for (BundleEntryComponent e : bnd.getEntry()) { if (e.getResource() instanceof StructureDefinition) { StructureDefinition sd = (StructureDefinition) e.getResource(); checkMetaData(sd); for (ElementDefinition ed : sd.getDifferential().getElement()) checkElement(sd, ed, true); for (ElementDefinition ed : sd.getSnapshot().getElement()) checkElement(sd, ed, false); } } } private void checkElement(StructureDefinition sd, ElementDefinition ed, boolean inDiff) { check(ed.hasPath(), sd, "Element has no path"); Set<String> codes = new HashSet<String>(); for (TypeRefComponent tr : ed.getType()) { String tc = tr.getWorkingCode(); if (codes.contains(tc)) check(false, sd, ed.getPath()+": type '"+tc+"' is duplicated"); if ((!inDiff || tr.hasCode()) && tc != null) if (ed.getPath().contains(".")) check(page.getDefinitions().hasBaseType(tc) || tc.equals("Resource"), sd, ed.getPath()+": type '"+tc+"' is not valid (a)"); else if (sd.hasBaseDefinition()) { if (sd.getDerivation() == TypeDerivationRule.CONSTRAINT) check(page.getDefinitions().hasConcreteResource(tc) || page.getDefinitions().hasBaseType(tc) , sd, ed.getPath()+": type '"+tc+"' is not valid (b)"); else check(page.getDefinitions().hasAbstractResource(tc) || tc.equals("Element"), sd, ed.getPath()+": type '"+tc+"' is not valid (c)"); } if (tr.hasProfile()) { check(tr.getProfile().size() == 1, sd, ed.getPath()+": multiple profiles found: "+tr.getProfile()); String pt = tr.getProfile().get(0).getValue(); if (pt.contains("#")) { String[] parts = pt.split("\\#"); StructureDefinition exd = page.getWorkerContext().fetchResource(StructureDefinition.class, parts[0]); if (exd == null) check(false, sd, ed.getPath()+": profile '"+pt+"' is not valid (definition not found)"); else { ElementDefinition ex = null; for (ElementDefinition et : exd.getSnapshot().getElement()) if (et.hasFixed() && et.getFixed() instanceof UriType && ((UriType)et.getFixed()).asStringValue().equals(parts[1])) ex = et; check(ex != null, sd, ed.getPath()+": profile '"+pt+"' is not valid (inner path not found)"); } } else check((page.getWorkerContext().hasResource(StructureDefinition.class, pt)) || isStringPattern(tail(pt)), sd, ed.getPath()+": profile '"+pt+"' is not valid (d)"); } if (tr.hasTargetProfile()) { String pt = tr.getTargetProfile().get(0).getValue(); if (pt.contains("#")) { String[] parts = pt.split("\\#"); StructureDefinition exd = page.getWorkerContext().fetchResource(StructureDefinition.class, parts[0]); if (exd == null) check(false, sd, ed.getPath()+": target profile '"+pt+"' is not valid (definition not found)"); else { ElementDefinition ex = null; for (ElementDefinition et : exd.getSnapshot().getElement()) if (et.hasFixed() && et.getFixed() instanceof UriType && ((UriType)et.getFixed()).asStringValue().equals(parts[1])) ex = et; check(ex != null, sd, ed.getPath()+": target profile 
'"+pt+"' is not valid (inner path not found)"); } } else check((page.getWorkerContext().hasResource(StructureDefinition.class, pt)) || isStringPattern(tail(pt)), sd, ed.getPath()+": target profile '"+pt+"' is not valid (d)"); } } } private boolean isStringPattern(String name) { return !page.getDefinitions().getPrimitives().containsKey(name) || !(page.getDefinitions().getPrimitives().get(name) instanceof DefinedStringPattern); } private boolean checkMetaData(StructureDefinition sd) { check(tail(sd.getUrl()).equals(sd.getId()), sd, "id must equal tail of URL"); check(page.getVersion().equals(sd.getFhirVersion()), sd, "FhirVersion is wrong (should be "+page.getVersion()+", is "+sd.getFhirVersion()+")"); switch (sd.getKind()) { case COMPLEXTYPE: return checkDataType(sd); case PRIMITIVETYPE: return checkDataType(sd); case RESOURCE: return checkResource(sd); case LOGICAL: return checkLogical(sd); default: check(false, sd, "Unknown kind"); return false; } } private boolean checkLogical(StructureDefinition sd) { return false; } private boolean checkResource(StructureDefinition sd) { check(!sd.getAbstract() || sd.getName().equals("Resource") || sd.getName().equals("DomainResource"), sd, "Only Resource/DomainResource can be abstract"); check(!sd.hasContext(), sd, "Only extensions can have context (not resources)"); if (sd.getDerivation() == TypeDerivationRule.CONSTRAINT) { check(page.getDefinitions().hasConcreteResource(sd.getType()), sd, "Unknown constrained base resource "+sd.getType()); check(!page.getDefinitions().hasResource(sd.getId()), sd, "Duplicate resource name "+sd.getType()); } else { if (sd.hasBaseDefinition()) check(page.getDefinitions().hasAbstractResource(sd.getBaseDefinition().substring(40)), sd, "Unknown specialised base resource "+sd.getType()); else check(page.getDefinitions().hasAbstractResource(sd.getType()), sd, "Unknown specialised base resource "+sd.getType()); } return false; } private boolean checkDataType(StructureDefinition sd) { check(!sd.getAbstract() || sd.getName().equals("Element") || sd.getName().equals("BackboneElement") , sd, "Only Element/BackboneElement can be abstract"); check(!sd.hasContext() || "Extension".equals(sd.getType()), sd, "Only extensions can have context (base type = "+sd.getType()+")"); if (sd.getDerivation() == TypeDerivationRule.CONSTRAINT) { check(page.getDefinitions().hasType(sd.getType()), sd, "Unknown constrained base type "+sd.getType()); check(page.getDefinitions().hasPrimitiveType(sd.getId()) || !page.getDefinitions().hasBaseType(sd.getId()), sd, "Duplicate type name "+sd.getType()); } else { if (sd.hasBaseDefinition()) check(page.getDefinitions().hasBaseType(sd.getBaseDefinition().substring(40)), sd, "Unknown specialised base type "+sd.getType()); else check(page.getDefinitions().hasBaseType(sd.getType()), sd, "Unknown specialised base type "+sd.getType()); } return false; } private void check(boolean pass, StructureDefinition sd, String msg) { if (!pass) System.out.println("Error in StructureDefinition "+sd.getId()+": "+msg); } private String tail(String url) { return url.substring(url.lastIndexOf("/")+1); } private void produceIgOperations(ImplementationGuideDefn ig, Profile p) throws Exception { throw new Error("not supported anymore"); // String src = TextFile.fileToString(page.getFolders().srcDir + "template-ig-operations.html"); // String n = p.getId(); // WorkGroup wg = null; // TextFile.stringToFile(page.processPageIncludes(ig.getCode()+File.separator+n+"-operations.html", src, "?type", null, "??path", null, null, "Operations", 
p, ig, null, wg), page.getFolders().dstDir + ig.getCode()+File.separator+n + "-operations.html"); // // insertSectionNumbers(, st, n+"-operations.html", 0, null) // page.getHTMLChecker().registerFile(ig.getCode()+File.separator+n + "-operations.html", "Operations defined by " + p.getTitle(), HTMLLinkChecker.XHTML_TYPE, true); // // for (Operation t : p.getOperations()) { // produceOperation(ig, n+"-"+t.getName(), n+"-"+t.getName().toLowerCase(), null, t, null); // } } /** * This is not true of bundles generally, but it is true of all the * conformance bundles produced by the spec: * * all entries must have a fullUrl, and it must equal http://hl7.org/fhir/[type]/[id] * * @param bnd - the bundle to check */ private void checkBundleURLs(Bundle bnd) { int i = 0; for (BundleEntryComponent e : bnd.getEntry()) { i++; if (!e.getResource().hasUserData("external.url")) { if (!e.hasFullUrl()) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle "+bnd.getId(), "no Full URL on entry "+Integer.toString(i),IssueSeverity.ERROR)); else if (!e.getFullUrl().endsWith("/"+e.getResource().getResourceType().toString()+"/"+e.getResource().getId()) && e.getResource().getResourceType() != ResourceType.CodeSystem) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle "+bnd.getId(), "URL doesn't match resource and id on entry "+Integer.toString(i)+" : "+e.getFullUrl()+" should end with /"+e.getResource().getResourceType().toString()+"/"+e.getResource().getId(),IssueSeverity.ERROR)); else if (!e.getFullUrl().equals("http://hl7.org/fhir/"+e.getResource().getResourceType().toString()+"/"+e.getResource().getId()) && e.getResource().getResourceType() != ResourceType.CodeSystem) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle "+bnd.getId(), "URL is non-FHIR "+Integer.toString(i)+" : "+e.getFullUrl()+" should start with http://hl7.org/fhir/ for HL7-defined artifacts",IssueSeverity.WARNING)); if (e.getResource() instanceof MetadataResource) { MetadataResource m = (MetadataResource) e.getResource(); String url = m.getUrl(); if (url != null && url.startsWith("http://hl7.org/fhir")) { if (!Constants.VERSION.equals(m.getVersion())) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INVALID, -1, -1, "Bundle "+bnd.getId(), "definitions in FHIR space should have the correct version (url = "+url+", version = "+m.getVersion()+")", IssueSeverity.ERROR)); } } } } } private void produceComparisons() throws Exception { // for (String n : page.getIni().getPropertyNames("comparisons")) { // produceComparison(n); // } } private void produceComparison(String n) throws Exception { int t = page.getIni().getIntegerProperty(n, "pairs"); ProfileComparer pc = new ProfileComparer(page.getWorkerContext(), page.getFolders().dstDir); pc.setId(n); pc.setTitle(page.getIni().getStringProperty("comparisons", n)); page.log("...Comparison: "+pc.getTitle(), LogMessageType.Process); pc.setLeftLink(page.getIni().getStringProperty(n, "left-link")); pc.setLeftName(page.getIni().getStringProperty(n, "left-name")); pc.setRightLink(page.getIni().getStringProperty(n, "right-link")); pc.setRightName(page.getIni().getStringProperty(n, "right-name")); for (int i = 1; i <= t; i++) { String[] pair = page.getIni().getStringProperty(n, "pair"+Integer.toString(i)).split(","); if (pair.length != 2) throw new Exception("Didn't find a pair for "+n+".pair"+Integer.toString(i)); 
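      // Each "pairN" ini property is expected to hold two comma-separated structure
      // references resolvable by fetchTypeDefinition, left then right; illustrative
      // (hypothetical) example of such an entry:
      //
      //   pair1=http://hl7.org/fhir/StructureDefinition/Patient,http://hl7.org/fhir/us/core/StructureDefinition/us-core-patient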
      StructureDefinition sdl = page.getWorkerContext().fetchTypeDefinition(pair[0]);
      if (sdl == null)
        throw new Exception("Unable to find structure "+pair[0]);
      StructureDefinition sdr = page.getWorkerContext().fetchTypeDefinition(pair[1]);
      if (sdr == null)
        throw new Exception("Unable to find structure "+pair[1]);
      pc.compareProfiles(sdl, sdr);
    }

    // assign file names and paths to all the structures
    int i = 0;
    for (ProfileComparison cmp : pc.getComparisons()) {
      i++;
      if (cmp.getSubset() != null) {
        cmp.getSubset().setUserData("filename", n+".intersection."+Integer.toString(i)+".xml");
        cmp.getSubset().setUserData("filename", n+".intersection."+Integer.toString(i)+".html");
      }
      if (cmp.getSuperset() != null) {
        cmp.getSuperset().setUserData("filename", n+".intersection."+Integer.toString(i)+".xml");
        cmp.getSuperset().setUserData("filename", n+".intersection."+Integer.toString(i)+".html");
      }
    }

    // ok, all compared; now produce the output
    // first page we produce is simply the index
    page.log(" ... generate", LogMessageType.Process);
    String src = TextFile.fileToString(page.getFolders().srcDir + "template-comparison-set.html");
    src = page.processPageIncludes(n+".html", src, "?type", null, "??path", null, null, "Comparison", pc, null, null, page.getDefinitions().getWorkgroups().get("fhir"));
    TextFile.stringToFile(src, Utilities.path(page.getFolders().dstDir, n+".html"));
    cachePage(n + ".html", src, "Comparison "+pc.getTitle(), false);

    // then we produce a comparison page for each pair
    for (ProfileComparison cmp : pc.getComparisons()) {
      src = TextFile.fileToString(page.getFolders().srcDir + "template-comparison.html");
      src = page.processPageIncludes(n+"."+cmp.getId()+".html", src, "?type", null, "??path", null, null, "Comparison", cmp, null, null, page.getDefinitions().getWorkgroups().get("fhir"));
      TextFile.stringToFile(src, Utilities.path(page.getFolders().dstDir, n+"."+cmp.getId()+".html"));
      cachePage(n +"."+cmp.getId()+".html", src, "Comparison "+pc.getTitle(), false);
    }
    // and also individual pages for each pair outcome
    // then we produce value set pages for each value set
  }

  private void minify(String srcFile, String dstFile) throws Exception {
    CloseProtectedZipInputStream source = new CloseProtectedZipInputStream(new FileInputStream(srcFile));
    ZipGenerator dest = new ZipGenerator(dstFile);
    ZipEntry entry = null;
    while ((entry = source.getNextEntry()) != null) {
      String name = entry.getName();
      if (name.endsWith(".xsd"))
        dest.addStream(entry.getName(), stripXsd(source), false);
      else if (name.endsWith(".json") && !name.endsWith(".schema.json"))
        dest.addStream(entry.getName(), stripJson(source), false);
      else if (name.endsWith(".xml"))
        dest.addStream(entry.getName(), stripXml(source), false);
      else
        dest.addStream(entry.getName(), source, false);
    }
    source.actualClose();
    dest.close();
  }

  private InputStream stripJson(InputStream source) throws Exception {
    JsonParser p = new JsonParser();
    Resource r = p.parse(source);
    minify(r);
    ByteArrayOutputStream bo = new ByteArrayOutputStream();
    p.compose(bo, r);
    bo.close();
    return new ByteArrayInputStream(bo.toByteArray());
  }

  private InputStream stripXml(InputStream source) throws Exception {
    XmlParser p = new XmlParser();
    Resource r = p.parse(source);
    minify(r);
    ByteArrayOutputStream bo = new ByteArrayOutputStream();
    p.compose(bo, r);
    bo.close();
    return new ByteArrayInputStream(bo.toByteArray());
  }

  private void minify(Resource r) {
    if (r == null)
      return;
    if (r instanceof DomainResource)
      dropNarrative((DomainResource) r);
    if (r instanceof StructureDefinition)
      minifyProfile((StructureDefinition) r);
    if (r instanceof ValueSet)
      minifyValueSet((ValueSet) r);
    if (r instanceof CodeSystem)
      minifyCodeSystem((CodeSystem) r);
    if (r instanceof Bundle)
      minifyBundle((Bundle) r);
  }

  private void dropNarrative(DomainResource r) {
    if (r.hasText() && r.getText().hasDiv()) {
      r.getText().getDiv().getChildNodes().clear();
      r.getText().getDiv().addText("Narrative removed to reduce size");
    }
  }

  private void minifyBundle(Bundle b) {
    for (BundleEntryComponent e : b.getEntry())
      minify(e.getResource());
  }

  private void minifyProfile(StructureDefinition p) {
    p.getContact().clear();
    p.setDescriptionElement(null);
    p.getKeyword().clear();
    p.setPurposeElement(null);
    p.getMapping().clear();
    p.setDifferential(null);
    for (ElementDefinition ed : p.getSnapshot().getElement()) {
      ed.setShortElement(null);
      ed.setDefinitionElement(null);
      ed.setCommentElement(null);
      ed.setRequirementsElement(null);
      ed.getAlias().clear();
      ed.setMeaningWhenMissingElement(null);
      ed.getMapping().clear();
    }
  }

  private void minifyValueSet(ValueSet vs) {
    vs.getContact().clear();
    vs.setDescriptionElement(null);
    vs.setCopyrightElement(null);
  }

  private void minifyCodeSystem(CodeSystem cs) {
    cs.getContact().clear();
    cs.setDescriptionElement(null);
    cs.setCopyrightElement(null);
    stripDefinition(cs.getConcept());
  }

  private void stripDefinition(List<ConceptDefinitionComponent> concept) {
    for (ConceptDefinitionComponent c : concept) {
      c.setDefinitionElement(null);
      if (c.hasConcept())
        stripDefinition(c.getConcept());
    }
  }

  private InputStream stripXsd(InputStream source) throws Exception {
    byte[] src = IOUtils.toByteArray(source);
    try {
      byte[] xslt = IOUtils.toByteArray(new FileInputStream(Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "AnnotationStripper.xslt")));
      String scrs = new String(src);
      String xslts = new String(xslt);
      return new ByteArrayInputStream(XsltUtilities.transform(new HashMap<String, byte[]>(), src, xslt));
    } catch (Exception e) {
      if (web) {
        e.printStackTrace();
        throw e;
      } else
        return new ByteArrayInputStream(src);
    }
    // DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    // factory.setNamespaceAware(false);
    // DocumentBuilder builder = factory.newDocumentBuilder();
    // Document doc = builder.parse(source);
    // stripElement(doc.getDocumentElement(), "annotation");
    // TransformerFactory transformerFactory = TransformerFactory.newInstance();
    // Transformer transformer = transformerFactory.newTransformer();
    // ByteArrayOutputStream bo = new ByteArrayOutputStream();
    // DOMSource src = new DOMSource(doc);
    // StreamResult streamResult = new StreamResult(bo);
    // transformer.transform(src, streamResult);
    // bo.close();
    // return new ByteArrayInputStream(bo.toByteArray());
  }

  private Document loadDom(InputStream src, boolean namespaces) throws Exception {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(namespaces);
    DocumentBuilder builder = factory.newDocumentBuilder();
    Document doc = builder.parse(src);
    return doc;
  }

  private void stripElement(Element element, String name) {
    Node child = element.getFirstChild();
    while (child != null) {
      Node next = child.getNextSibling();
      if (child.getNodeName().endsWith(name))
        element.removeChild(child);
      else if (child.getNodeType() == Node.ELEMENT_NODE)
        stripElement((Element) child, name);
      child = next;
    }
  }

  private void addOtherProfiles(Bundle bundle, Profile cp) {
    for (ConstraintStructure p : cp.getProfiles())
      bundle.addEntry().setResource(p.getResource()).setFullUrl("http://hl7.org/fhir/" + p.getResource().fhirType() + "/" + p.getResource().getId());
  }

  private void addOtherProfiles(Bundle bundle, ResourceDefn rd) {
    for (Profile cp : rd.getConformancePackages())
      addOtherProfiles(bundle, cp);
  }

  private void addSearchParams(Set<String> uris, Bundle bundle, ResourceDefn rd) throws Exception {
    if (rd.getConformancePack() == null) {
      for (SearchParameterDefn spd : rd.getSearchParams().values()) {
        if (spd.getResource() == null) {
          buildSearchDefinition(rd, spd);
        }
        SearchParameter sp = spd.getResource();
        if (!uris.contains(sp.getUrl())) {
          bundle.addEntry().setResource(sp).setFullUrl("http://hl7.org/fhir/" + sp.fhirType() + "/" + sp.getId());
          uris.add(sp.getUrl());
        }
      }
    } else
      addSearchParams(uris, bundle, rd.getConformancePack());
  }

  private void buildSearchDefinition(ResourceDefn rd, SearchParameterDefn spd) throws Exception {
    StructureDefinition p = new StructureDefinition();
    p.setFhirVersion(page.getVersion());
    p.setKind(StructureDefinitionKind.RESOURCE);
    p.setAbstract(true);
    p.setPublisher("Health Level Seven International (" + rd.getWg() + ")");
    p.setName(rd.getName());
    p.setVersion(Constants.VERSION);
    p.setType(rd.getName());
    p.addContact().addTelecom().setSystem(ContactPointSystem.URL).setValue("http://hl7.org/fhir");
    SearchParameter sp = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).makeSearchParam(p, rd.getName() + "-" + spd.getCode(), rd.getName(), spd, rd);
    spd.setResource(sp);
  }

  private void addSearchParams(Set<String> uris, Bundle bundle, Profile conformancePack) {
    for (SearchParameter sp : conformancePack.getSearchParameters()) {
      if (!uris.contains(sp.getUrl())) {
        bundle.addEntry().setResource(sp).setFullUrl("http://hl7.org/fhir/" + sp.fhirType() + "/" + sp.getId());
        uris.add(sp.getUrl());
      }
    }
  }

  Set<StructureDefinition> ped = new HashSet<StructureDefinition>();

  private void produceExtensionDefinition(StructureDefinition ed) throws FileNotFoundException, Exception {
    if (!ped.contains(ed)) {
      ped.add(ed);
      ImplementationGuideDefn ig = page.getDefinitions().getIgs().get(ed.getUserString(ToolResourceUtilities.NAME_RES_IG));
      String prefix = ig.isCore() ? "" : ig.getCode() + File.separator;
      String filename = ed.getUserString("filename");
      FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix + filename + ".xml");
      new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, ed);
      s.close();
      s = new FileOutputStream(page.getFolders().dstDir + prefix + filename + ".canonical.xml");
      new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, ed);
      s.close();
      s = new FileOutputStream(page.getFolders().dstDir + prefix + filename + ".json");
      new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, ed);
      s.close();
      s = new FileOutputStream(page.getFolders().dstDir + prefix + filename + ".canonical.json");
      new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, ed);
      s.close();
      cloneToXhtml(prefix + filename, ed.getName(), false, "summary-instance", ed.getUrl(), null, wg(ed));
      jsonToXhtml(prefix + filename, ed.getName(), resource2Json(ed), "extension", ed.getUrl(), null, wg(ed));
      ttlToXhtml(prefix + filename, ed.getName(), resource2Ttl(ed), "extension", ed.getUrl(), null, wg(ed));
      ByteArrayOutputStream bytes = new ByteArrayOutputStream();
      XmlSpecGenerator gen = new XmlSpecGenerator(bytes, filename + "-definitions.html", null /*"http://hl7.org/fhir/"*/, page, page.genlevel(ig.isCore() ? 0 : 1));
      gen.generateExtension(ed);
      gen.close();
      String xml = bytes.toString();
      bytes = new ByteArrayOutputStream();
      JsonSpecGenerator genj = new JsonSpecGenerator(bytes, filename + "-definitions.html", null /*"http://hl7.org/fhir/"*/, page, page.genlevel(ig.isCore() ? 0 : 1));
      genj.generateExtension(ed);
      genj.close();
      String json = bytes.toString();
      bytes = new ByteArrayOutputStream();
      TurtleSpecGenerator gent = new TurtleSpecGenerator(bytes, filename + "-definitions.html", null /*"http://hl7.org/fhir/"*/, page, page.genlevel(ig.isCore() ? 0 : 1));
      gent.generateExtension(ed);
      gent.close();
      String ttl = bytes.toString();
      bytes = new ByteArrayOutputStream();
      TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(bytes, page);
      tgen.generateExtension("", ed);
      tgen.close();
      String tx = bytes.toString();
      String src = TextFile.fileToString(page.getFolders().srcDir + "template-extension-mappings.html");
      src = page.processExtensionIncludes(filename, ed, xml, json, ttl, tx, src, filename + ".html", ig);
      page.getHTMLChecker().registerFile(prefix + filename + "-mappings.html", "Mappings for Extension " + ed.getName(), HTMLLinkChecker.XHTML_TYPE, true);
      TextFile.stringToFile(src, page.getFolders().dstDir + prefix + filename + "-mappings.html");
      src = TextFile.fileToString(page.getFolders().srcDir + "template-extension-definitions.html");
      src = page.processExtensionIncludes(filename, ed, xml, json, ttl, tx, src, filename + ".html", ig);
      page.getHTMLChecker().registerFile(prefix + filename + "-definitions.html", "Definitions for Extension " + ed.getName(), HTMLLinkChecker.XHTML_TYPE, true);
      TextFile.stringToFile(src, page.getFolders().dstDir + prefix + filename + "-definitions.html");
      src = TextFile.fileToString(page.getFolders().srcDir + "template-extension.html");
      src = page.processExtensionIncludes(filename, ed, xml, json, ttl, tx, src, filename + ".html", ig);
      page.getHTMLChecker().registerFile(prefix + filename + ".html", "Extension " + ed.getName(), HTMLLinkChecker.XHTML_TYPE, true);
      TextFile.stringToFile(src, page.getFolders().dstDir + prefix + filename + ".html");
    }
  }

  private WorkGroup wg(StructureDefinition ed) {
    return page.getDefinitions().getWorkgroups().get(ToolingExtensions.readStringExtension(ed, ToolingExtensions.EXT_WORKGROUP));
  }

  private void copyStaticContent() throws IOException, Exception {
    if (page.getIni().getPropertyNames("support") != null)
      for (String n : page.getIni().getPropertyNames("support")) {
        Utilities.copyFile(new CSFile(page.getFolders().srcDir + n), new CSFile(page.getFolders().dstDir + n));
        page.getHTMLChecker().registerFile(n, "Support File", HTMLLinkChecker.determineType(n), true);
      }
    for (String n : page.getIni().getPropertyNames("images")) {
      copyImage(page.getFolders().imgDir, n);
    }
    for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) {
      for (String n : ig.getImageList()) {
        copyIgImage(ig, n);
      }
    }
    for (String n : page.getIni().getPropertyNames("files")) {
      Utilities.copyFile(new CSFile(page.getFolders().rootDir + n), new CSFile(page.getFolders().dstDir + page.getIni().getStringProperty("files", n)));
      page.getHTMLChecker().registerFile(page.getIni().getStringProperty("files", n), "Support File", HTMLLinkChecker.determineType(page.getIni().getStringProperty("files", n)), true);
    }
    page.log("Copy HTML templates", LogMessageType.Process);
    Utilities.copyDirectory(page.getFolders().rootDir + page.getIni().getStringProperty("html", "source"), page.getFolders().dstDir, page.getHTMLChecker());
    TextFile.stringToFile("\r\n[FHIR]\r\nFhirVersion=" + page.getVersion() + "-" + page.getBuildId() + "\r\nversion=" + page.getVersion().toCode() + "\r\nbuildId=" + page.getBuildId() + "\r\ndate=" + new SimpleDateFormat("yyyyMMddHHmmss").format(page.getGenDate().getTime()),
        Utilities.path(page.getFolders().dstDir, "version.info"), false);
    for (String n : page.getDefinitions().getDiagrams().keySet()) {
      page.log(" ...diagram " + n, LogMessageType.Process);
      page.getSvgs().put(n, TextFile.fileToString(page.getFolders().srcDir + page.getDefinitions().getDiagrams().get(n)));
    }
  }

  private void copyImage(String folder, String n) throws IOException {
    if (n.contains("*")) {
      final String filter = n.replace("?", ".?").replace("*", ".*?");
      File[] files = new File(folder).listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          return name.matches(filter);
        }
      });
      for (File f : files) {
        Utilities.copyFile(f, new CSFile(page.getFolders().dstDir + f.getName()));
        page.getHTMLChecker().registerFile(f.getName(), "Support File", HTMLLinkChecker.determineType(n), true);
      }
    } else {
      Utilities.copyFile(new CSFile(Utilities.path(folder, n)), new CSFile(page.getFolders().dstDir + (n.contains("/") ? n.substring(n.lastIndexOf("/") + 1) : n)));
      page.getHTMLChecker().registerFile(n, "Support File", HTMLLinkChecker.determineType(n), true);
    }
  }

  private void copyIgImage(ImplementationGuideDefn ig, String path) throws IOException {
    File file = new File(Utilities.path(page.getFolders().rootDir, ig.getSource(), "..", path));
    String prefix = ig.isCore() ? "" : ig.getCode() + File.separator;
    if (path.contains("*")) {
      final String filter = file.getName().replace("?", ".?").replace("*", ".*?");
      File[] files = new File(file.getParent()).listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
          return name.matches(filter);
        }
      });
      for (File f : files) {
        Utilities.copyFile(f, new CSFile(Utilities.path(page.getFolders().dstDir, prefix + f.getName())));
        page.getHTMLChecker().registerFile(prefix + f.getName(), "Support File", HTMLLinkChecker.determineType(f.getName()), true);
      }
    } else {
      Utilities.copyFile(file, new CSFile(Utilities.path(page.getFolders().dstDir, prefix + file.getName())));
      page.getHTMLChecker().registerFile(prefix + file.getName(), "Support File", HTMLLinkChecker.determineType(file.getName()), true);
    }
  }

  /** this is only used when generating xhtml of json **/
  private String resource2Json(Bundle profileFeed2) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY);
    json.setSuppressXhtml("Snipped for Brevity");
    json.compose(bytes, page.getResourceBundle());
    bytes.close();
    return new String(bytes.toByteArray());
  }

  private String resource2Json(Resource r) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY);
    json.setSuppressXhtml("Snipped for Brevity");
    json.compose(bytes, r);
    bytes.close();
    return new String(bytes.toByteArray());
  }

  private String resource2Ttl(Resource r) throws Exception {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    IParser rdf = new RdfParser().setOutputStyle(OutputStyle.PRETTY);
    rdf.setSuppressXhtml("Snipped for Brevity");
    rdf.compose(bytes, r);
    bytes.close();
    return new String(bytes.toByteArray());
  }

  private void produceQA() throws Exception {
    page.getQa().countDefinitions(page.getDefinitions());
    String src = TextFile.fileToString(page.getFolders().srcDir + "qa.html");
    TextFile.stringToFile(page.processPageIncludes("qa.html", src,
"page", null, null, null, "QA Page", null, null, page.getDefinitions().getWorkgroups().get("fhir")), page.getFolders().dstDir + "qa.html"); if (web) { page.getQa().commit(page.getFolders().rootDir); } } private void produceV3() throws Exception { page.log(" ...v3 Code Systems", LogMessageType.Process); Utilities.createDirectory(page.getFolders().dstDir + "v3"); Utilities.clearDirectory(page.getFolders().dstDir + "v3"); String src = TextFile.fileToString(page.getFolders().srcDir + "v3" + File.separator + "template.html"); TextFile.stringToFile( addSectionNumbers("terminologies-v3.html", "terminologies-v3", page.processPageIncludes("terminologies-v3.html", src, "page", null, null, null, "V3 Terminologies", null, null, wg("vocab")), null, 0, null, null), page.getFolders().dstDir + "terminologies-v3.html"); src = TextFile.fileToString(page.getFolders().srcDir + "v3" + File.separator + "template.html"); cachePage("terminologies-v3.html", page.processPageIncludesForBook("terminologies-v3.html", src, "page", null, null, wg("vocab")), "V3 Terminologes", false); new ValueSetImporterV3(page, page.getValidationErrors()).produce(this); } private void produceV2() throws Exception { page.log(" ...v2 Tables", LogMessageType.Process); Utilities.createDirectory(page.getFolders().dstDir + "v2"); Utilities.clearDirectory(page.getFolders().dstDir + "v2"); String src = TextFile.fileToString(page.getFolders().srcDir + "v2" + File.separator + "template.html"); TextFile.stringToFile( addSectionNumbers("terminologies-v2.html", "terminologies-v2", page.processPageIncludes("terminologies-v2.html", src, "v2Vocab", null, null, null, "V2 Tables", null, null, page.getDefinitions().getWorkgroups().get("vocab")), null, 0, null, null), page.getFolders().dstDir + "terminologies-v2.html"); src = TextFile.fileToString(page.getFolders().srcDir + "v2" + File.separator + "template.html"); cachePage("terminologies-v2.html", page.processPageIncludesForBook("v2/template.html", src, "v2Vocab", null, null, wg("vocab")), "V2 Terminologies", false); new ValueSetImporterV2(page, page.getValidationErrors()).produce(this); } private void produceBaseProfile() throws Exception { for (DefinedCode pt : page.getDefinitions().getPrimitives().values()) producePrimitiveTypeProfile(pt); produceXhtmlProfile(); for (TypeDefn e : page.getDefinitions().getTypes().values()) produceTypeProfile(e); for (TypeDefn e : page.getDefinitions().getInfrastructure().values()) produceTypeProfile(e); for (TypeDefn e : page.getDefinitions().getStructures().values()) produceTypeProfile(e); for (ProfiledType c : page.getDefinitions().getConstraints().values()) produceProfiledTypeProfile(c); } private void produceProfiledTypeProfile(ProfiledType pt) throws Exception { String fn = pt.getName().toLowerCase() + ".profile.xml"; StructureDefinition rp = pt.getProfile(); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + fn); new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp); s.close(); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.xml")); new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp); s.close(); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".json")); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp); s.close(); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.json")); new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp); s.close(); Utilities.copyFile(new 
        CSFile(page.getFolders().dstDir + fn), new CSFile(Utilities.path(page.getFolders().dstDir, "examples", fn)));
    addToResourceFeed(rp, page.getTypeBundle(), (fn));
    cloneToXhtml(pt.getName().toLowerCase() + ".profile", "StructureDefinition for " + pt.getName(), false, "profile-instance:type:" + pt.getName(), "Type", null, wg("mnm"));
    jsonToXhtml(pt.getName().toLowerCase() + ".profile", "StructureDefinition for " + pt.getName(), resource2Json(rp), "profile-instance:type:" + pt.getName(), "Type", null, wg("mnm"));
    ttlToXhtml(pt.getName().toLowerCase() + ".profile", "StructureDefinition for " + pt.getName(), resource2Ttl(rp), "profile-instance:type:" + pt.getName(), "Type", null, wg("mnm"));
    String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, rp);
    TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + fn, ".shex"));
    shexToXhtml(pt.getName().toLowerCase(), "ShEx statement for " + pt.getName(), shex, "profile-instance:type:" + pt.getName(), "Type", null, wg("mnm"));
  }

  private void produceXhtmlProfile() throws Exception {
    String fn = "xhtml.profile.xml";
    StructureDefinition rp = page.getProfiles().get("xhtml");
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + fn);
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.xml"));
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".json"));
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.json"));
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, rp);
    TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + fn, ".shex"));
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + fn), new CSFile(Utilities.path(page.getFolders().dstDir, "examples", fn)));
    addToResourceFeed(rp, page.getTypeBundle(), (fn));
    // saveAsPureHtml(rp, new FileOutputStream(page.getFolders().dstDir + "html" + File.separator + "datatypes.html"));
    cloneToXhtml("xhtml.profile", "StructureDefinition for xhtml", false, "profile-instance:type:xhtml", "Type", null, wg("mnm"));
    jsonToXhtml("xhtml.profile", "StructureDefinition for xhtml", resource2Json(rp), "profile-instance:type:xhtml", "Type", null, wg("mnm"));
    ttlToXhtml("xhtml.profile", "StructureDefinition for xhtml", resource2Ttl(rp), "profile-instance:type:xhtml", "Type", null, wg("mnm"));
    shexToXhtml("xhtml", "ShEx statement for xhtml", shex, "profile-instance:type:xhtml", "Type", null, wg("mnm"));
  }

  private void producePrimitiveTypeProfile(DefinedCode type) throws Exception {
    String fn = type.getCode().toLowerCase() + ".profile.xml";
    StructureDefinition rp = type.getProfile();
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + fn);
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.xml"));
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".json"));
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.json"));
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, rp);
    TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + fn, ".shex"));
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + fn), new CSFile(Utilities.path(page.getFolders().dstDir, "examples", fn)));
    addToResourceFeed(rp, page.getTypeBundle(), (fn));
    // saveAsPureHtml(rp, new FileOutputStream(page.getFolders().dstDir + "html" + File.separator + "datatypes.html"));
    cloneToXhtml(type.getCode().toLowerCase() + ".profile", "StructureDefinition for " + type.getCode(), false, "profile-instance:type:" + type.getCode(), "Type", null, wg("mnm"));
    jsonToXhtml(type.getCode().toLowerCase() + ".profile", "StructureDefinition for " + type.getCode(), resource2Json(rp), "profile-instance:type:" + type.getCode(), "Type", null, wg("mnm"));
    ttlToXhtml(type.getCode().toLowerCase() + ".profile", "StructureDefinition for " + type.getCode(), resource2Ttl(rp), "profile-instance:type:" + type.getCode(), "Type", null, wg("mnm"));
    shexToXhtml(type.getCode().toLowerCase(), "ShEx statement for " + type.getCode(), shex, "profile-instance:type:" + type.getCode(), "Type", null, wg("mnm"));
  }

  private void produceTypeProfile(TypeDefn type) throws Exception {
    // ProfileDefn p = new ProfileDefn();
    // p.putMetadata("id", type.getName());
    // p.putMetadata("name", "Basic StructureDefinition for " + type.getName());
    // p.putMetadata("author.name", "FHIR Specification");
    // p.putMetadata("author.ref", "http://hl7.org/fhir");
    // p.putMetadata("description", "Basic StructureDefinition for " + type.getName() + " for validation support");
    // p.putMetadata("status", "draft");
    // p.putMetadata("date", new SimpleDateFormat("yyyy-MM-dd", new Locale("en", "US")).format(new Date()));
    // p.getElements().add(type);
    // ProfileGenerator pgen = new ProfileGenerator(page.getDefinitions());
    // String fn = "type-" + type.getName() + ".profile.xml";
    // StructureDefinition rp = pgen.generate(p, "type-" + type.getName() + ".profile", "<div>Type definition for " + type.getName() + " from <a href=\"http://hl7.org/fhir/datatypes.html#" + type.getName() + "\">FHIR Specification</a></div>");
    String fn = type.getName().toLowerCase() + ".profile.xml";
    StructureDefinition rp = type.getProfile();
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + fn);
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.xml"));
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".json"));
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(fn, ".canonical.json"));
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + fn), new CSFile(Utilities.path(page.getFolders().dstDir, "examples", fn)));
    addToResourceFeed(rp, page.getTypeBundle(), fn);
    // saveAsPureHtml(rp, new FileOutputStream(page.getFolders().dstDir + "html" + File.separator + "datatypes.html"));
    cloneToXhtml(type.getName().toLowerCase() + ".profile", "StructureDefinition for " + type.getName(),
        false, "profile-instance:type:" + type.getName(), "Type", null, wg("mnm"));
    jsonToXhtml(type.getName().toLowerCase() + ".profile", "StructureDefinition for " + type.getName(), resource2Json(rp), "profile-instance:type:" + type.getName(), "Type", null, wg("mnm"));
    ttlToXhtml(type.getName().toLowerCase() + ".profile", "StructureDefinition for " + type.getName(), resource2Ttl(rp), "profile-instance:type:" + type.getName(), "Type", null, wg("mnm"));
    String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, rp);
    TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + fn, ".shex"));
    shexToXhtml(type.getName().toLowerCase(), "ShEx statement for " + type.getName(), shex, "profile-instance:type:" + type.getName(), "Type", null, wg("mnm"));
  }

  protected XmlPullParser loadXml(InputStream stream) throws Exception {
    BufferedInputStream input = new BufferedInputStream(stream);
    XmlPullParserFactory factory = XmlPullParserFactory.newInstance(System.getProperty(XmlPullParserFactory.PROPERTY_NAME), null);
    factory.setNamespaceAware(true);
    XmlPullParser xpp = factory.newPullParser();
    xpp.setInput(input, "UTF-8");
    xpp.next();
    return xpp;
  }

  protected int nextNoWhitespace(XmlPullParser xpp) throws Exception {
    int eventType = xpp.getEventType();
    while (eventType == XmlPullParser.TEXT && xpp.isWhitespace())
      eventType = xpp.next();
    return eventType;
  }

  private void checkFragments() throws Exception {
    for (Fragment f : fragments) {
      try {
        // System.out.println("  "+f.page+"/"+f.id);
        String xml = f.getXml();
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        factory.setNamespaceAware(true);
        DocumentBuilder builder = factory.newDocumentBuilder();
        InputSource is = new InputSource(new StringReader(xml));
        Document doc = builder.parse(is);
        org.w3c.dom.Element base = doc.getDocumentElement();
        String type = base.getAttribute("fragment");
        if (!page.getDefinitions().hasPrimitiveType(type)) {
          if (f.isJson()) {
            org.hl7.fhir.r5.elementmodel.JsonParser p = new org.hl7.fhir.r5.elementmodel.JsonParser(page.getWorkerContext());
            p.setupValidation(ValidationPolicy.QUICK, null);
            p.parse(base.getTextContent(), type);
          } else {
            org.hl7.fhir.r5.elementmodel.XmlParser p = new org.hl7.fhir.r5.elementmodel.XmlParser(page.getWorkerContext());
            p.setupValidation(ValidationPolicy.QUICK, null);
            p.parse(XMLUtil.getFirstChild(base), type);
          }
        }
      } catch (Exception e) {
        page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.STRUCTURE, f.getPage(), "Fragment Error in page " + f.getPage() + (f.id != null ?
"#"+f.id : "") + ": " + e.getMessage(), IssueSeverity.ERROR)); } } } private void produceSchemaZip() throws Exception { char sc = File.separatorChar; File f = new CSFile(page.getFolders().dstDir + "fhir-all-xsd.zip"); if (f.exists()) f.delete(); ZipGenerator zip = new ZipGenerator(page.getFolders().tmpResDir + "fhir-all-xsd.zip"); zip.addFiles(page.getFolders().dstDir, "", ".xsd", null); zip.addFiles(page.getFolders().dstDir, "", ".sch", null); zip.addFiles(page.getFolders().rootDir + "tools" + sc + "schematron" + sc, "", ".xsl", ""); zip.close(); Utilities.copyFile(new CSFile(page.getFolders().tmpResDir + "fhir-all-xsd.zip"), f); f = new CSFile(page.getFolders().dstDir + "fhir-codegen-xsd.zip"); if (f.exists()) f.delete(); zip = new ZipGenerator(page.getFolders().tmpResDir + "fhir-codegen-xsd.zip"); zip.addFiles(page.getFolders().xsdDir+"codegen"+File.separator, "", ".xsd", null); zip.close(); Utilities.copyFile(new CSFile(page.getFolders().tmpResDir + "fhir-codegen-xsd.zip"), f); f = new CSFile(page.getFolders().dstDir + "fhir.schema.json.zip"); if (f.exists()) f.delete(); zip = new ZipGenerator(page.getFolders().tmpResDir + "fhir.schema.json.zip"); zip.addFiles(page.getFolders().dstDir, "", ".schema.json", null); zip.close(); Utilities.copyFile(new CSFile(page.getFolders().tmpResDir + "fhir.schema.json.zip"), f); f = new CSFile(page.getFolders().dstDir + "fhir.schema.graphql.zip"); if (f.exists()) f.delete(); zip = new ZipGenerator(page.getFolders().tmpResDir + "fhir.schema.graphql.zip"); zip.addFiles(page.getFolders().dstDir, "", ".graphql", null); zip.close(); Utilities.copyFile(new CSFile(page.getFolders().tmpResDir + "fhir.schema.graphql.zip"), f); zip = new ZipGenerator(page.getFolders().dstDir + "fhir.schema.shex.zip"); zip.addFileName("fhir.shex", Utilities.path(page.getFolders().dstDir, "fhir.shex"), false); zip.close(); } private void produceResource1(ResourceDefn resource, boolean isAbstract) throws Exception { String n = resource.getName().toLowerCase(); ByteArrayOutputStream bs = new ByteArrayOutputStream(); XmlSpecGenerator gen = new XmlSpecGenerator(bs, n + "-definitions.html", null, page, ""); gen.generate(resource.getRoot(), isAbstract, true); gen.close(); String xml = new String(bs.toByteArray()); bs = new ByteArrayOutputStream(); JsonSpecGenerator genJ = new JsonSpecGenerator(bs, n + "-definitions.html", null, page, ""); genJ.generate(resource.getRoot(), true, isAbstract); genJ.close(); String json = new String(bs.toByteArray()); bs = new ByteArrayOutputStream(); TurtleSpecGenerator gent = new TurtleSpecGenerator(bs, n + "-definitions.html", null, page, ""); gent.generate(resource.getRoot(), isAbstract); gent.close(); String ttl = new String(bs.toByteArray()); xmls.put(n, xml); jsons.put(n, json); ttls.put(n, ttl); generateProfile(resource, n, xml, json, ttl, false); } private void produceResource2(ResourceDefn resource, boolean isAbstract, String extraTypeForDefn, boolean logicalOnly) throws Exception { File tmp = Utilities.createTempFile("tmp", ".tmp"); String n = resource.getName().toLowerCase(); String xml = xmls.get(n); String json = jsons.get(n); String ttl = ttls.get(n); TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page); tgen.generate("", resource.getRoot()); tgen.close(); String tx = TextFile.fileToString(tmp.getAbsolutePath()); DictHTMLGenerator dgen = new DictHTMLGenerator(new FileOutputStream(tmp), page, ""); dgen.generate(resource.getRoot()); dgen.close(); String dict = 
TextFile.fileToString(tmp.getAbsolutePath()); if (extraTypeForDefn != null) { dgen = new DictHTMLGenerator(new FileOutputStream(tmp), page, ""); dgen.generate(page.getDefinitions().getElementDefn(extraTypeForDefn)); dgen.close(); dict = dict +"\r\n"+ TextFile.fileToString(tmp.getAbsolutePath()); } Map<String, String> values = new HashMap<String, String>(); MappingsGenerator mgen = new MappingsGenerator(page.getDefinitions()); mgen.generate(resource); String mappings = mgen.getMappings(); String mappingsList = mgen.getMappingsList(); if (!logicalOnly) { SvgGenerator svg = new SvgGenerator(page, "", resource.getLayout(), true, false); svg.generate(resource, page.getFolders().dstDir + n + ".svg", "1"); svg.generate(resource, Utilities.path(page.getFolders().srcDir, n, n + ".gen.svg"), "1"); String prefix = page.getBreadCrumbManager().getIndexPrefixForReference(resource.getName()); SectionTracker st = new SectionTracker(prefix, false); st.start(""); page.getSectionTrackerCache().put(n, st); String template = isAbstract ? "template-abstract" : "template"; String src = TextFile.fileToString(page.getFolders().srcDir + template+".html"); src = insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "resource", n + ".html", null, values, resource.getWg(), null), st, n + ".html", 0, null); TextFile.stringToFile(src, page.getFolders().dstDir + n + ".html"); scanForFragments(n + ".html", new XhtmlParser().parseFragment(src)); page.getHTMLChecker().registerFile(n + ".html", "Base Page for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); StructureDefinition profile = (StructureDefinition) ResourceUtilities.getById(page.getResourceBundle(), ResourceType.StructureDefinition, resource.getName()); String pages = page.getIni().getStringProperty("resource-pages", n); if (!Utilities.noString(pages)) { for (String p : pages.split(",")) { producePage(p, n); } } try { if (!isAbstract) processQuestionnaire(resource, profile, st, true, "", null); } catch (Exception e) { // e.printStackTrace(); page.log("Questionnaire Generation Failed: "+e.getMessage(), LogMessageType.Error); } if (!isAbstract || !resource.getExamples().isEmpty()) { src = TextFile.fileToString(page.getFolders().srcDir + template+"-examples.html"); TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Examples", n + "-examples.html", null, values, resource.getWg(), null), st, n + "-examples.html", 0, null), page.getFolders().dstDir + n + "-examples.html"); page.getHTMLChecker().registerFile(n + "-examples.html", "Examples for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); for (Example e : resource.getExamples()) { try { processExample(e, resource, profile, null, e.getIg() == null ? 
null : page.getDefinitions().getIgs().get(e.getIg())); } catch (Exception ex) { throw new Exception("processing " + e.getTitle(), ex); // throw new Exception(ex.getMessage()+" processing "+e.getFileTitle()); } } } src = TextFile.fileToString(page.getFolders().srcDir + template+"-definitions.html"); TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Detailed Descriptions", n + "-definitions.html", null, values, resource.getWg(), null), st, n + "-definitions.html", 0, null), page.getFolders().dstDir + n + "-definitions.html"); page.getHTMLChecker().registerFile(n + "-definitions.html", "Detailed Descriptions for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); if (!isAbstract) { src = TextFile.fileToString(page.getFolders().srcDir + "template-mappings.html"); TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Mappings", n + "-mappings.html", null, values, resource.getWg(), null), st, n + "-mappings.html", 0, null), page.getFolders().dstDir + n + "-mappings.html"); page.getHTMLChecker().registerFile(n + "-mappings.html", "Formal Mappings for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); src = TextFile.fileToString(page.getFolders().srcDir + "template-profiles.html"); TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Profiles", n + "-profiles.html", null, values, resource.getWg(), null), st, n + "-profiles.html", 0, null), page.getFolders().dstDir + n + "-profiles.html"); page.getHTMLChecker().registerFile(n + "-profiles.html", "Profiles for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); } if (!resource.getOperations().isEmpty()) { src = TextFile.fileToString(page.getFolders().srcDir + "template-operations.html"); TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Operations", n + "-operations.html", null, values, resource.getWg(), null), st, n + "-operations.html", 0, null), page.getFolders().dstDir + n + "-operations.html"); page.getHTMLChecker().registerFile(n + "-operations.html", "Operations for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true); for (Operation t : resource.getOperations()) { produceOperation(null, resource.getName().toLowerCase()+"-"+t.getName(), resource.getName()+"-"+t.getName(), resource, t, st); } // // todo: get rid of these... 
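        // (the commented-out lines below are the old book-form rendering path referenced by the todo above)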
        // src = TextFile.fileToString(page.getFolders().srcDir + "template-book.html").replace("<body>", "<body style=\"margin: 10px\">");
        // src = page.processResourceIncludes(n, resource, xml, json, tx, dict, src, mappings, mappingsList, "resource", n + ".html", null);
        // cachePage(n + ".html", src, "Resource " + resource.getName(), true);
        // // src = TextFile.fileToString(page.getFolders().srcDir + "template-book-ex.html").replace("<body>", "<body style=\"margin: 10px\">");
        // // src = page.processResourceIncludes(n, resource, xml, tx, dict, src, mappings, mappingsList, "res-Examples");
        // // cachePage(n + "Ex.html", src, "Resource Examples for " + resource.getName());
        // src = TextFile.fileToString(page.getFolders().srcDir + "template-book-defn.html").replace("<body>", "<body style=\"margin: 10px\">");
        // src = page.processResourceIncludes(n, resource, xml, json, tx, dict, src, mappings, mappingsList, "res-Detailed Descriptions", n + "-definitions.html", null);
        // cachePage(n + "-definitions.html", src, "Resource Definitions for " + resource.getName(), true);
      }
      produceMap(resource.getName(), st, resource);
      for (Profile ap : resource.getConformancePackages())
        produceConformancePackage(resource, ap, st);
      src = TextFile.fileToString(page.getFolders().srcDir + "template-json-schema.html");
      TextFile.stringToFile(
          insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-schema", n + ".schema.json.html", null, values, resource.getWg(), null), st, n + ".schema.json.html", 0, null),
          page.getFolders().dstDir + n + ".schema.json.html");
      page.getHTMLChecker().registerFile(n + ".schema.json.html", "Json Schema for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true);
      src = TextFile.fileToString(page.getFolders().srcDir + "template-dependencies.html");
      TextFile.stringToFile(
          insertSectionNumbers(page.processResourceIncludes(n, resource, xml, json, ttl, tx, dict, src, mappings, mappingsList, "res-Dependencies", n + "-dependencies.html", null, values, resource.getWg(), null), st, n + "-dependencies.html", 0, null),
          page.getFolders().dstDir + n + "-dependencies.html");
      page.getHTMLChecker().registerFile(n + "-dependencies.html", "Dependency graph for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true);
      for (ConceptMap cm : statusCodeConceptMaps)
        if (cm.getUserData("resource-definition") == resource)
          produceConceptMap(cm, resource, st);
      // xml to json - todo: fix this up
      // JsonGenerator jsongen = new JsonGenerator();
      // jsongen.generate(new CSFile(page.getFolders().dstDir + n + ".xml"), new File(page.getFolders().dstDir + n + ".json"));
    }
    tmp.delete();
    new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateXlsx(new FileOutputStream(Utilities.path(page.getFolders().dstDir, n + ".xlsx")), resource.getProfile(), false, false);
    // because we'll pick up a little more information as we process the resource
    StructureDefinition p = generateProfile(resource, n, xml, json, ttl, !logicalOnly);
    com.google.gson.JsonObject diff = new com.google.gson.JsonObject();
    page.getDiffEngine().getDiffAsJson(diff, p);
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    json = gson.toJson(diff);
    TextFile.stringToFile(json, Utilities.path(page.getFolders().dstDir, resource.getName().toLowerCase() + ".diff.json"));
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = dbf.newDocumentBuilder();
    Document doc = builder.newDocument();
    Element element = doc.createElement("difference");
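    // attach the <difference> root, let the diff engine populate it, and pretty-print it alongside the JSON diff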
    doc.appendChild(element);
    page.getDiffEngine().getDiffAsXml(doc, element, p);
    prettyPrint(doc, Utilities.path(page.getFolders().dstDir, resource.getName().toLowerCase() + ".diff.xml"));
  }

  public void prettyPrint(Document xml, String filename) throws Exception {
    Transformer tf = TransformerFactory.newInstance().newTransformer();
    tf.setOutputProperty(OutputKeys.ENCODING, "UTF-8");
    tf.setOutputProperty(OutputKeys.INDENT, "yes");
    Writer out = new StringWriter();
    tf.transform(new DOMSource(xml), new StreamResult(out));
    TextFile.stringToFile(out.toString(), filename);
  }

  private void produceOperation(ImplementationGuideDefn ig, String name, String id, ResourceDefn resource, Operation op, SectionTracker st) throws Exception {
    OperationDefinition opd = new ProfileGenerator(page.getDefinitions(), page.getWorkerContext(), page, page.getGenDate(), page.getVersion(), dataElements, fpUsages, page.getFolders().rootDir).generate(name, id, resource.getName(), op, resource);
    String dir = ig == null ? "" : ig.getCode() + File.separator;
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + dir + "operation-" + name + ".xml");
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, opd);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + dir + "operation-" + name + ".canonical.xml");
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, opd);
    s.close();
    cloneToXhtml(dir + "operation-" + name + "", "Operation Definition", true, "resource-instance:OperationDefinition", "Operation definition", resource, resource.getWg());
    s = new FileOutputStream(page.getFolders().dstDir + dir + "operation-" + name + ".json");
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, opd);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + dir + "operation-" + name + ".canonical.json");
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, opd);
    s.close();
    jsonToXhtml(dir + "operation-" + name, "Operation Definition", resource2Json(opd), "resource-instance:OperationDefinition", "Operation definition", resource, resource.getWg());
    s = new FileOutputStream(page.getFolders().dstDir + dir + "operation-" + name + ".ttl");
    new RdfParser().setOutputStyle(OutputStyle.PRETTY).compose(s, opd);
    s.close();
    ttlToXhtml(dir + "operation-" + name, "Operation Definition", resource2Ttl(opd), "resource-instance:OperationDefinition", "Operation definition", resource, resource.getWg());
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + dir + "operation-" + name + ".xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + "operation-" + name + ".xml"));
    if (buildFlags.get("all")) {
      addToResourceFeed(opd, page.getResourceBundle(), name);
      page.getWorkerContext().cacheResource(opd);
    }
    // now we create a page for the operation
    String fnp = resource.getName().toLowerCase() + "-operation-" + op.getName().toLowerCase() + ".html";
    String src = TextFile.fileToString(page.getFolders().srcDir + "template-operation.html");
    src = page.processPageIncludes(fnp, src, "res-Operations", null, "operation-" + name + ".html", op.getResource(), null, "Operation Definition", op, ig, resource, resource.getWg());
    TextFile.stringToFile(insertSectionNumbers(src, st, fnp, 0, null), page.getFolders().dstDir + fnp);
    page.getHTMLChecker().registerFile(fnp, "Operation " + op.getName() + " for " + resource.getName(), HTMLLinkChecker.XHTML_TYPE, true);
    // now, we create an html page from the narrative
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example.html").replace("<%example%>", new XhtmlComposer(XhtmlComposer.HTML).compose(opd.getText().getDiv()));
    html = page.processPageIncludes(dir + "operation-" + name + ".html", html, "resource-instance:OperationDefinition", null, null, null, "Operation Definition", ig, resource, resource.getWg());
    TextFile.stringToFile(html, page.getFolders().dstDir + dir + "operation-" + name + ".html");
    page.getHTMLChecker().registerFile(dir + "operation-" + name + ".html", "Operation " + op.getName(), HTMLLinkChecker.XHTML_TYPE, true);
    // head = "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\">\r\n<head>\r\n <title>" + Utilities.escapeXml(e.getDescription()) + "</title>\r\n <link rel=\"Stylesheet\" href=\"fhir.css\" type=\"text/css\" media=\"screen\"/>\r\n" +
    //   "</head>\r\n<body>\r\n<p>&nbsp;</p>\r\n<p>" + Utilities.escapeXml(e.getDescription()) + "</p>\r\n" +
    //   "<p><a href=\"" + n + ".xml.html\">XML</a> <a href=\"" + n + ".json.html\">JSON</a></p>\r\n";
    // tail = "\r\n</body>\r\n</html>\r\n";
    // TextFile.stringToFile(head + narrative + tail, page.getFolders().dstDir + n + ".html");
  }

  /*
  private void generateQuestionnaire(String n, StructureDefinition p) throws Exception {
    QuestionnaireBuilder b = new QuestionnaireBuilder(page.getWorkerContext());
    b.setProfile(p);
    b.build();
    Questionnaire q = b.getQuestionnaire();
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(page.getFolders().dstDir + n + "-questionnaire.xml"), q);
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(page.getFolders().dstDir + n + "-questionnaire.json"), q);
  }
  */

  private void shexToXhtml(String n, String description, String shex, String pageType, String crumbTitle, ResourceDefn rd, WorkGroup wg) throws Exception {
    shexToXhtml(n, description, shex, pageType, crumbTitle, null, rd, wg);
  }

  private void shexToXhtml(String n, String description, String shex, String pageType, String crumbTitle, ImplementationGuideDefn igd, ResourceDefn rd, WorkGroup wg) throws Exception {
    shex = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(description) + "</p>\r\n<pre class=\"shex\">\r\n" + Utilities.escapeXml(shex) + "\r\n</pre>\r\n</div>\r\n";
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-shex.html").replace("<%example%>", shex);
    html = page.processPageIncludes(n + ".shex.html", html, pageType, null, null, null, crumbTitle, igd, rd, wg);
    TextFile.stringToFile(html, page.getFolders().dstDir + n + ".shex.html");
    page.getHTMLChecker().registerExternal(n + ".shex.html");
  }

  private void ttlToXhtml(String n, String description, String ttl, String pageType, String crumbTitle, ResourceDefn rd, WorkGroup wg) throws Exception {
    ttlToXhtml(n, description, ttl, pageType, crumbTitle, null, rd, wg);
  }

  private void ttlToXhtml(String n, String description, String ttl, String pageType, String crumbTitle, ImplementationGuideDefn igd, ResourceDefn rd, WorkGroup wg) throws Exception {
    ttl = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(description) + "</p>\r\n<pre class=\"turtle\">\r\n" + Utilities.escapeXml(ttl) + "\r\n</pre>\r\n</div>\r\n";
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-ttl.html").replace("<%example%>", ttl);
    html = page.processPageIncludes(n + ".ttl.html", html, pageType, null, null, null, crumbTitle, igd, rd, wg);
    TextFile.stringToFile(html, page.getFolders().dstDir + n + ".ttl.html");
    page.getHTMLChecker().registerExternal(n + ".ttl.html");
  }

  private void jsonToXhtml(String n, String description, String json, String pageType, String crumbTitle,
      ResourceDefn rd, WorkGroup wg) throws Exception {
    jsonToXhtml(n, description, json, pageType, crumbTitle, null, rd, wg);
  }

  private void jsonToXhtml(String n, String description, String json, String pageType, String crumbTitle, ImplementationGuideDefn igd, ResourceDefn rd, WorkGroup wg) throws Exception {
    json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(description) + "</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) + "\r\n</pre>\r\n</div>\r\n";
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-json.html").replace("<%example%>", json);
    html = page.processPageIncludes(n + ".json.html", html, pageType, null, null, null, crumbTitle, igd, rd, wg);
    TextFile.stringToFile(html, page.getFolders().dstDir + n + ".json.html");
    page.getHTMLChecker().registerExternal(n + ".json.html");
  }

  private void cloneToXhtml(String n, String description, boolean adorn, String pageType, String crumbTitle, ResourceDefn rd, WorkGroup wg) throws Exception {
    cloneToXhtml(n, description, adorn, pageType, crumbTitle, null, rd, wg);
  }

  private void cloneToXhtml(String n, String description, boolean adorn, String pageType, String crumbTitle, ImplementationGuideDefn igd, ResourceDefn rd, WorkGroup wg) throws Exception {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    DocumentBuilder builder = factory.newDocumentBuilder();
    Document xdoc = builder.parse(new CSFileInputStream(new CSFile(page.getFolders().dstDir + n + ".xml")));
    XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(Utilities.charCount(n, File.separatorChar))));
    ByteArrayOutputStream b = new ByteArrayOutputStream();
    xhtml.generate(xdoc, b, n.toUpperCase().substring(0, 1) + n.substring(1), description, 0, adorn, n + ".xml.html");
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-xml.html").replace("<%example%>", b.toString());
    html = page.processPageIncludes(n + ".xml.html", html, pageType, null, n + ".xml.html", null, null, crumbTitle, (adorn && hasNarrative(xdoc)) ? new Boolean(true) : null, igd, rd, wg);
    TextFile.stringToFile(html, page.getFolders().dstDir + n + ".xml.html");
    // page.getEpub().registerFile(n + ".xml.html", description, EPubManager.XHTML_TYPE);
    page.getHTMLChecker().registerExternal(n + ".xml.html");
  }

  private boolean hasNarrative(Document xdoc) {
    return XMLUtil.hasNamedChild(XMLUtil.getNamedChild(xdoc.getDocumentElement(), "text"), "div");
  }

  private void processQuestionnaire(ResourceDefn res, StructureDefinition profile, SectionTracker st, boolean isResource, String prefix, ImplementationGuideDefn ig) throws Exception {
    QuestionnaireBuilder qb = new QuestionnaireBuilder(page.getWorkerContext());
    qb.setProfile(profile);
    qb.build();
    Questionnaire q = qb.getQuestionnaire();
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix + profile.getId().toLowerCase() + "-questionnaire.json");
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, q);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + prefix + profile.getId().toLowerCase() + "-questionnaire.canonical.json");
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, q);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + prefix + profile.getId().toLowerCase() + "-questionnaire.xml");
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, q);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + prefix + profile.getId().toLowerCase() + "-questionnaire.canonical.xml");
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, q);
    s.close();
    String json = "<div class=\"example\">\r\n<p>Generated Questionnaire for " + profile.getId() + "</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(q)) + "\r\n</pre>\r\n</div>\r\n";
    String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-json.html").replace("<%example%>", json);
    html = page.processPageIncludes(prefix + profile.getId().toLowerCase() + "-questionnaire.json.html", html, (isResource ? "resource-questionnaire:" : "profile-questionnaire:") + profile.getId(), null, null, null, "Questionnaire", ig, res, res == null ? wg("fhir") : res.getWg());
    TextFile.stringToFile(html, page.getFolders().dstDir + prefix + profile.getId().toLowerCase() + "-questionnaire.json.html");
    String xml = "<div class=\"example\">\r\n<p>Generated Questionnaire for " + profile.getId() + "</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(new XmlParser().setOutputStyle(OutputStyle.PRETTY).composeString(q)) + "\r\n</pre>\r\n</div>\r\n";
    html = TextFile.fileToString(page.getFolders().srcDir + "template-example-xml.html").replace("<%example%>", xml);
    html = page.processPageIncludes(prefix + profile.getId().toLowerCase() + "-questionnaire.xml.html", html, (isResource ? "resource-questionnaire:" : "profile-questionnaire:") + profile.getId(), null, null, null, "Questionnaire", ig, res, res == null ?
wg("fhir") : res.getWg()); TextFile.stringToFile(html, page.getFolders().dstDir + prefix+ profile.getId().toLowerCase() + "-questionnaire.xml.html"); if (false) { File tmpTransform = Utilities.createTempFile("tmp", ".html"); // if (web) { HashMap<String, String> params = new HashMap<String, String>(); params.put("suppressWarnings", "true"); XsltUtilities.saxonTransform( Utilities.path(page.getFolders().rootDir, "implementations", "xmltools"), // directory for xslt references page.getFolders().dstDir + prefix+ profile.getId().toLowerCase() + "-questionnaire.xml", // source to run xslt on Utilities.path(page.getFolders().rootDir, "implementations", "xmltools", "QuestionnaireToHTML.xslt"), // xslt file to run tmpTransform.getAbsolutePath(), // file to produce this, // handle to self to implement URI resolver for terminology fetching params ); // } else // TextFile.stringToFile("test", tmpTransform.getAbsolutePath()); // now, generate the form html = TextFile.fileToString(page.getFolders().srcDir + (isResource ? "template-questionnaire.html" : "template-profile-questionnaire.html")).replace("<%questionnaire%>", loadHtmlForm(tmpTransform.getAbsolutePath())); } else html = "<html><p>Not generated in this build</p></html>"; html = page.processPageIncludes(profile.getId().toLowerCase() + "-questionnaire.html", html, (isResource ? "resource-questionnaire:" : "profile-questionnaire:") + profile.getId(), null, profile, null, "Questionnaire", ig, res, res == null ? wg("fhir") : res.getWg()); int level = (ig == null || ig.isCore()) ? 0 : 1; if (st != null) html = insertSectionNumbers(html, st, profile.getId().toLowerCase() + "-questionnaire.html", level, null); TextFile.stringToFile(html, page.getFolders().dstDir + prefix+ profile.getId().toLowerCase() + "-questionnaire.html"); page.getHTMLChecker().registerExternal(prefix+ profile.getId().toLowerCase() + "-questionnaire.html"); page.getHTMLChecker().registerExternal(prefix+ profile.getId().toLowerCase() + "-questionnaire.json.html"); page.getHTMLChecker().registerExternal(prefix+ profile.getId().toLowerCase() + "-questionnaire.xml.html"); } private String loadHtmlForm(String path) throws Exception { String form = TextFile.fileToString(path); form = form.replace("h5>", "h6>").replace("h4>", "h6>").replace("h3>", "h5>").replace("h2>", "h4>").replace("h1>", "h3>"); form = form.replace("<!--header insertion point-->", "\r\n"); form = form.replace("<!--body top insertion point-->", "\r\n"); form = form.replace("<!--body bottom insertion point-->", "\r\n"); return form; } private Set<String> examplesProcessed = new HashSet<String>(); private void processExample(Example e, ResourceDefn resn, StructureDefinition profile, Profile pack, ImplementationGuideDefn ig) throws Exception { if (e.getType() == ExampleType.Tool) return; long time = System.currentTimeMillis(); int level = (ig == null || ig.isCore()) ? 0 : 1; String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode() + File.separator; DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); DocumentBuilder builder = factory.newDocumentBuilder(); Document xdoc; String narrative = null; String n = e.getTitle(); if (examplesProcessed.contains(prefix+n)) return; examplesProcessed.add(prefix+n); // strip the xsi: stuff. 
seems to need double processing in order to // delete namespace crap xdoc = e.getXml(); XmlGenerator xmlgen = new XmlGenerator(); CSFile file = new CSFile(page.getFolders().dstDir + prefix +n + ".xml"); xmlgen.generate(xdoc.getDocumentElement(), file, "http://hl7.org/fhir", xdoc.getDocumentElement() .getLocalName()); // check the narrative. We generate auto-narrative. If the resource didn't // have it's own original narrative, then we save it anyway // n String rt = null; try { NarrativeGenerator gen = new NarrativeGenerator("", "", page.getWorkerContext(), page).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY); xdoc = loadDom(new FileInputStream(file), true); rt = xdoc.getDocumentElement().getNodeName(); String id = XMLUtil.getNamedChildValue(xdoc.getDocumentElement(), "id"); if (!page.getDefinitions().getBaseResources().containsKey(rt) && !id.equals(e.getId())) throw new Error("Resource in "+prefix +n + ".xml needs an id of value=\""+e.getId()+"\""); page.getDefinitions().addNs("http://hl7.org/fhir/"+rt+"/"+id, "Example", prefix +n + ".html"); if (rt.equals("ValueSet") || rt.equals("CodeSystem") || rt.equals("ConceptMap") || rt.equals("CapabilityStatement")) { // for these, we use the reference implementation directly MetadataResource res = (MetadataResource) new XmlParser().parse(new FileInputStream(file)); if (res.getUrl() != null && res.getUrl().startsWith("http://hl7.org/fhir")) res.setVersion(Constants.VERSION); boolean wantSave = false; if (res instanceof CapabilityStatement) { ((CapabilityStatement) res).setFhirVersion(page.getVersion()); if (res.hasText() && res.getText().hasDiv()) wantSave = updateVersion(((CapabilityStatement) res).getText().getDiv()); } if (!res.hasText() || !res.getText().hasDiv()) { gen.generate(res, null); wantSave = true; } if (wantSave) new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(file), res); narrative = new XhtmlComposer(XhtmlComposer.HTML).compose(res.getText().getDiv()); } else { if (rt.equals("Bundle")) { List<Element> entries = new ArrayList<Element>(); XMLUtil.getNamedChildren(xdoc.getDocumentElement(), "entry", entries); boolean wantSave = false; for (Element entry : entries) { Element ers = XMLUtil.getFirstChild(XMLUtil.getNamedChild(entry, "resource")); id = XMLUtil.getNamedChildValue(ers, "id"); if (id != null) page.getDefinitions().addNs("http://hl7.org/fhir/"+ers.getLocalName()+"/"+id, "Example", prefix +n + ".html", true); if (ers != null) { String ert = ers.getLocalName(); String s = null; if (!page.getDefinitions().getBaseResources().containsKey(ert) && !ert.equals("Binary") && !ert.equals("Parameters") && !ert.equals("Bundle")) { Element div = gen.getNarrative(ers); if (div == null || !div.hasChildNodes()) { wantSave = true; s = gen.generate(ers); } else s = new XmlGenerator().generate(div); if (s != null) narrative = narrative == null ? 
                      s : narrative + "<hr/>\r\n" + s;
              }
              if (ert.equals("NamingSystem")) {
                ByteArrayOutputStream bs = new ByteArrayOutputStream();
                new XmlGenerator().generate(ers, bs);
                bs.close();
                NamingSystem ns = (NamingSystem) new XmlParser().parse(new ByteArrayInputStream(bs.toByteArray()));
                if (!ns.hasUrl() || ns.getUrl().startsWith("http://hl7.org/fhir"))
                  ns.setVersion(Constants.VERSION);
                ns.setUserData("path", prefix + n + ".html");
                page.getDefinitions().getNamingSystems().add(ns);
              }
            }
          }
          if (wantSave)
            new XmlGenerator().generate(xdoc.getDocumentElement(), file, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName());
        } else {
          if (!page.getDefinitions().getBaseResources().containsKey(rt) && !rt.equals("Binary") && !rt.equals("Parameters")) {
            Element div = gen.getNarrative(xdoc.getDocumentElement());
            if (div == null || !div.hasChildNodes()) {
              narrative = gen.generate(xdoc.getDocumentElement());
              new XmlGenerator().generate(xdoc.getDocumentElement(), file, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName());
            } else {
              narrative = new XmlGenerator().generate(div);
            }
          }
        }
      }
    } catch (Exception ex) {
      StringWriter errors = new StringWriter();
      ex.printStackTrace();
      XhtmlNode xhtml = new XhtmlNode(NodeType.Element, "div");
      xhtml.addTag("p").setAttribute("style", "color: maroon").addText("Error processing narrative: " + ex.getMessage());
      xhtml.addTag("p").setAttribute("style", "color: maroon").addText(errors.toString());
      narrative = new XhtmlComposer(XhtmlComposer.HTML).compose(xhtml);
    }
    if (rt.equals("ValueSet")) {
      ValueSet vs = (ValueSet) new XmlParser().parse(new FileInputStream(file));
      vs.setUserData("filename", Utilities.changeFileExt(file.getName(), ""));
      vs.addExtension().setUrl(ToolingExtensions.EXT_WORKGROUP).setValue(new CodeType("fhir"));
      if (vs.getUrl().startsWith("http://hl7.org/fhir"))
        vs.setVersion(Constants.VERSION);
      page.getVsValidator().validate(page.getValidationErrors(), "Value set Example " + prefix + n, vs, false, false);
      if (vs.getUrl() == null)
        throw new Exception("Value set example " + e.getTitle() + " has no url");
      vs.setUserData("path", prefix + n + ".html");
      if (vs.getUrl().startsWith("http:"))
        page.getValueSets().put(vs.getUrl(), vs);
      addToResourceFeed(vs, valueSetsFeed, file.getName());
      page.getDefinitions().getValuesets().put(vs.getUrl(), vs);
      page.getDefinitions().getValuesets().put(vs.getUrl() + "|" + vs.getVersion(), vs);
    } else if (rt.equals("CodeSystem")) {
      CodeSystem cs = (CodeSystem) new XmlParser().parse(new FileInputStream(file));
      if (cs.getUrl().startsWith("http://hl7.org/fhir"))
        cs.setVersion(Constants.VERSION);
      cs.setUserData("example", "true");
      cs.setUserData("filename", Utilities.changeFileExt(file.getName(), ""));
      cs.addExtension().setUrl(ToolingExtensions.EXT_WORKGROUP).setValue(new CodeType("fhir"));
      cs.setUserData("path", prefix + n + ".html");
      addToResourceFeed(cs, valueSetsFeed, file.getName());
      page.getCodeSystems().put(cs.getUrl(), cs);
      page.getCodeSystems().put(cs.getUrl() + "|" + cs.getVersion(), cs);
    } else if (rt.equals("ConceptMap")) {
      ConceptMap cm = (ConceptMap) new XmlParser().parse(new FileInputStream(file));
      new ConceptMapValidator(page.getDefinitions(), e.getTitle()).validate(cm, false);
      if (cm.getUrl() == null)
        throw new Exception("Value set example " + e.getTitle() + " has no identifier");
      if (cm.getUrl().startsWith("http://hl7.org/fhir"))
        cm.setVersion(Constants.VERSION);
      addToResourceFeed(cm, conceptMapsFeed, file.getName());
      page.getDefinitions().getConceptMaps().put(cm.getUrl(), cm);
      cm.setUserData("path", prefix + n + ".html");
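      // register the concept map under both its plain canonical URL and its version-qualified URL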
page.getConceptMaps().put(cm.getUrl(), cm); page.getConceptMaps().put(cm.getUrl()+"|"+cm.getVersion(), cm); } // queue for json and canonical XML generation processing e.setResourceName(resn.getName()); String canonical = "http://hl7.org/fhir/"; org.hl7.fhir.r5.elementmodel.Element ex = Manager.parse(page.getWorkerContext(), new CSFileInputStream(page.getFolders().dstDir + prefix+n + ".xml"), FhirFormat.XML); new DefinitionsUsageTracker(page.getDefinitions()).updateUsage(ex); Manager.compose(page.getWorkerContext(), ex, new FileOutputStream(page.getFolders().dstDir + prefix+n + ".json"), FhirFormat.JSON, OutputStyle.PRETTY, canonical); // Manager.compose(page.getWorkerContext(), ex, new FileOutputStream(Utilities.changeFileExt(destName, ".canonical.json")), FhirFormat.JSON, OutputStyle.CANONICAL); // Manager.compose(page.getWorkerContext(), ex, new FileOutputStream(Utilities.changeFileExt(destName, ".canonical.xml")), FhirFormat.XML, OutputStyle.CANONICAL); Manager.compose(page.getWorkerContext(), ex, new FileOutputStream(page.getFolders().dstDir + prefix+n + ".ttl"), FhirFormat.TURTLE, OutputStyle.PRETTY, resn.getName().equals("Parameters") || resn.getName().equals("OperationOutcome") ? null : canonical); String json = TextFile.fileToString(page.getFolders().dstDir + prefix+n + ".json"); // String json2 = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(e.getDescription()) + "</p>\r\n<p><a href=\""+ n + ".json\">Raw JSON</a> (<a href=\""+n + ".canonical.json\">Canonical</a>)</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) // + "\r\n</pre>\r\n</div>\r\n"; json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(e.getDescription()) + "</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json) + "\r\n</pre>\r\n</div>\r\n"; String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-json.html").replace("<%example%>", json); html = page.processPageIncludes(n + ".json.html", html, e.getResourceName() == null ? "profile-instance:resource:" + e.getResourceName() : "resource-instance:" + e.getResourceName(), null, null, null, "Example", null, resn, resn.getWg()); TextFile.stringToFile(html, page.getFolders().dstDir + prefix+n + ".json.html"); page.getHTMLChecker().registerExternal(prefix+n + ".json.html"); String ttl = TextFile.fileToString(page.getFolders().dstDir + prefix+n + ".ttl"); ttl = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml(e.getDescription()) + "</p>\r\n<pre class=\"rdf\">\r\n" + Utilities.escapeXml(ttl) + "\r\n</pre>\r\n</div>\r\n"; html = TextFile.fileToString(page.getFolders().srcDir + "template-example-ttl.html").replace("<%example%>", ttl); html = page.processPageIncludes(n + ".ttl.html", html, e.getResourceName() == null ? "profile-instance:resource:" + e.getResourceName() : "resource-instance:" + e.getResourceName(), null, null, null, "Example", null, resn, resn.getWg()); TextFile.stringToFile(html, page.getFolders().dstDir + prefix+n + ".ttl.html"); page.getHTMLChecker().registerExternal(prefix+n + ".ttl.html"); // reload it now, xml to xhtml of xml builder = factory.newDocumentBuilder(); xdoc = builder.parse(new CSFileInputStream(file)); XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(level))); ByteArrayOutputStream b = new ByteArrayOutputStream(); xhtml.generate(xdoc, b, n.toUpperCase().substring(0, 1) + n.substring(1), Utilities.noString(e.getId()) ? 
        e.getDescription() : e.getDescription() + " (id = \"" + e.getId() + "\")", 0, true, n + ".xml.html");
    html = TextFile.fileToString(page.getFolders().srcDir + "template-example-xml.html").replace("<%example%>", b.toString());
    html = page.processPageIncludes(n + ".xml.html", html, resn == null ? "profile-instance:resource:" + rt : "resource-instance:" + resn.getName(), null, n + ".xml.html", profile, null, "Example", (hasNarrative(xdoc)) ? Boolean.TRUE : null, ig, resn, resn.getWg());
    TextFile.stringToFile(html, page.getFolders().dstDir + prefix + n + ".xml.html");
    XhtmlDocument d = new XhtmlParser().parse(new CSFileInputStream(page.getFolders().dstDir + prefix + n + ".xml.html"), "html");
    XhtmlNode pre = d.getElement("html").getElement("body").getElement("div");
    e.setXhtm(b.toString());
    Element root = xdoc.getDocumentElement();
    Element meta = XMLUtil.getNamedChild(root, "meta");
    if (meta == null) {
      Element id = XMLUtil.getNamedChild(root, "id");
      if (id == null)
        meta = XMLUtil.insertChild(xdoc, root, "meta", FormatUtilities.FHIR_NS, 2);
      else {
        Element pid = XMLUtil.getNextSibling(id);
        if (pid == null)
          throw new Exception("not handled - id is last child in " + n);
        else
          meta = XMLUtil.insertChild(xdoc, root, "meta", FormatUtilities.FHIR_NS, pid, 2);
      }
    }
    Element tag = XMLUtil.getNamedChild(meta, "tag");
    Element label = XMLUtil.insertChild(xdoc, meta, "security", FormatUtilities.FHIR_NS, tag, 4);
    XMLUtil.addTextTag(xdoc, label, "system", FormatUtilities.FHIR_NS, "http://terminology.hl7.org/CodeSystem/v3-ActReason", 6);
    XMLUtil.addTextTag(xdoc, label, "code", FormatUtilities.FHIR_NS, "HTEST", 6);
    XMLUtil.addTextTag(xdoc, label, "display", FormatUtilities.FHIR_NS, "test health data", 6);
    XMLUtil.spacer(xdoc, label, 4);
    XMLUtil.spacer(xdoc, meta, 2);
    String destf = (!Utilities.noString(e.getId())) ? page.getFolders().dstDir + "examples" + File.separator + n + "(" + e.getId() + ").xml" : page.getFolders().dstDir + "examples" + File.separator + n + ".xml";
    FileOutputStream fs = new FileOutputStream(destf);
    XMLUtil.saveToFile(root, fs);
    fs.close();
    // now, we create an html page from the narrative
    narrative = fixExampleReferences(e.getTitle(), narrative);
    html = TextFile.fileToString(page.getFolders().srcDir + "template-example.html").replace("<%example%>", narrative == null ? "" : narrative).replace("<%example-usage%>", genExampleUsage(e, page.genlevel(level)));
    html = page.processPageIncludes(n + ".html", html, resn == null ? "profile-instance:resource:" + rt : "resource-instance:" + resn.getName(), null, profile, null, "Example", ig, resn, resn.getWg());
    TextFile.stringToFile(html, page.getFolders().dstDir + prefix + n + ".html");
    // head =
    // "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\">\r\n<head>\r\n <title>"+Utilities.escapeXml(e.getDescription())+"</title>\r\n <link rel=\"Stylesheet\" href=\"fhir.css\" type=\"text/css\" media=\"screen\"/>\r\n"+
    // "</head>\r\n<body>\r\n<p>&nbsp;</p>\r\n<p>"+Utilities.escapeXml(e.getDescription())+"</p>\r\n"+
    // "<p><a href=\""+n+".xml.html\">XML</a> <a href=\""+n+".json.html\">JSON</a></p>\r\n";
    // tail = "\r\n</body>\r\n</html>\r\n";
    // TextFile.stringToFile(head+narrative+tail, page.getFolders().dstDir + n + ".html");
    page.getHTMLChecker().registerExternal(prefix + n + ".html");
    page.getHTMLChecker().registerExternal(prefix + n + ".xml.html");
  }

  private String fixExampleReferences(String path, String narrative) throws Exception {
    if (narrative == null)
      return "";
    XhtmlNode node = new XhtmlParser().parseFragment(narrative);
    checkExampleLinks(path, node);
    return new XhtmlComposer(XhtmlComposer.HTML).compose(node);
  }

  private void checkExampleLinks(String path, XhtmlNode node) throws Exception {
    if (node.getNodeType() == NodeType.Element) {
      if (node.getName().equals("a") && node.hasAttribute("href")) {
        String link = node.getAttribute("href");
        if (!link.startsWith("http:") && !link.startsWith("https:") && !link.startsWith("mailto:") && !link.contains(".html") && !link.startsWith("#")) {
          String[] parts = link.split("\\/");
          if ((parts.length == 2 || (parts.length == 4 && parts[2].equals("_history"))) && page.getDefinitions().hasResource(parts[0])) {
            node.setAttribute("href", determineLink(path, parts[0], parts[1]));
          } else if (page.getDefinitions().hasType(link)) {
            node.setAttribute("href", page.getDefinitions().getSrcFile(link) + ".html#" + link);
          } else if (page.getDefinitions().hasResource(link))
            node.setAttribute("href", link.toLowerCase() + ".html#" + link);
          else
            throw new Exception("Unknown example narrative href pattern: " + link);
        }
      } else
        for (XhtmlNode n : node.getChildNodes()) {
          checkExampleLinks(path, n);
        }
    }
  }

  private String determineLink(String path, String rn, String id) throws Exception {
    ResourceDefn r = page.getDefinitions().getResourceByName(rn);
    Example e = r.getExampleById(id);
    if (e == null)
      for (ImplementationGuideDefn ig : page.getDefinitions().getIgs().values()) {
        e = ig.getExample(rn, id);
        if (e != null)
          break;
      }
    if (e == null) {
      page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.NOTFOUND, path, "The reference to " + rn + "/" + id + " could not be resolved", IssueSeverity.WARNING));
      return "#null";
    } else
      return e.getTitle() + ".html";
  }

  private boolean updateVersion(XhtmlNode div) {
    if (div.getNodeType().equals(NodeType.Text)) {
      if (div.getContent().contains("$ver$")) {
        div.setContent(div.getContent().replace("$ver$", page.getVersion().toCode()));
        return true;
      } else
        return false;
    } else {
      boolean res = false;
      for (XhtmlNode child : div.getChildNodes())
        res = updateVersion(child) || res;
      return res;
    }
  }
  private String genExampleUsage(Example e, String prefix) {
    if (e.getInbounds().isEmpty())
      return "";
    else {
      StringBuilder b = new StringBuilder();
      b.append("<p>\r\nOther examples that reference this example:</p>\r\n");
      b.append("<ul>\r\n");
      List<String> names = new ArrayList<String>();
      for (Example x : e.getInbounds())
        names.add(x.getResourceName() + ":" + x.getId());
      Collections.sort(names);
      for (String n : names) {
        Example x = null;
        for (Example y : e.getInbounds())
          if (n.equals(y.getResourceName() + ":" + y.getId()))
            x = y;
        b.append("<li><a href=\"");
        b.append(prefix);
        if (x.getIg() != null) {
          ImplementationGuideDefn ig = page.getDefinitions().getIgs().get(x.getIg());
          if (ig != null && !ig.isCore()) {
            b.append(ig.getCode());
            b.append("/");
          }
        }
        b.append(x.getTitle() + ".html");
        b.append("\">");
        b.append(x.getResourceName() + "/" + x.getName());
        b.append("</a></li>\r\n");
      }
      b.append("</ul>\r\n");
      return b.toString();
    }
  }

  private String buildLoincExample(String filename) throws FileNotFoundException, Exception {
    LoincToDEConvertor conv = new LoincToDEConvertor();
    conv.setDefinitions(Utilities.path(page.getFolders().srcDir, "loinc", "loincS.xml"));
    conv.process();
    IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY);
    FileOutputStream s = new FileOutputStream(Utilities.path(page.getFolders().dstDir, filename + ".xml"));
    xml.compose(s, conv.getBundle());
    s.close();
    IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY);
    s = new FileOutputStream(Utilities.path(page.getFolders().dstDir, filename + ".json"));
    json.compose(s, conv.getBundle());
    s.close();
    return "Loinc Narrative";
  }

  private StructureDefinition generateProfile(ResourceDefn root, String n, String xmlSpec, String jsonSpec, String ttlSpec, boolean gen) throws Exception, FileNotFoundException {
    StructureDefinition rp = root.getProfile();
    page.getProfiles().put("http://hl7.org/fhir/StructureDefinition/" + root.getName(), rp);
    page.getProfiles().put(root.getName(), rp);
    FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + n + ".profile.xml");
    new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + n + ".profile.canonical.xml");
    new XmlParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + n + ".profile.json");
    new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(s, rp);
    s.close();
    s = new FileOutputStream(page.getFolders().dstDir + n + ".profile.canonical.json");
    new JsonParser().setOutputStyle(OutputStyle.CANONICAL).compose(s, rp);
    s.close();
    Utilities.copyFile(new CSFile(page.getFolders().dstDir + n + ".profile.xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + n + ".profile.xml"));
    if (buildFlags.get("all")) {
      addToResourceFeed(rp, page.getResourceBundle(), null);
    }
    if (gen) {
      saveAsPureHtml(rp, new FileOutputStream(page.getFolders().dstDir + "html" + File.separator + n + ".html"));
      cloneToXhtml(n + ".profile", "StructureDefinition for " + n, true, "profile-instance:resource:" + root.getName(), "Profile", root, root.getWg());
      jsonToXhtml(n + ".profile", "StructureDefinition for " + n, resource2Json(rp), "profile-instance:resource:" + root.getName(), "Profile", root, root.getWg());
      ttlToXhtml(n + ".profile", "StructureDefinition for " + n, resource2Ttl(rp), "profile-instance:resource:" + root.getName(), "Profile", root, root.getWg());
      String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, rp);
      TextFile.stringToFile(shex, page.getFolders().dstDir + n + ".shex");
      shexToXhtml(n, "ShEx statement for " + n, shex, "profile-instance:type:" + root.getName(), "Type", root, root.getWg());
    }
    return rp;
  }

  private void deletefromFeed(ResourceType type, String id, Bundle feed) {
    int index = -1;
    for (BundleEntryComponent ae : feed.getEntry()) {
      if (ae.getResource().getId().equals(id) && ae.getResource().getResourceType() == type)
        index = feed.getEntry().indexOf(ae);
    }
    if (index > -1)
      feed.getEntry().remove(index);
  }
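  // The helpers below publish resources into the build's bundles ("feeds"). They are
  // guard-heavy on purpose: every entry must have an id, a narrative, and (for canonical
  // resources) a URL matching http://hl7.org/fhir/<Type>/<id> before it is accepted.
  // A minimal usage sketch (hypothetical names, for illustration only):
  //   Bundle feed = new Bundle();
  //   addToResourceFeed(myValueSet, feed, "valueset-my-example.xml"); // derives a missing id from the filename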
  private void saveAsPureHtml(DomainResource resource, FileOutputStream stream) throws Exception {
    saveAsPureHtml(resource, stream, false);
  }

  private void saveAsPureHtml(DomainResource resource, FileOutputStream stream, boolean isPretty) throws Exception {
    XhtmlDocument html = new XhtmlDocument();
    html.setNodeType(NodeType.Document);
    html.addComment("Generated automatically by FHIR Tooling");
    XhtmlNode doc = html.addTag("html");
    XhtmlNode head = doc.addTag("head");
    XhtmlNode work = head.addTag("title");
    work.addText("test title");
    work = head.addTag("link");
    work.setAttribute("rel", "Stylesheet");
    work.setAttribute("href", "/css/fhir.css");
    work.setAttribute("type", "text/css");
    work.setAttribute("media", "screen");
    work = doc.addTag("body");
    if ((resource.hasText()) && (resource.getText().hasDiv())) {
      work.getAttributes().putAll(resource.getText().getDiv().getAttributes());
      work.getChildNodes().addAll(resource.getText().getDiv().getChildNodes());
    }
    XhtmlComposer xml = new XhtmlComposer(XhtmlComposer.HTML, isPretty);
    xml.compose(stream, html);
    stream.close();
  }

  private void addToResourceFeed(DomainResource resource, Bundle dest, String filename) throws Exception {
    maybeFixResourceId(resource, filename);
    if (resource.getId() == null)
      throw new Exception("Resource has no id");
    BundleEntryComponent byId = ResourceUtilities.getEntryById(dest, resource.getResourceType(), resource.getId());
    if (byId != null)
      dest.getEntry().remove(byId);
    deletefromFeed(resource.getResourceType(), resource.getId(), dest);
    ResourceUtilities.meta(resource).setLastUpdated(page.getGenDate().getTime());
    if (resource.getText() == null || resource.getText().getDiv() == null)
      throw new Exception("Example Resource " + resource.getId() + " does not have any narrative");
    dest.getEntry().add(new BundleEntryComponent().setResource(resource).setFullUrl("http://hl7.org/fhir/" + resource.getResourceType().toString() + "/" + resource.getId()));
  }

  private void maybeFixResourceId(DomainResource theResource, String theFilename) {
    if (theResource.getId() == null && theFilename != null) {
      String candidateId = theFilename.replaceAll("\\..*", "");
      candidateId = FormatUtilities.makeId(candidateId);
      theResource.setId(candidateId);
    }
  }

  private void addToResourceFeed(ValueSet vs, Bundle dest, String filename) throws Exception {
    maybeFixResourceId(vs, filename);
    if (vs.getId() == null)
      throw new Exception("Resource has no id: " + vs.getName() + " (" + vs.getUrl() + ")");
    if (ResourceUtilities.getById(dest, ResourceType.ValueSet, vs.getId()) != null)
      throw new Exception("Attempt to add duplicate value set " + vs.getId() + " (" + vs.getName() + ")");
    if (!vs.hasText() || vs.getText().getDiv() == null)
      throw new Exception("Example Value Set " + vs.getId() + " does not have any narrative");
    ResourceUtilities.meta(vs).setLastUpdated(page.getGenDate().getTime());
    if (vs.getUrl().startsWith("http://hl7.org/fhir/") && !vs.getUrl().equals("http://hl7.org/fhir/" + vs.getResourceType().toString() + "/" + vs.getId()))
      throw new Exception("URL mismatch on value set: " + vs.getUrl() + " vs " + "http://hl7.org/fhir/" + vs.getResourceType().toString() + "/" + vs.getId());
    dest.getEntry().add(new BundleEntryComponent().setResource(vs).setFullUrl("http://hl7.org/fhir/" + vs.fhirType() + "/" + vs.getId()));
  }
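  // The remaining overloads apply the same invariants per resource type. Note that the
  // CapabilityStatement variant deliberately downgrades the missing-narrative check to a
  // console warning, since generateConformanceStatement does not produce a narrative when
  // "register" is false (see the inline comment below).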
  private void addToResourceFeed(ConceptMap cm, Bundle dest) throws Exception {
    if (cm.getId() == null)
      throw new Exception("Resource has no id");
    if (ResourceUtilities.getById(dest, ResourceType.ConceptMap, cm.getId()) != null)
      throw new Exception("Attempt to add duplicate Concept Map " + cm.getId());
    if (cm.getText() == null || cm.getText().getDiv() == null)
      throw new Exception("Example Concept Map " + cm.getId() + " does not have any narrative");
    ResourceUtilities.meta(cm).setLastUpdated(page.getGenDate().getTime());
    if (!cm.getUrl().equals("http://hl7.org/fhir/" + cm.getResourceType().toString() + "/" + cm.getId()))
      throw new Exception("URL mismatch on concept map");
    dest.getEntry().add(new BundleEntryComponent().setResource(cm).setFullUrl("http://hl7.org/fhir/" + cm.fhirType() + "/" + cm.getId()));
  }

  private void addToResourceFeed(CompartmentDefinition cd, Bundle dest) throws Exception {
    if (cd.getId() == null)
      throw new Exception("Resource has no id");
    if (ResourceUtilities.getById(dest, ResourceType.CompartmentDefinition, cd.getId()) != null)
      throw new Exception("Attempt to add duplicate Compartment Definition " + cd.getId());
    if (cd.getText() == null || cd.getText().getDiv() == null)
      throw new Exception("Example Compartment Definition " + cd.getId() + " does not have any narrative");
    ResourceUtilities.meta(cd).setLastUpdated(page.getGenDate().getTime());
    if (!cd.getUrl().equals("http://hl7.org/fhir/" + cd.getResourceType().toString() + "/" + cd.getId()))
      throw new Exception("URL mismatch on compartment definition");
    dest.getEntry().add(new BundleEntryComponent().setResource(cd).setFullUrl("http://hl7.org/fhir/" + cd.fhirType() + "/" + cd.getId()));
  }

  private void addToResourceFeed(CapabilityStatement cs, Bundle dest) throws Exception {
    if (cs.getId() == null)
      throw new Exception("Resource has no id");
    if (ResourceUtilities.getById(dest, ResourceType.CapabilityStatement, cs.getId()) != null)
      throw new Exception("Attempt to add duplicate CapabilityStatement " + cs.getId());
    if (!cs.hasText() || cs.getText().getDiv() == null)
      System.out.println("WARNING: Example CapabilityStatement " + cs.getId() + " does not have any narrative");
    // Changed this from an exception to a warning because generateConformanceStatement doesn't produce narrative if
    // "register" is 'false'
    ResourceUtilities.meta(cs).setLastUpdated(page.getGenDate().getTime());
    if (!cs.getUrl().equals("http://hl7.org/fhir/" + cs.getResourceType().toString() + "/" + cs.getId()))
      throw new Exception("URL mismatch on CapabilityStatement");
    dest.getEntry().add(new BundleEntryComponent().setResource(cs).setFullUrl("http://hl7.org/fhir/" + cs.fhirType() + "/" + cs.getId()));
  }

  private void produceConformancePackage(ResourceDefn res, Profile pack, SectionTracker st) throws Exception {
    String resourceName = res == null ? "" : res.getName();
    if (Utilities.noString(resourceName)) {
      if (pack.getProfiles().size() == 1) {
        if (pack.getProfiles().get(0).getDefn() != null)
          resourceName = pack.getProfiles().get(0).getDefn().getName();
        else
          resourceName = pack.getProfiles().get(0).getResource().getType();
      } else if (pack.getProfiles().size() == 0) {
        // throw new Exception("Unable to determine resource name - no profiles"); no, we don't complain
      } else if (pack.getProfiles().get(0).getDefn() != null) {
        resourceName = pack.getProfiles().get(0).getDefn().getName();
        for (int i = 1; i < pack.getProfiles().size(); i++)
          if (!pack.getProfiles().get(i).getDefn().getName().equals(resourceName))
            throw new Exception("Unable to determine resource name - profile mismatch " + resourceName + "/" + pack.getProfiles().get(i).getDefn().getName());
      }
    }
    ImplementationGuideDefn ig = page.getDefinitions().getIgs().get(pack.getCategory());
    String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode() + File.separator;
    String intro = pack.getIntroduction() != null ?
page.loadXmlNotesFromFile(pack.getIntroduction(), false, null, null, null, null, res == null ? wg("fhir") : res.getWg()) : null; String notes = pack.getNotes() != null ? page.loadXmlNotesFromFile(pack.getNotes(), false, null, null, null, null, res == null ? wg("fhir") : res.getWg()) : null; if (!("profile".equals(pack.metadata("navigation")) && pack.getProfiles().size() == 1)) { String src = TextFile.fileToString(page.getFolders().srcDir + "template-conformance-pack.html"); src = page.processConformancePackageIncludes(pack, src, intro, notes, resourceName, ig); if (st != null) src = insertSectionNumbers(src, st, pack.getId().toLowerCase() + ".html", 0, null); else if (ig != null && !ig.isCore()) src = addSectionNumbers(pack.getId() + ".html", pack.getId(), src, null, 1, null, ig); page.getHTMLChecker().registerFile(prefix+pack.getId().toLowerCase() + ".html", "Profile " + pack.getId(), HTMLLinkChecker.XHTML_TYPE, true); TextFile.stringToFile(src, page.getFolders().dstDir + prefix+pack.getId() + ".html"); } // now, we produce each profile for (ConstraintStructure profile : pack.getProfiles()) produceProfile(res, pack, profile, st, intro, notes, prefix, ig); for (SearchParameter sp : pack.getSearchParameters()) producePackSearchParameter(res, pack, sp, st, ig); for (Example ex : pack.getExamples()) { StructureDefinition sd = null; boolean ambiguous = false; for (ConstraintStructure sdt : pack.getProfiles()) { if (sdt.getResource().getSnapshot().getElement().get(0).getPath().equals(resourceName)) if (sd == null) sd = sdt.getResource(); else ambiguous = true; } if (ambiguous) processExample(ex, res, null, null, ig); else processExample(ex, res, sd, pack, ig); } // create examples here // if (examples != null) { // for (String en : examples.keySet()) { // processExample(examples.get(en), null, profile.getSource()); } private void producePackSearchParameter(ResourceDefn res, Profile pack, SearchParameter sp, SectionTracker st, ImplementationGuideDefn ig) throws Exception { String title = sp.getId(); sp.setUserData("pack", pack.getId()); String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode()+File.separator; int level = (ig == null || ig.isCore()) ? 0 : 1; XmlParser comp = new XmlParser(); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix+title + ".xml"); comp.setOutputStyle(OutputStyle.PRETTY).compose(s, sp); s.close(); s = new FileOutputStream(page.getFolders().dstDir + prefix+title + ".canonical.xml"); comp.setOutputStyle(OutputStyle.CANONICAL).compose(s, sp); s.close(); JsonParser jcomp = new JsonParser(); s = new FileOutputStream(page.getFolders().dstDir + prefix+title + ".json"); jcomp.setOutputStyle(OutputStyle.PRETTY).compose(s, sp); s.close(); s = new FileOutputStream(page.getFolders().dstDir + prefix+title + ".canonical.json"); jcomp.setOutputStyle(OutputStyle.CANONICAL).compose(s, sp); s.close(); String src = TextFile.fileToString(page.getFolders().srcDir + "template-search-parameter.html"); src = page.processPageIncludes(sp.getId()+".html", src, "search-parameter:"+(res == null ? "na" : res.getName())+"/"+pack.getId()+"/"+sp.getId(), null, sp, null, "Search Parameter", ig, res, res == null ? 
wg("fhir"): res.getWg()); if (st != null) src = insertSectionNumbers(src, st, title + ".html", level, null); page.getHTMLChecker().registerFile(prefix+title + ".html", "SearchParameter " + sp.getName(), HTMLLinkChecker.XHTML_TYPE, true); TextFile.stringToFile(src, page.getFolders().dstDir + prefix+title + ".html"); cloneToXhtml(prefix+title, "Search Parameter "+sp.getName(), false, "searchparam-instance", "Search Parameter", res, res == null ? wg("fhir") : res.getWg()); String json = resource2Json(sp); json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml("SearchParameter " + sp.getName()) + "</p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json)+ "\r\n</pre>\r\n</div>\r\n"; String html = TextFile.fileToString(page.getFolders().srcDir + "template-example-json.html").replace("<%example%>", json); html = page.processPageIncludes(title + ".json.html", html, "search-parameter:"+(res == null ? "wg" : res.getName())+"/"+pack.getId()+"/"+sp.getId(), null, sp, null, "Search Parameter", ig, res, res == null ? wg("fhir"): res.getWg()); TextFile.stringToFile(html, page.getFolders().dstDir + prefix+title + ".json.html"); page.getHTMLChecker().registerExternal(prefix+title + ".json.html"); } private void produceProfile(ResourceDefn resource, Profile pack, ConstraintStructure profile, SectionTracker st, String intro, String notes, String prefix, ImplementationGuideDefn ig) throws Exception { File tmp = Utilities.createTempFile("tmp", ".tmp"); String title = profile.getId(); int level = (ig == null || ig.isCore()) ? 0 : 1; // you have to validate a profile, because it has to be merged with it's // base resource to fill out all the missing bits // validateProfile(profile); ByteArrayOutputStream bs = new ByteArrayOutputStream(); XmlSpecGenerator gen = new XmlSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../"); gen.generate(profile.getResource()); gen.close(); String xml = new String(bs.toByteArray()); bs = new ByteArrayOutputStream(); JsonSpecGenerator genJ = new JsonSpecGenerator(bs, title + "-definitions.html", "", page, ig.isCore() ? "" : "../"); genJ.generate(profile.getResource()); genJ.close(); String json = new String(bs.toByteArray()); XmlParser comp = new XmlParser(); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + prefix +title + ".profile.xml"); comp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource()); s.close(); Utilities.copyFile(new CSFile(page.getFolders().dstDir + prefix +title + ".profile.xml"), new CSFile(page.getFolders().dstDir + "examples" + File.separator + title+ ".profile.xml")); JsonParser jcomp = new JsonParser(); s = new FileOutputStream(page.getFolders().dstDir + prefix +title + ".profile.json"); jcomp.setOutputStyle(OutputStyle.PRETTY).compose(s, profile.getResource()); s.close(); // String shex = new ShExGenerator(page.getWorkerContext()).generate(HTMLLinkPolicy.NONE, profile.getResource()); // TextFile.stringToFile(shex, Utilities.changeFileExt(page.getFolders().dstDir + prefix +title + ".profile.shex", ".shex")); // shexToXhtml(prefix +title + ".profile", "ShEx statement for " + prefix +title, shex, "profile-instance:type:" + title, "Type"); TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page); tgen.generate(level == 0 ? 
"" : "../", profile); tgen.close(); String tx = TextFile.fileToString(tmp.getAbsolutePath()); String src = TextFile.fileToString(page.getFolders().srcDir + "template-profile.html"); src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, false); if (st != null) src = insertSectionNumbers(src, st, title + ".html", level, null); else if (ig != null && !ig.isCore()) { src = addSectionNumbers(title + ".html", title, src, null, 1, null, ig); st = page.getSectionTrackerCache().get(ig.getCode()+"::"+title); } page.getHTMLChecker().registerFile(prefix +title + ".html", "StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, false); TextFile.stringToFile(src, page.getFolders().dstDir + prefix +title + ".html"); new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSchematrons(new FileOutputStream(page.getFolders().dstDir + prefix +title + ".sch"), profile.getResource()); if (pack.getExamples().size() > 0) { src = TextFile.fileToString(page.getFolders().srcDir + "template-profile-examples.html"); src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, false); page.getHTMLChecker().registerFile(prefix+title + "-examples.html", "Examples for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true); TextFile.stringToFile(src, page.getFolders().dstDir + prefix +title + "-examples.html"); } src = TextFile.fileToString(page.getFolders().srcDir + "template-profile-definitions.html"); src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, false); if (st != null) src = insertSectionNumbers(src, st, title + "-definitions.html", level, null); page.getHTMLChecker().registerFile(prefix +title + "-definitions.html", "Definitions for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true); TextFile.stringToFile(src, page.getFolders().dstDir + prefix +title + "-definitions.html"); src = TextFile.fileToString(page.getFolders().srcDir + "template-profile-mappings.html"); src = page.processProfileIncludes(profile.getId(), profile.getId(), pack, profile, xml, json, tx, src, title + ".html", (resource == null ? 
profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, false); if (st != null) src = insertSectionNumbers(src, st, title + "-mappings.html", level, null); page.getHTMLChecker().registerFile(prefix +title + "-mappings.html", "Mappings for StructureDefinition " + profile.getResource().getName(), HTMLLinkChecker.XHTML_TYPE, true); TextFile.stringToFile(src, page.getFolders().dstDir + prefix +title + "-mappings.html"); try { processQuestionnaire(resource, profile.getResource(), st, false, prefix, ig); } catch (Exception e) { e.printStackTrace(); page.log("Questionnaire Generation Failed: "+e.getMessage(), LogMessageType.Error); } new ReviewSpreadsheetGenerator().generate(page.getFolders().dstDir +prefix+ Utilities.changeFileExt((String) profile.getResource().getUserData("filename"), "-review.xls"), "Health Level Seven International", page.getGenDate(), profile.getResource(), page); // // src = Utilities.fileToString(page.getFolders().srcDir + // "template-print.html").replace("<body>", // "<body style=\"margin: 20px\">"); // src = processResourceIncludes(n, root, xml, tx, dict, src); // Utilities.stringToFile(src, page.getFolders().dstDir + // "print-"+n+".html"); // Utilities.copyFile(umlf, new // File(page.getFolders().dstDir+n+".png")); // src = Utilities.fileToString(page.getFolders().srcDir + // "template-book.html").replace("<body>", // "<body style=\"margin: 10px\">"); // src = processResourceIncludes(n, root, xml, tx, dict, src); // cachePage(n+".html", src); // // xml to xhtml of xml // first pass is to strip the xsi: stuff. seems to need double // processing in order to delete namespace crap DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); DocumentBuilder builder = factory.newDocumentBuilder(); Document xdoc = builder.parse(new CSFileInputStream(page.getFolders().dstDir + prefix +title + ".profile.xml")); XmlGenerator xmlgen = new XmlGenerator(); xmlgen.generate(xdoc.getDocumentElement(), tmp, "http://hl7.org/fhir", xdoc.getDocumentElement().getLocalName()); // reload it now builder = factory.newDocumentBuilder(); xdoc = builder.parse(new CSFileInputStream(tmp.getAbsolutePath())); XhtmlGenerator xhtml = new XhtmlGenerator(new ExampleAdorner(page.getDefinitions(), page.genlevel(level))); ByteArrayOutputStream b = new ByteArrayOutputStream(); xhtml.generate(xdoc, b, "StructureDefinition", profile.getTitle(), 0, true, title + ".profile.xml.html"); String html = TextFile.fileToString(page.getFolders().srcDir + "template-profile-example-xml.html").replace("<%example%>", b.toString()); html = page.processProfileIncludes(title + ".profile.xml.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? 
profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, hasNarrative(xdoc)); TextFile.stringToFile(html, page.getFolders().dstDir + prefix +title + ".profile.xml.html"); page.getHTMLChecker().registerFile(prefix +title + ".profile.xml.html", "StructureDefinition", HTMLLinkChecker.XHTML_TYPE, false); String n = prefix +title + ".profile"; json = resource2Json(profile.getResource()); json = "<div class=\"example\">\r\n<p>" + Utilities.escapeXml("StructureDefinition for " + profile.getResource().getDescription()) + "</p>\r\n<p><a href=\""+title+".profile.json\">Raw JSON</a></p>\r\n<pre class=\"json\">\r\n" + Utilities.escapeXml(json)+ "\r\n</pre>\r\n</div>\r\n"; html = TextFile.fileToString(page.getFolders().srcDir + "template-profile-example-json.html").replace("<%example%>", json); html = page.processProfileIncludes(title + ".profile.json.html", profile.getId(), pack, profile, "", "", "", html, title + ".html", (resource == null ? profile.getResource().getType() : resource.getName())+"/"+pack.getId()+"/"+profile.getId(), intro, notes, ig, false, false); TextFile.stringToFile(html, page.getFolders().dstDir + prefix +title + ".profile.json.html"); // page.getEpub().registerFile(n + ".json.html", description, EPubManager.XHTML_TYPE); page.getHTMLChecker().registerExternal(n + ".json.html"); tmp.delete(); } // private void validateProfile(ProfileDefn profile) throws FileNotFoundException, Exception { // for (ResourceDefn c : profile.getResources()) { // StructureDefinition resource = loadResourceProfile(c.getName()); // ProfileValidator v = new ProfileValidator(); // v.setCandidate(c); // v.setProfile(resource); // v.setTypes(typeFeed); // List<String> errors = v.evaluate(); // if (errors.size() > 0) // throw new Exception("Error validating " + profile.metadata("name") + ": " + errors.toString()); // } // } // private void produceFutureReference(String n) throws Exception { // ElementDefn e = new ElementDefn(); // e.setName(page.getIni().getStringProperty("future-resources", n)); // } /* private StructureDefinition loadResourceProfile(String name) throws FileNotFoundException, Exception { XmlParser xml = new XmlParser(); try { return (StructureDefinition) xml.parse(new CSFileInputStream(page.getFolders().dstDir + name.toLowerCase() + ".profile.xml")); } catch (Exception e) { throw new Exception("error parsing " + name, e); } } */ // private void produceIgPage(String source, String file, String logicalName, ImplementationGuideDefn ig) throws Exception { // String src = TextFile.fileToString(source); // src = page.processPageIncludes(file, src, "page", null, null, null, logicalName, null); // // before we save this page out, we're going to figure out what it's index // // is, and number the headers if we can // // if (Utilities.noString(logicalName)) // logicalName = Utilities.fileTitle(file); // // TextFile.stringToFile(src, page.getFolders().dstDir + file); // src = addSectionNumbers(file, logicalName, src, null, 0, null, ig); // // TextFile.stringToFile(src, page.getFolders().dstDir + file); // // src = TextFile.fileToString(source).replace("<body>", "<body style=\"margin: 10px\">"); // src = page.processPageIncludesForBook(file, src, "page", null, null); // cachePage(file, src, logicalName); // } // private void producePage(String file, String logicalName) throws Exception { String src = TextFile.fileToString(page.getFolders().srcDir + file); src = page.processPageIncludes(file, src, "page", null, null, null, logicalName, null, 
null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can if (Utilities.noString(logicalName)) logicalName = Utilities.fileTitle(file); TextFile.stringToFile(src, page.getFolders().dstDir + file); DocumentHolder doch = new DocumentHolder(); src = addSectionNumbers(file, logicalName, src, null, 0, doch, null); if (!page.getDefinitions().getStructuralPages().contains(file)) { XhtmlNode fmm = findId(doch.doc, "fmm"); XhtmlNode wg = findId(doch.doc, "wg"); if (fmm == null) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.BUSINESSRULE, -1, -1, file, "Page has no fmm level", IssueSeverity.ERROR)); else page.getDefinitions().page(file).setFmm(get2ndPart(fmm.allText())); if (wg == null) page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.BUSINESSRULE, -1, -1, file, "Page has no workgroup", IssueSeverity.ERROR)); else page.getDefinitions().page(file).setWg(wg.getChildNodes().get(0).allText()); } TextFile.stringToFile(src, page.getFolders().dstDir + file); src = TextFile.fileToString(page.getFolders().srcDir + file).replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(file, src, "page", null, null, null); cachePage(file, src, logicalName, true); } private String get2ndPart(String t) { return t.substring(t.indexOf(":")+1).trim(); } private void produceIgPage(ImplementationGuideDefn ig, ImplementationGuideDefinitionPageComponent p) throws Exception { String actualName = Utilities.path(page.getFolders().rootDir, Utilities.getDirectoryForFile(ig.getSource()), p.getNameUrlType().getValue()); String logicalName = Utilities.fileTitle(actualName); String src; if (IgParser.getKind(p) == GuidePageKind.TOC) src = TextFile.fileToString(Utilities.path(page.getFolders().srcDir, "template-ig-toc.html")); else throw new Exception("Unsupported special page kind "+IgParser.getKind(p).toCode()); String file = ig.getCode()+File.separator+logicalName +".html"; src = page.processPageIncludes(file, src, "page", null, null, null, logicalName, ig, null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can src = addSectionNumbers(file, logicalName, src, null, 1, null, ig); TextFile.stringToFile(src, Utilities.path(page.getFolders().dstDir, file)); src = TextFile.fileToString(Utilities.path(page.getFolders().dstDir, file)).replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(file, src, "page", null, ig, null); cachePage(file, src, logicalName, true); } private void produceIgPage(String file, ImplementationGuideDefn ig) throws Exception { String actualName = Utilities.path(page.getFolders().rootDir, Utilities.getDirectoryForFile(ig.getSource()), file); String logicalName = Utilities.fileTitle(actualName); String src = TextFile.fileToString(actualName); file = ig.getCode()+File.separator+logicalName +".html"; src = page.processPageIncludes(file, src, "page", null, null, null, logicalName, ig, null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can TextFile.stringToFile(src, page.getFolders().dstDir + file); src = addSectionNumbers(file, logicalName, src, null, 1, null, ig); TextFile.stringToFile(src, page.getFolders().dstDir + file); src = TextFile.fileToString(actualName).replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(file, src, "page", null, 
ig, null); cachePage(file, src, logicalName, true); } private void produceIgPage(String file, ImplementationGuideDefn ig, String logicalName) throws Exception { String srcOrig = TextFile.fileToString(page.getFolders().srcDir + file); file = file.substring(3); String src = page.processPageIncludes(file, srcOrig, "page", null, null, null, logicalName, ig, null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can if (Utilities.noString(logicalName)) logicalName = Utilities.fileTitle(file); TextFile.stringToFile(src, Utilities.path(page.getFolders().dstDir, ig.getCode(), file)); DocumentHolder doch = new DocumentHolder(); src = addSectionNumbers(file, logicalName, src, null, 0, doch, ig); // if (!page.getDefinitions().getStructuralPages().contains(file)) { // XhtmlNode fmm = findId(doch.doc, "fmm"); // XhtmlNode wg = findId(doch.doc, "wg"); // if (fmm == null) // page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.BUSINESSRULE, -1, -1, file, "Page has no fmm level", IssueSeverity.ERROR)); // else // page.getDefinitions().page(file).setFmm(get2ndPart(fmm.allText())); // if (wg == null) // page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.BUSINESSRULE, -1, -1, file, "Page has no workgroup", IssueSeverity.ERROR)); // else // page.getDefinitions().page(file).setWg(wg.getChildNodes().get(0).allText()); // } // // TextFile.stringToFile(src, page.getFolders().dstDir + file); src = srcOrig.replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(file, src, "page", null, ig, null); cachePage(ig.getCode()+File.separator+file, src, logicalName, true); } private void produceLogicalModel(LogicalModel lm, ImplementationGuideDefn ig) throws Exception { String n = lm.getId(); Map<String, String> examples = new HashMap<String, String>(); File tmp = Utilities.createTempFile("tmp", ".tmp"); TerminologyNotesGenerator tgen = new TerminologyNotesGenerator(new FileOutputStream(tmp), page); if (lm.hasResource()) tgen.generate("", lm.getResource().getRoot()); else tgen.generate("", lm.getDefinition()); tgen.close(); String tx = TextFile.fileToString(tmp.getAbsolutePath()); DictHTMLGenerator dgen = new DictHTMLGenerator(new FileOutputStream(tmp), page, ""); if (lm.hasResource()) dgen.generate(lm.getResource().getRoot()); else dgen.generate(lm.getDefinition()); dgen.close(); String dict = TextFile.fileToString(tmp.getAbsolutePath()); MappingsGenerator mgen = new MappingsGenerator(page.getDefinitions()); if (lm.hasResource()) mgen.generate(lm.getResource()); else mgen.generate(lm.getDefinition()); String mappings = mgen.getMappings(); String mappingsList = mgen.getMappingsList(); SvgGenerator svg = new SvgGenerator(page, "", lm.getLayout(), true, false); String fn = ig.getPrefix()+n; if (lm.hasResource()) svg.generate(lm.getResource(), page.getFolders().dstDir + fn+".svg", "2"); else svg.generate(lm.getDefinition(), page.getFolders().dstDir + fn+".svg", "2"); String prefix = page.getBreadCrumbManager().getIndexPrefixForReference(lm.getId()+".html"); SectionTracker st = new SectionTracker(prefix, true); st.start(""); page.getSectionTrackerCache().put(fn, st); if (lm.getDefinition() != null) { new XmlParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, ig.getPrefix(), n+".xml")), lm.getDefinition()); cloneToXhtml(ig.getPrefix()+n, "Logical Model "+lm.getDefinition().getName(), true, "logical-model", 
lm.getDefinition().getName(), null, lm.getWg()); new JsonParser().setOutputStyle(OutputStyle.PRETTY).compose(new FileOutputStream(Utilities.path(page.getFolders().dstDir, ig.getPrefix(), n+".json")), lm.getDefinition()); jsonToXhtml(ig.getPrefix()+n, "Logical Model "+lm.getDefinition().getName(), new JsonParser().setOutputStyle(OutputStyle.PRETTY).composeString(lm.getDefinition()), "logical-model", lm.getDefinition().getName(), null, lm.getWg()); ttlToXhtml(ig.getPrefix()+n, "Logical Model "+lm.getDefinition().getName(), new RdfParser().setOutputStyle(OutputStyle.PRETTY).composeString(lm.getDefinition()), "logical-model", lm.getDefinition().getName(), null, lm.getWg()); } if (lm.getWg() != null && lm.getResource().getWg() == null) lm.getResource().setWg(lm.getWg()); String template = "template-logical"; String src = TextFile.fileToString(page.getFolders().srcDir + template+".html"); Map<String, String> values = new HashMap<String, String>(); if (lm.hasResource()) src = insertSectionNumbers(page.processResourceIncludes(n, lm.getResource(), "", "", "", tx, dict, src, mappings, mappingsList, "resource", n + ".html", ig, values, lm.getWg(), examples), st, n + ".html", ig.getLevel(), null); else src = insertSectionNumbers(new LogicalModelProcessor(n, page, ig, lm.getDefinition().getId(), "logical-model", n+".html", lm.getDefinition(), tx, dict, examples, ig.getLogicalModels(), page.getDefinitions()).process(src), st, n + ".html", ig.getLevel(), null); TextFile.stringToFile(src, page.getFolders().dstDir + fn+".html"); page.getHTMLChecker().registerFile(fn+".html", "Base Page for " + n, HTMLLinkChecker.XHTML_TYPE, true); src = TextFile.fileToString(page.getFolders().srcDir + "template-logical-definitions.html"); if (lm.hasResource()) TextFile.stringToFile(insertSectionNumbers(page.processResourceIncludes(n, lm.getResource(), "", "", "", tx, dict, src, mappings, mappingsList, "res-Detailed Descriptions", n + "-definitions.html", ig, values, lm.getWg(), examples), st, n + "-definitions.html", ig.getLevel(), null), page.getFolders().dstDir + fn+"-definitions.html"); else TextFile.stringToFile(insertSectionNumbers(new LogicalModelProcessor(n, page, ig, lm.getDefinition().getId(), "logical-model", n+".html", lm.getDefinition(), tx, dict, examples, ig.getLogicalModels(), page.getDefinitions()).process(src), st, n + "-definitions.html", ig.getLevel(), null), page.getFolders().dstDir + fn+"-definitions.html"); page.getHTMLChecker().registerFile(fn+"-definitions.html", "Detailed Descriptions for " + (lm.hasResource() ? lm.getResource().getName() : lm.getDefinition().getName()), HTMLLinkChecker.XHTML_TYPE, true); src = TextFile.fileToString(page.getFolders().srcDir + "template-logical-examples.html"); if (lm.hasResource()) TextFile.stringToFile(insertSectionNumbers(page.processResourceIncludes(n, lm.getResource(), "", "", "", tx, dict, src, mappings, mappingsList, "resource", n + ".html", ig, values, lm.getWg(), examples), st, n + ".html", ig.getLevel(), null), page.getFolders().dstDir + fn+"-implementations.html"); else TextFile.stringToFile(insertSectionNumbers(new LogicalModelProcessor(n, page, ig, lm.getDefinition().getId(), "logical-model", n+".html", lm.getDefinition(), tx, dict, examples, ig.getLogicalModels(), page.getDefinitions()).process(src), st, n + "-implementations.html", ig.getLevel(), null), page.getFolders().dstDir + fn+"-implementations.html"); page.getHTMLChecker().registerFile(fn+"-implementations.html", "Implementations for " + (lm.hasResource() ? 
lm.getResource().getName() : lm.getDefinition().getName()), HTMLLinkChecker.XHTML_TYPE, true); src = TextFile.fileToString(page.getFolders().srcDir + "template-logical-mappings.html"); if (lm.hasResource()) TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, lm.getResource(), "", "", "", tx, dict, src, mappings, mappingsList, "res-Mappings", n + "-mappings.html", ig, values, lm.getWg(), examples), st, n + "-mappings.html", ig.getLevel(), null), page.getFolders().dstDir + fn + "-mappings.html"); else TextFile.stringToFile(insertSectionNumbers(new LogicalModelProcessor(n, page, ig, lm.getDefinition().getId(), "logical-model", n+".html", lm.getDefinition(), tx, dict, examples, ig.getLogicalModels(), page.getDefinitions()).process(src), st, n + "-mappings.html", ig.getLevel(), null), page.getFolders().dstDir + fn + "-mappings.html"); page.getHTMLChecker().registerFile(fn+"-mappings.html", "Formal Mappings for " + n, HTMLLinkChecker.XHTML_TYPE, true); src = TextFile.fileToString(page.getFolders().srcDir + "template-logical-analysis.html"); if (lm.hasResource()) TextFile.stringToFile( insertSectionNumbers(page.processResourceIncludes(n, lm.getResource(), "", "", "", tx, dict, src, mappings, mappingsList, "res-Analysis", n + "-analysis.html", ig, values, lm.getWg(), examples), st, n + "-analysis.html", ig.getLevel(), null), page.getFolders().dstDir + fn + "-analysis.html"); else TextFile.stringToFile(insertSectionNumbers(new LogicalModelProcessor(n, page, ig, lm.getDefinition().getId(), "logical-model", n+".html", lm.getDefinition(), tx, dict, examples, ig.getLogicalModels(), page.getDefinitions()).process(src), st, n + "-analysis.html", ig.getLevel(), null), page.getFolders().dstDir + fn + "-analysis.html"); page.getHTMLChecker().registerFile(fn+"-analysis.html", "Analysis for " + n, HTMLLinkChecker.XHTML_TYPE, true); tmp.delete(); } private void produceDictionary(Dictionary d) throws Exception { if (web) return; String src = TextFile.fileToString(page.getFolders().srcDir + "template-dictionary.html"); String file = d.getSource(); String prefix = d.getIg() != null ? d.getIg().getCode()+File.separator : ""; String filename = prefix+d.getId(); XmlParser xml = new XmlParser(); Bundle dict = (Bundle) xml.parse(new CSFileInputStream(file)); src = page.processPageIncludes(filename+".html", src, "page", null, dict, null, "Dictionary", null, null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can TextFile.stringToFile(src, page.getFolders().dstDir + filename+".html"); src = addSectionNumbers(filename+".html", filename, src, null, d.getIg() != null ? 
1 : 0, null, d.getIg()); TextFile.stringToFile(src, page.getFolders().dstDir + filename+".html"); src = TextFile.fileToString(page.getFolders().srcDir + "template-dictionary.html").replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(filename+".html", src, "page", dict, null, null); cachePage(filename+".html", src, d.getId(), true); xml.setOutputStyle(OutputStyle.PRETTY); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + filename+".xml"); xml.compose(s, dict); s.close(); xml.setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + filename+".canonical.xml"); xml.compose(s, dict); s.close(); cloneToXhtml(filename, "Source for Dictionary" + d.getName(), false, "dict-instance", "Dictionary", null, null); IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); s = new FileOutputStream(page.getFolders().dstDir+filename+ ".json"); json.compose(s, dict); s.close(); json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir+filename+ ".canonical.json"); json.compose(s, dict); s.close(); jsonToXhtml(filename, "Source for Dictionary" + d.getName(), resource2Json(dict), "dict-instance", "Dictionary", null, null); ttlToXhtml(filename, "Source for Dictionary" + d.getName(), resource2Ttl(dict), "dict-instance", "Dictionary", null, null); throw new Error("must be redone"); // for (BundleEntryComponent e : dict.getEntry()) { // produceDictionaryProfile(d, file, filename, (DataElement) e.getResource(), d.getIg()); // } } // private void produceDictionaryProfile(Dictionary d, String srcbase, String destbase, DataElement de, ImplementationGuideDefn ig) throws Exception { // // first, sort out identifiers // String template = TextFile.fileToString(Utilities.changeFileExt(srcbase, "-profile.xml")); // String file = Utilities.changeFileExt(destbase, "-"+de.getId()); // // // second, generate the profile. // Map<String, String> variables = new HashMap<String, String>(); // variables.put("de_id", de.getId()); // variables.put("de_name", de.getName()); // variables.put("de_definition", Utilities.noString(de.getElement().get(0).getDefinition()) ? "??" 
: de.getElement().get(0).getDefinition()); // variables.put("de_code0_code", de.getElement().get(0).getCode().get(0).getCode()); // Type ucc = ToolingExtensions.getAllowedUnits(de.getElement().get(0)); // if (ucc instanceof CodeableConcept) // variables.put("de_units_code0_code", ((CodeableConcept) ucc).getCoding().get(0).getCode()); // else // variables.put("de_units_code0_code", ""); // String profile = processTemplate(template, variables); // XmlParser xml = new XmlParser(); // StructureDefinition p = (StructureDefinition) xml.parse(new ByteArrayInputStream(profile.getBytes())); // StructureDefinition base = page.getProfiles().get(p.getBaseDefinition()); // if (base == null) // throw new Exception("Unable to find base profile for "+d.getId()+": "+p.getBaseDefinition()+" from "+page.getProfiles().keySet()); // new ProfileUtilities(page.getWorkerContext(), page.getValidationErrors(), page).generateSnapshot(base, p, p.getBaseDefinition(), p.getId()); // ConstraintStructure pd = new ConstraintStructure(p, page.getDefinitions().getUsageIG("hspc", "special HSPC generation"), null, "0", true); // todo // pd.setId(p.getId()); // pd.setTitle(p.getName()); // Profile pack = new Profile("hspc"); // pack.forceMetadata("date", p.getDateElement().asStringValue()); // p.setUserData("filename", file ); // // ByteArrayOutputStream bs = new ByteArrayOutputStream(); // XmlSpecGenerator gen = new XmlSpecGenerator(bs, null, "http://hl7.org/fhir/", page, ""); // gen.generate(p); // gen.close(); // String xmls = new String(bs.toByteArray()); // bs = new ByteArrayOutputStream(); // JsonSpecGenerator genJ = new JsonSpecGenerator(bs, null, "http://hl7.org/fhir/", page, ""); // // genJ.generate(profile.getResource()); // genJ.close(); // String jsons = new String(bs.toByteArray()); // // String tx = ""; //todo // // String src = TextFile.fileToString(page.getFolders().srcDir + "template-profile.html"); // src = page.processProfileIncludes(p.getId(), p.getId(), pack, pd, xmls, jsons, tx, src, file + ".html", "??/??/??", "", "", ig, true, false); // resourceName+"/"+pack.getId()+"/"+profile.getId()); // page.getHTMLChecker().registerFile(file + ".html", "StructureDefinition " + p.getName(), HTMLLinkChecker.XHTML_TYPE, true); // TextFile.stringToFile(src, page.getFolders().dstDir + file + ".html"); // // src = TextFile.fileToString(page.getFolders().srcDir + "template-profile-mappings.html"); // src = page.processProfileIncludes(p.getId(), p.getId(), pack, pd, xmls, jsons, tx, src, file + ".html", "??/??/??", "", "", ig, true, false); // page.getHTMLChecker().registerFile(file + "-mappings.html", "Mappings for StructureDefinition " + p.getName(), HTMLLinkChecker.XHTML_TYPE, true); // TextFile.stringToFile(src, page.getFolders().dstDir + file + "-mappings.html"); // // src = TextFile.fileToString(page.getFolders().srcDir + "template-profile-definitions.html"); // src = page.processProfileIncludes(p.getId(), p.getId(), pack, pd, xmls, jsons, tx, src, file + ".html", "??/??/??", "", "", ig, true, false); // page.getHTMLChecker().registerFile(file + "-definitions.html", "Definitions for StructureDefinition " + p.getName(), HTMLLinkChecker.XHTML_TYPE, true); // TextFile.stringToFile(src, page.getFolders().dstDir + file + "-definitions.html"); // // // now, save the profile and generate equivalents // xml.setOutputStyle(OutputStyle.PRETTY); // FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + file+".profile.xml"); // xml.compose(s, p); // s.close(); // xml.setOutputStyle(OutputStyle.CANONICAL); // s = 
new FileOutputStream(page.getFolders().dstDir + file+".profile.canonical.xml"); // xml.compose(s, p); // s.close(); // cloneToXhtml(file+".profile", "Source for Dictionary" + page.getDefinitions().getDictionaries().get(file), false, "dict-instance", "Profile", null, null); // IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); // s = new FileOutputStream(page.getFolders().dstDir+file+ ".profile.json"); // json.compose(s, p); // s.close(); // json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); // s = new FileOutputStream(page.getFolders().dstDir+file+ ".profile.canonical.json"); // json.compose(s, p); // s.close(); // jsonToXhtml(file+".profile", "Source for Dictionary based StructureDefinition" + page.getDefinitions().getDictionaries().get(file), resource2Json(p), "dict-instance", "Profile", null, null); // new ReviewSpreadsheetGenerator().generate(page.getFolders().dstDir + file+ "-review.xls", "Health Level Seven International", page.getGenDate(), p, page); // } private String processTemplate(String template, Map<String, String> variables) { ST st = new ST(template, '$', '$'); for (String var : variables.keySet()) st.add(var, variables.get(var)); return st.render(); } private void produceSid(int i, String logicalName, String file) throws Exception { String src = TextFile.fileToString(page.getFolders().srcDir + file); String dstName = Utilities.path(page.getFolders().dstDir, "sid", logicalName, "index.html"); src = page.processPageIncludes(dstName, src, "sid:" + logicalName, null, null, null, "Sid", null, null, null); // before we save this page out, we're going to figure out what it's index // is, and number the headers if we can Utilities.createDirectory(Utilities.path(page.getFolders().dstDir, "sid", logicalName)); TextFile.stringToFile(src, dstName); src = addSectionNumbers(Utilities.path("sid", logicalName, "index.html"), "sid:terminologies-systems", src, "3." 
+ Integer.toString(i), 0, null, null); TextFile.stringToFile(src, dstName); page.getHTMLChecker().registerFile(Utilities.path("sid", logicalName, "index.html"), logicalName, HTMLLinkChecker.XHTML_TYPE, true); } @Override public String addSectionNumbers(String file, String logicalName, String src, String id, int level, DocumentHolder doch, ImplementationGuideDefn ig) throws Exception { if (ig != null) logicalName = ig.getCode()+"::"+logicalName; if (!page.getSectionTrackerCache().containsKey(logicalName)) { // String prefix = // page.getNavigation().getIndexPrefixForFile(logicalName+".html"); String prefix; if (ig != null) prefix = ig.getIndexPrefixForFile(file, logicalName + ".html"); else prefix = page.getBreadCrumbManager().getIndexPrefixForFile(logicalName + ".html"); if (Utilities.noString(prefix)) throw new Exception("No indexing home for logical place " + logicalName); page.getSectionTrackerCache().put(logicalName, new SectionTracker(prefix, ig != null)); } SectionTracker st = page.getSectionTrackerCache().get(logicalName); st.start(id); src = insertSectionNumbers(src, st, file, level, doch); return src; } private void produceCompartment(Compartment c) throws Exception { String logicalName = "compartmentdefinition-" + c.getName().toLowerCase(); String file = logicalName + ".html"; String src = TextFile.fileToString(page.getFolders().srcDir + "template-compartment.html"); src = page.processPageIncludes(file, src, "resource-instance:CompartmentDefinition", null, null, null, "Compartment", null, null, wg("fhir")); // String prefix = ""; // if // (!page.getSectionTrackerCache().containsKey("compartmentdefinition-"+c.getName())) // { // prefix = page.getNavigation().getIndexPrefixForFile(logicalName+".html"); // if (Utilities.noString(prefix)) // throw new Exception("No indexing home for logical place "+logicalName); // } // page.getSectionTrackerCache().put(logicalName, new // SectionTracker(prefix)); // TextFile.stringToFile(src, page.getFolders().dstDir + file); // src = insertSectionNumbers(src, // page.getSectionTrackerCache().get(logicalName), file); TextFile.stringToFile(src, page.getFolders().dstDir + file); src = TextFile.fileToString(page.getFolders().srcDir + "template-compartment.html").replace("<body>", "<body style=\"margin: 10px\">"); src = page.processPageIncludesForBook(file, src, "compartment", null, null, null); cachePage(file, src, "Compartments", true); } private String insertSectionNumbers(String src, SectionTracker st, String link, int level, DocumentHolder doch) throws Exception { try { // TextFile.stringToFile(src, "c:\\temp\\text.html"); XhtmlDocument doc = new XhtmlParser().parse(src, "html"); insertSectionNumbersInNode(doc, st, link, level, new BooleanHolder(), null); if (doch != null) doch.doc = doc; return new XhtmlComposer(XhtmlComposer.HTML).compose(doc); } catch (Exception e) { System.out.println(e.getMessage()); //TextFile.stringToFile(src, "c:\\temp\\dump.html"); TextFile.stringToFile(src, Utilities.appendSlash(System.getProperty("user.dir")) + "fhir-error-dump.html"); throw new Exception("Exception inserting section numbers in " + link + ": " + e.getMessage(), e); } } private XhtmlNode findId(XhtmlNode node, String id) { if (id.equals(node.getAttribute("id"))) return node; for (XhtmlNode n : node.getChildNodes()) { XhtmlNode xn = findId(n, id); if (xn != null) return xn; } return null; } private class BooleanHolder { private boolean value; } private void insertSectionNumbersInNode(XhtmlNode node, SectionTracker st, String link, int level, BooleanHolder 
registered, XhtmlNode parent) throws Exception { // while we're looking, mark external references explicitly if (node.getNodeType() == NodeType.Element && node.getName().equals("a") && node.getAttribute("href") != null && node.getAttribute("no-external") == null && (node.getAttribute("href").startsWith("http:") || node.getAttribute("href").startsWith("https:"))) { node.addText(" "); XhtmlNode img = node.addTag("img"); String s = "external.png"; for (int i = 0; i < level; i++) s = "../"+s; img.attribute("src", s); img.attribute("style", "vertical-align: baseline"); } if (node.getNodeType() == NodeType.Element && (node.getName().equals("h1") || node.getName().equals("h2") || node.getName().equals("h3") || node.getName().equals("h4") || node.getName().equals("h5") || node.getName().equals("h6"))) { String v = st.getIndex(Integer.parseInt(node.getName().substring(1))); String sv = v; if (!st.isIg() && !registered.value) { TocEntry t = new TocEntry(v, node.allText(), link, st.isIg()); if (t.getText() == null) t.setText("(No Title?)"); if (!page.getToc().containsKey(v)) { // throw new Exception("Duplicate TOC Entry "+v); page.getToc().put(v, t); registered.value = true; } // else // System.out.println("-- duplicate TOC --> "+v+" = "+t.getLink()+" ("+t.getText()+") in place of "+page.getToc().get(v).getLink()+" ("+page.getToc().get(v).getText()+")"); } else if (parent != null) sv = findSemanticLink(parent, node, sv); node.addText(0, " "); XhtmlNode span = node.addTag(0, "span"); span.setAttribute("class", "sectioncount"); span.addText(v); if (sv.equals(v)) { XhtmlNode a = span.addTag("a"); a.setAttribute("name", v); a.addText(" "); // bug in some browsers? } node.addText(" "); XhtmlNode a = node.addTag("a"); if (node.hasAttribute("class")) throw new Error("test"); else node.setAttribute("class", "self-link-parent"); a.setAttribute("href", (link.contains(File.separator) ? 
link.substring(link.lastIndexOf(File.separator)+1) : link) +"#"+sv); a.setAttribute("title", "link to here"); a.setAttribute("class", "self-link"); XhtmlNode svg = a.addTag("svg"); XhtmlNode path = svg.addTag("path"); String pathData = "M1520 1216q0-40-28-68l-208-208q-28-28-68-28-42 0-72 32 3 3 19 18.5t21.5 21.5 15 19 13 25.5 3.5 27.5q0 40-28 68t-68 28q-15 0-27.5-3.5t-25.5-13-19-15-21.5-21.5-18.5-19q-33 31-33 73 0 40 28 68l206 207q27 27 68 27 40 0 68-26l147-146q28-28 28-67zm-703-705q0-40-28-68l-206-207q-28-28-68-28-39 0-68 27l-147 146q-28 28-28 67 0 40 28 68l208 208q27 27 68 27 42 0 72-31-3-3-19-18.5t-21.5-21.5-15-19-13-25.5-3.5-27.5q0-40 28-68t68-28q15 0 27.5 3.5t25.5 13 19 15 21.5 21.5 18.5 19q33-31 33-73zm895 705q0 120-85 203l-147 146q-83 83-203 83-121 0-204-85l-206-207q-83-83-83-203 0-123 88-209l-88-88q-86 88-208 88-120 0-204-84l-208-208q-84-84-84-204t85-203l147-146q83-83 203-83 121 0 204 85l206 207q83 83 83 203 0 123-88 209l88 88q86-88 208-88 120 0 204 84l208 208q84 84 84 204z"; svg.attribute("height", "20").attribute("width", "20").attribute("viewBox", "0 0 1792 1792").attribute("class", "self-link"); path.attribute("d", pathData).attribute("fill", "navy"); } if (node.getNodeType() == NodeType.Document || (node.getNodeType() == NodeType.Element && !(node.getName().equals("div") && "sidebar".equals(node.getAttribute("class"))))) { for (XhtmlNode n : node.getChildNodes()) { insertSectionNumbersInNode(n, st, link, level, registered, node); } } } private String findSemanticLink(XhtmlNode parent, XhtmlNode child, String def) { int i = parent.getChildNodes().indexOf(child) - 1; while (i >= 0) { XhtmlNode f = parent.getChildNodes().get(i); if (f.getNodeType() == NodeType.Text) { if (!Utilities.isWhitespace(f.getContent())) break; } else if (f.getNodeType() == NodeType.Element) { if (f.getName().equals("a") && f.hasAttribute("name")) { return f.getAttribute("name"); } else break; } i--; } return def; } private void cachePage(String filename, String source, String title, boolean includeInBook) throws Exception { try { // page.log("parse "+filename); XhtmlDocument src = new XhtmlParser().parse(source, "html"); scanForFragments(filename, src); // book.getPages().put(filename, src); page.getHTMLChecker().registerFile(filename, title, HTMLLinkChecker.XHTML_TYPE, includeInBook); } catch (Exception e) { throw new Exception("error parsing page " + filename + ": " + e.getMessage() + " in source\r\n" + source, e); } } private void scanForFragments(String filename, XhtmlNode node) throws Exception { if (node != null && (node.getNodeType() == NodeType.Element || node.getNodeType() == NodeType.Document)) { if (node.getNodeType() == NodeType.Element && node.getName().equals("pre") && node.getAttribute("fragment") != null) { processFragment(filename, node, node.getAttribute("fragment"), node.getAttribute("class"), node.getAttribute("id")); } for (XhtmlNode child : node.getChildNodes()) scanForFragments(filename, child); } } private void processFragment(String filename, XhtmlNode node, String type, String clss, String id) throws Exception { if ("xml".equals(clss)) { String xml = new XhtmlComposer(XhtmlComposer.XML).compose(node); Fragment f = new Fragment(); f.setType(type); f.setXml(Utilities.unescapeXml(xml)); f.setPage(filename); f.setJson(false); f.setId(id); fragments.add(f); } if ("json".equals(clss)) { String xml = new XhtmlComposer(XhtmlComposer.XML).compose(node); Fragment f = new Fragment(); f.setType(type); f.setXml(xml); f.setPage(filename); f.setId(id); f.setJson(true); fragments.add(f); } } 
private void validationProcess() throws Exception { if (!isPostPR) { if (buildFlags.get("all")) runJUnitTestsInProcess(); page.log("Validating Examples", LogMessageType.Process); ExampleInspector ei = new ExampleInspector(page.getWorkerContext(), page, page.getFolders().dstDir, Utilities.path(page.getFolders().rootDir, "tools", "schematron"), page.getValidationErrors(), page.getDefinitions()); page.log(".. Loading", LogMessageType.Process); ei.prepare(); for (String rname : page.getDefinitions().sortedResourceNames()) { ResourceDefn r = page.getDefinitions().getResources().get(rname); if (wantBuild(rname)) { for (Example e : r.getExamples()) { String n = e.getTitle(); ImplementationGuideDefn ig = e.getIg() == null ? null : page.getDefinitions().getIgs().get(e.getIg()); if (ig != null) n = ig.getCode()+File.separator+n; if (validateId == null || validateId.equals(n)) ei.validate(n, rname); } for (Profile e : r.getConformancePackages()) { for (Example en : e.getExamples()) { ImplementationGuideDefn ig = en.getIg() == null ? null : page.getDefinitions().getIgs().get(en.getIg()); String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode()+File.separator; String n = prefix+Utilities.changeFileExt(en.getTitle(), ""); if (validateId == null || validateId.equals(n)) ei.validate(n, rname, e.getProfiles().get(0).getResource()); } } } } for (ImplementationGuideDefn ig : page.getDefinitions().getSortedIgs()) { String prefix = (ig == null || ig.isCore()) ? "" : ig.getCode()+File.separator; for (Example ex : ig.getExamples()) { String n = ex.getTitle(); ei.validate(prefix+n, ex.getResourceName()); } for (Profile pck : ig.getProfiles()) { for (Example en : pck.getExamples()) { ei.validate(prefix+Utilities.changeFileExt(en.getTitle(), ""), en.getResourceName(), pck.getProfiles().get(0).getResource()); } } } if (buildFlags.get("all")) { if (validateId == null || validateId.equals("v2-tables")) ei.validate("v2-tables", "Bundle"); if (validateId == null || validateId.equals("v3-codesystems")) ei.validate("v3-codesystems", "Bundle"); if (validateId == null || validateId.equals("valuesets")) ei.validate("valuesets", "Bundle"); if (validateId == null || validateId.equals("conceptmaps")) ei.validate("conceptmaps", "Bundle"); if (validateId == null || validateId.equals("profiles-types")) ei.validate("profiles-types", "Bundle"); if (validateId == null || validateId.equals("profiles-resources")) ei.validate("profiles-resources", "Bundle"); if (validateId == null || validateId.equals("profiles-others")) ei.validate("profiles-others", "Bundle"); if (validateId == null || validateId.equals("search-parameters")) ei.validate("search-parameters", "Bundle"); if (validateId == null || validateId.equals("extension-definitions")) ei.validate("extension-definitions", "Bundle"); } ei.summarise(); if (buildFlags.get("all")) runJUnitTestsEnd(); if (buildFlags.get("all") && isGenerate) produceCoverageWarnings(); if (buildFlags.get("all")) miscValidation(); } } private void runJUnitTestsInProcess() throws Exception { /* TestingUtilities.context = page.getWorkerContext(); TestingUtilities.silent = true; TestingUtilities.fixedpath = page.getFolders().rootDir; TestingUtilities.contentpath = page.getFolders().dstDir; runJUnitClass(ValidationTestSuite.class); runJUnitClass(FHIRPathTests.class); runJUnitClass(NarrativeGeneratorTests.class); runJUnitClass(SnomedExpressionsTests.class); runJUnitClass(ResourceRoundTripTests.class); runJUnitClass(SnapShotGenerationTests.class); runJUnitClass(GraphQLParserTests.class); 
runJUnitClass(GraphQLEngineTests.class); checkAllOk(); */ } private void runJUnitTestsEnd() throws Exception { /* ValidationEngineTests.inbuild = true; runJUnitClass(ValidationEngineTests.class); runJUnitClass(TransformationTests.class); runJUnitClass(AllGuidesTests.class); checkAllOk(); */ } private void runJUnitClass(Class<?> clzz) { page.log("Run JUnit: "+clzz.getName(), LogMessageType.Process); Result result = JUnitCore.runClasses(clzz); for (Failure failure : result.getFailures()) { page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.EXCEPTION, -1, -1, clzz.getName(), failure.toString(), IssueSeverity.ERROR)); } } private void miscValidation() throws Exception { page.log("Other Validation", LogMessageType.Process); page.clean2(); for (String rn : page.getDefinitions().sortedResourceNames()) { ResourceDefn r = page.getDefinitions().getResourceByName(rn); for (SearchParameterDefn sp : r.getSearchParams().values()) { if (!sp.isWorks() && !sp.getCode().equals("_id") && !Utilities.noString(sp.getExpression())) { page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INFORMATIONAL, -1, -1, rn + "." + sp.getCode(), "Search Parameter '" + rn + "." + sp.getCode() + "' had no found values in any example. Consider reviewing the expression (" + sp.getExpression() + ")", IssueSeverity.WARNING)); } } } } private void produceCoverageWarnings() throws Exception { for (ElementDefn e : page.getDefinitions().getStructures().values()) produceCoverageWarning("", e); for (ElementDefn e : page.getDefinitions().getTypes().values()) produceCoverageWarning("", e); for (String s : page.getDefinitions().sortedResourceNames()) { ResourceDefn e = page.getDefinitions().getResourceByName(s); produceCoverageWarning("", e.getRoot()); } } private void produceCoverageWarning(String path, ElementDefn e) { if (!e.isCoveredByExample() && !Utilities.noString(path) && !e.typeCode().startsWith("@")) { // page.getValidationErrors().add(new ValidationMessage(Source.Publisher, IssueType.INFORMATIONAL, -1, -1, path+e.getName(), "Path had no found values in any example. 
Consider reviewing the path", IssueSeverity.INFORMATION)); } for (ElementDefn c : e.getElements()) { produceCoverageWarning(path + e.getName() + "/", c); } } private void compareXml(String t, String n, String fn1, String fn2) throws Exception { char sc = File.separatorChar; DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); dbf.setCoalescing(true); dbf.setIgnoringElementContentWhitespace(true); dbf.setIgnoringComments(true); DocumentBuilder db = dbf.newDocumentBuilder(); Document doc1 = db.parse(new CSFile(fn1)); doc1.normalizeDocument(); stripWhitespaceAndComments(doc1); Document doc2 = db.parse(new CSFile(fn2)); doc2.normalizeDocument(); stripWhitespaceAndComments(doc2); XmlGenerator xmlgen = new XmlGenerator(); File tmp1 = Utilities.createTempFile("xml", ".xml"); xmlgen.generate(doc1.getDocumentElement(), tmp1, doc1.getDocumentElement().getNamespaceURI(), doc1.getDocumentElement().getLocalName()); File tmp2 = Utilities.createTempFile("xml", ".xml"); xmlgen.generate(doc2.getDocumentElement(), tmp2, doc2.getDocumentElement().getNamespaceURI(), doc2.getDocumentElement().getLocalName()); boolean ok = Utilities.compareIgnoreWhitespace(tmp1, tmp2); if (!ok) { page.getValidationErrors().add( new ValidationMessage(Source.Publisher, IssueType.BUSINESSRULE, -1, -1, "Reference Implementation", "file " + t + " did not round trip perfectly in XML in platform " + n, IssueSeverity.WARNING)); String diff = diffProgram != null ? diffProgram : System.getenv("ProgramFiles(X86)") + sc + "WinMerge" + sc + "WinMergeU.exe"; if (new CSFile(diff).exists()) { List<String> command = new ArrayList<String>(); command.add("\"" + diff + "\" \"" + tmp1.getAbsolutePath() + "\" \"" + tmp2.getAbsolutePath() + "\""); ProcessBuilder builder = new ProcessBuilder(command); builder.directory(new CSFile(page.getFolders().rootDir)); // final Process process = builder.start(); builder.start(); // process.waitFor(); } else { // no diff program page.log("Files for diff: '" + fn1 + "' and '" + fn2 + "'", LogMessageType.Warning); } } } private void stripWhitespaceAndComments(Node node) { if (node.getNodeType() == Node.ELEMENT_NODE) { Element e = (Element) node; Map<String, String> attrs = new HashMap<String, String>(); for (int i = e.getAttributes().getLength() - 1; i >= 0; i--) { attrs.put(e.getAttributes().item(i).getNodeName(), e.getAttributes().item(i).getNodeValue()); e.removeAttribute(e.getAttributes().item(i).getNodeName()); } for (String n : attrs.keySet()) { e.setAttribute(n, attrs.get(n)); } } for (int i = node.getChildNodes().getLength() - 1; i >= 0; i--) { Node c = node.getChildNodes().item(i); if (c.getNodeType() == Node.TEXT_NODE && c.getTextContent().trim().length() == 0) node.removeChild(c); else if (c.getNodeType() == Node.TEXT_NODE) c.setTextContent(c.getTextContent().trim()); else if (c.getNodeType() == Node.COMMENT_NODE) node.removeChild(c); else if (c.getNodeType() == Node.ELEMENT_NODE) stripWhitespaceAndComments(c); } if (node.getNodeType() == Node.ELEMENT_NODE) { node.appendChild(node.getOwnerDocument().createTextNode("\r\n")); } } // public void logNoEoln(String content) { // page.logNoEoln(content); // } @SuppressWarnings("unchecked") private void generateIGValueSetsPart1() throws Exception { for (Resource ae : page.getIgResources().values()) { if (ae instanceof ValueSet) { ValueSet vs = (ValueSet) ae; page.getValueSets().put(vs.getUrl(), vs); } if (ae instanceof CodeSystem) { CodeSystem cs = (CodeSystem) ae; page.getCodeSystems().put(cs.getUrl(), cs); } } } 
private void generateIGValueSetsPart2(ImplementationGuideDefn ig) throws Exception { for (Resource ae : page.getIgResources().values()) { if (ae instanceof ValueSet) { ValueSet vs = (ValueSet) ae; String name = Utilities.fileTitle((String) ae.getUserData("path")); String title = vs.getName(); if (vs.getText() == null || vs.getText().getDiv() == null || vs.getText().getDiv().allChildrenAreText() && (Utilities.noString(vs.getText().getDiv().allText()) || !vs.getText().getDiv().allText().matches(".*\\w.*"))) new NarrativeGenerator("", "", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY).generate(vs, null); page.getVsValidator().validate(page.getValidationErrors(), name, vs, true, false); addToResourceFeed(vs, valueSetsFeed, null); // todo - what should the Oids be String sf = page.processPageIncludes(title + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-vs-ig.html"), "valueSet", null, name+".html", vs, null, "Value Set", null, null, wg("vocab")); sf = addSectionNumbers(title + ".html", "template-valueset", sf, "??", 0, null, ig); TextFile.stringToFile(sf, page.getFolders().dstDir + name + ".html"); String src = page.processPageIncludesForBook(title + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-vs-ig-book.html"), "valueSet", vs, null, null); cachePage(name + ".html", src, "Value Set " + title, false); page.setId(null); IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir+name + ".json"); json.compose(s, vs); s.close(); json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir+name + ".canonical.json"); json.compose(s, vs); s.close(); IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY); s = new FileOutputStream(page.getFolders().dstDir+name + ".xml"); xml.compose(s, vs); s.close(); xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir+name + ".canonical.xml"); xml.compose(s, vs); s.close(); cloneToXhtml(name, "Definition for Value Set" + vs.getName(), false, "valueset-instance", "Value Set", null, wg("vocab")); jsonToXhtml(name, "Definition for Value Set" + vs.getName(), resource2Json(vs), "valueset-instance", "Value Set", null, wg("vocab")); ttlToXhtml(name, "Definition for Value Set" + vs.getName(), resource2Ttl(vs), "valueset-instance", "Value Set", null, wg("vocab")); } } } private void generateCodeSystemsPart2() throws Exception { Set<String> urls = new HashSet<String>(); for (CodeSystem cs : page.getDefinitions().getCodeSystems().values()) { if (cs != null && !cs.hasUserData("external.url")) { if (cs.getUserData("example") == null && !cs.getUrl().contains("/v2-") && !cs.getUrl().contains("/v3-")) if (!urls.contains(cs.getUrl())) { urls.add(cs.getUrl()); generateCodeSystemPart2(cs); } } } } private void generateValueSetsPart2() throws Exception { for (ValueSet vs : page.getDefinitions().getBoundValueSets().values()) { generateValueSetPart2(vs); } for (String s : page.getDefinitions().getExtraValuesets().keySet()) { if (!s.startsWith("http:")) { ValueSet vs = page.getDefinitions().getExtraValuesets().get(s); generateValueSetPart2(vs); } } } private void generateValueSetPart2(ValueSet vs) throws Exception { String n = vs.getUserString("filename"); if (n == null) n = "valueset-"+vs.getId(); ImplementationGuideDefn ig = 
(ImplementationGuideDefn) vs.getUserData(ToolResourceUtilities.NAME_RES_IG); if (ig != null) n = ig.getCode()+File.separator+n; if (!vs.hasText() || (vs.getText().getDiv().allChildrenAreText() && (Utilities.noString(vs.getText().getDiv().allText()) || !vs.getText().getDiv().allText().matches(".*\\w.*")))) { if (ig != null) new NarrativeGenerator("../", ig.getCode()+"/", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY).generate(vs, null); else new NarrativeGenerator("", "", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY).generate(vs, null); } page.getVsValidator().validate(page.getValidationErrors(), n, vs, true, false); if (isGenerate) { // page.log(" ... "+n, LogMessageType.Process); addToResourceFeed(vs, valueSetsFeed, null); if (vs.getUserData("path") == null) vs.setUserData("path", n + ".html"); page.setId(vs.getId()); String sf; try { sf = page.processPageIncludes(n + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-vs.html"), "valueSet", null, n+".html", vs, null, "Value Set", ig, null, wg(vs, "vocab")); } catch (Exception e) { throw new Exception("Error processing "+n+".html: "+e.getMessage(), e); } sf = addSectionNumbers(n + ".html", "template-valueset", sf, vsCounter(), ig == null ? 0 : 1, null, ig); TextFile.stringToFile(sf, page.getFolders().dstDir + n + ".html"); try { String src = page.processPageIncludesForBook(n + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-vs-book.html"), "valueSet", vs, ig, null); cachePage(n + ".html", src, "Value Set " + n, false); page.setId(null); } catch (Exception e) { throw new Exception("Error processing "+n+".html: "+e.getMessage(), e); } IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + n + ".json"); json.compose(s, vs); s.close(); json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.json"); json.compose(s, vs); s.close(); IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY); s = new FileOutputStream(page.getFolders().dstDir + n + ".xml"); xml.compose(s, vs); s.close(); xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.xml"); xml.compose(s, vs); s.close(); // System.out.println(vs.getUrl()); cloneToXhtml(n, "Definition for Value Set" + vs.present(), false, "valueset-instance", "Value Set", null, wg("vocab")); jsonToXhtml(n, "Definition for Value Set" + vs.present(), resource2Json(vs), "valueset-instance", "Value Set", null, wg("vocab")); ttlToXhtml(n, "Definition for Value Set" + vs.present(), resource2Ttl(vs), "valueset-instance", "Value Set", null, wg("vocab")); } } private String vsCounter() { vscounter++; return String.valueOf(vscounter); } private String cmCounter() { cmcounter++; return String.valueOf(cmcounter); } private WorkGroup wg(DomainResource dr, String wg) { String code = ToolingExtensions.readStringExtension(dr, ToolingExtensions.EXT_WORKGROUP); return page.getDefinitions().getWorkgroups().get(Utilities.noString(code) ? 
wg : code); } private void generateCodeSystemPart2(CodeSystem cs) throws Exception { String n = cs.getUserString("filename"); if (n == null) n = "codesystem-"+cs.getId(); ImplementationGuideDefn ig = (ImplementationGuideDefn) cs.getUserData(ToolResourceUtilities.NAME_RES_IG); if (ig != null) n = ig.getCode()+File.separator+n; if (cs.getText().getDiv().allChildrenAreText() && (Utilities.noString(cs.getText().getDiv().allText()) || !cs.getText().getDiv().allText().matches(".*\\w.*"))) { if (ig != null && !ig.isCore()) new NarrativeGenerator("../", ig.getCode()+"/", page.getWorkerContext()).generate(cs, null); else new NarrativeGenerator("", "", page.getWorkerContext()).generate(cs, null); } page.getVsValidator().validate(page.getValidationErrors(), n, cs, true, false); if (isGenerate) { // page.log(" ... "+n, LogMessageType.Process); addToResourceFeed(cs, valueSetsFeed, null); if (cs.getUserData("path") == null) cs.setUserData("path", n + ".html"); page.setId(cs.getId()); String sf; WorkGroup wg = wg(cs, "vocab"); try { sf = page.processPageIncludes(n + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-cs.html"), "codeSystem", null, n+".html", cs, null, "Value Set", ig, null, wg); } catch (Exception e) { throw new Exception("Error processing "+n+".html: "+e.getMessage(), e); } sf = addSectionNumbers(n + ".html", "template-codesystem", sf, csCounter(), ig == null ? 0 : 1, null, ig); TextFile.stringToFile(sf, page.getFolders().dstDir + n + ".html"); try { String src = page.processPageIncludesForBook(n + ".html", TextFile.fileToString(page.getFolders().srcDir + "template-cs-book.html"), "codeSystem", cs, ig, null); cachePage(n + ".html", src, "Code System " + n, false); page.setId(null); } catch (Exception e) { throw new Exception("Error processing "+n+".html: "+e.getMessage(), e); } IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + n + ".json"); json.compose(s, cs); s.close(); json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.json"); json.compose(s, cs); s.close(); IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY); s = new FileOutputStream(page.getFolders().dstDir + n + ".xml"); xml.compose(s, cs); s.close(); xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + n + ".canonical.xml"); xml.compose(s, cs); s.close(); // System.out.println(vs.getUrl()); cloneToXhtml(n, "Definition for Code System " + cs.getName(), false, "codesystem-instance", "Code System", null, wg); jsonToXhtml(n, "Definition for Code System " + cs.getName(), resource2Json(cs), "codesystem-instance", "Code System", null, wg); ttlToXhtml(n, "Definition for Code System " + cs.getName(), resource2Ttl(cs), "codesystem-instance", "Code System", null, wg); } } private String csCounter() { cscounter ++; return String.valueOf(cscounter); } // if (vs.hasCodeSystem()) { // if (ToolingExtensions.getOID(vs.getCodeSystem()) == null && !Utilities.noString(vs.getUserString("csoid"))) // ToolingExtensions.setOID(vs.getCodeSystem(), "urn:oid:"+vs.getUserString("csoid")); // if (ToolingExtensions.getOID(vs.getCodeSystem()) == null) // throw new Exception("No OID on value set define for "+vs.getUrl()); // } // if (vs.hasCodeSystem()) { // page.getCodeSystems().put(vs.getCodeSystem().getSystem(), vs); // page.getDefinitions().getCodeSystems().put(vs.getCodeSystem().getSystem(), vs); // } 
private void generateValueSetsPart1() throws Exception { page.log(" ...value sets", LogMessageType.Process); for (ValueSet vs : page.getDefinitions().getBoundValueSets().values()) { if (!vs.hasText()) { vs.setText(new Narrative()); vs.getText().setStatus(NarrativeStatus.EMPTY); } if (!vs.getText().hasDiv()) { vs.getText().setDiv(new XhtmlNode(NodeType.Element)); vs.getText().getDiv().setName("div"); } if (ValueSetUtilities.getOID(vs) == null) throw new Exception("No OID on value set "+vs.getUrl()); page.getValueSets().put(vs.getUrl(), vs); page.getDefinitions().getValuesets().put(vs.getUrl(), vs); page.getDefinitions().getValuesets().put(vs.getUrl()+"|"+vs.getVersion(), vs); } for (ValueSet vs : page.getDefinitions().getBoundValueSets().values()) { page.getVsValidator().validate(page.getValidationErrors(), vs.getUserString("filename"), vs, true, false); } } private void generateCodeSystemsPart1() throws Exception { page.log(" ...code systems", LogMessageType.Process); for (CodeSystem cs : page.getDefinitions().getCodeSystems().values()) { if (cs != null && !cs.hasUserData("external.url")) { if (!cs.hasText()) { cs.setText(new Narrative()); cs.getText().setStatus(NarrativeStatus.EMPTY); } if (!cs.getText().hasDiv()) { cs.getText().setDiv(new XhtmlNode(NodeType.Element)); cs.getText().getDiv().setName("div"); } // if (ToolingExtensions.getOID(cs) == null) // throw new Exception("No OID on code system "+cs.getUrl()); } } } private void generateConceptMaps() throws Exception { for (ConceptMap cm : page.getConceptMaps().values()) { if (cm.hasUserData("generate")) { generateConceptMap(cm); } } } private void generateConceptMap(ConceptMap cm) throws Exception { String filename = cm.getUserString("path"); NarrativeGenerator gen = new NarrativeGenerator("", "", page.getWorkerContext()).setTooCostlyNoteEmpty(PageProcessor.TOO_MANY_CODES_TEXT_EMPTY).setTooCostlyNoteNotEmpty(PageProcessor.TOO_MANY_CODES_TEXT_NOT_EMPTY); gen.generate(cm, null); IParser json = new JsonParser().setOutputStyle(OutputStyle.PRETTY); FileOutputStream s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(filename, ".json")); json.compose(s, cm); s.close(); json = new JsonParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(filename, ".canonical.json")); json.compose(s, cm); s.close(); String n = Utilities.changeFileExt(filename, ""); jsonToXhtml(n, cm.getName(), resource2Json(cm), "conceptmap-instance", "Concept Map", null, wg("vocab")); ttlToXhtml(n, cm.getName(), resource2Ttl(cm), "conceptmap-instance", "Concept Map", null, wg("vocab")); IParser xml = new XmlParser().setOutputStyle(OutputStyle.PRETTY); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(filename, ".xml")); xml.compose(s, cm); s.close(); xml = new XmlParser().setOutputStyle(OutputStyle.CANONICAL); s = new FileOutputStream(page.getFolders().dstDir + Utilities.changeFileExt(filename, ".canonical.xml")); xml.compose(s, cm); s.close(); cloneToXhtml(n, cm.getName(), false, "conceptmap-instance", "Concept Map", null, wg("vocab")); // now, we create an html page from the narrative String narrative = new XhtmlComposer(XhtmlComposer.HTML).compose(cm.getText().getDiv()); String html = TextFile.fileToString(page.getFolders().srcDir + "template-example.html").replace("<%example%>", narrative); html = page.processPageIncludes(Utilities.changeFileExt(filename, ".html"), html, "conceptmap-instance", null, null, null, "Concept Map", null, null, wg("vocab")); 
TextFile.stringToFile(html, page.getFolders().dstDir + Utilities.changeFileExt(filename, ".html")); conceptMapsFeed.getEntry().add(new BundleEntryComponent().setResource(cm).setFullUrl("http://hl7.org/fhir/"+cm.fhirType()+"/"+cm.getId())); page.getConceptMaps().put(cm.getUrl(), cm); page.getHTMLChecker().registerFile(n + ".html", cm.getName(), HTMLLinkChecker.XHTML_TYPE, false); } public static Map<String, String> splitQuery(URL url) throws UnsupportedEncodingException { Map<String, String> query_pairs = new LinkedHashMap<String, String>(); String query = url.getQuery(); String[] pairs = query.split("&"); for (String pair : pairs) { int idx = pair.indexOf("="); query_pairs.put(URLDecoder.decode(pair.substring(0, idx), "UTF-8"), URLDecoder.decode(pair.substring(idx + 1), "UTF-8")); } return query_pairs; } @Override public javax.xml.transform.Source resolve(String href, String base) throws TransformerException { if (!href.startsWith("http://fhir.healthintersections.com.au/open/ValueSet/$expand")) return null; try { Map<String, String> params = splitQuery(new URL(href)); ValueSet vs = page.getValueSets().get(params.get("identifier")); if (vs == null) { page.log("unable to resolve "+params.get("identifier"), LogMessageType.Process); return null; } vs = page.expandValueSet(vs, true); if (vs == null) { page.log("unable to expand "+params.get("identifier"), LogMessageType.Process); return null; } ByteArrayOutputStream bytes = new ByteArrayOutputStream(); new XmlParser().compose(bytes, vs, false); bytes.close(); return new StreamSource(new ByteArrayInputStream(bytes.toByteArray())); } catch (Exception e) { throw new TransformerException(e); } } }
[ "\"ProgramFiles(X86" ]
[]
[ "ProgramFiles(X8" ]
[]
["ProgramFiles(X8"]
java
1
0
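The Java row above records exactly one constant-argument environment read: the `System.getenv("ProgramFiles(X86)")` call inside `compareXml`, which falls back to a WinMerge path when no diff program is configured. As a minimal sketch of how such row fields could be derived, the extractor below scans source text for `getenv` calls and splits string-literal arguments (`constarg`) from identifier arguments (`variablearg`); the regex, the function name, and the classification rule are illustrative assumptions, not the dataset's actual pipeline.

import json
import re

# Hypothetical extractor: find System.getenv(...) calls and classify the
# argument as constant (a string literal) or variable (an identifier).
# The quoted-string alternative tolerates ')' inside the literal, which
# matters for a name like ProgramFiles(X86).
GETENV = re.compile(r'System\.getenv\(\s*("(?:[^"\\]|\\.)*"|[A-Za-z_$][\w$.]*)\s*\)')

def extract_env_reads(source: str) -> dict:
    environment, constarg, variablearg = [], [], []
    for arg in GETENV.findall(source):
        environment.append(arg)
        if arg.startswith('"'):
            constarg.append(arg[1:-1])  # strip the surrounding quotes
        else:
            variablearg.append(arg)
    return {
        "environment": environment,
        "variablearg": variablearg,
        "constarg": constarg,
        "constargjson": json.dumps(constarg),
        "constargcount": len(constarg),
        "variableargcount": len(variablearg),
    }

snippet = 'String diff = System.getenv("ProgramFiles(X86)") + sc + "WinMerge";'
print(extract_env_reads(snippet)["constarg"])  # ['ProgramFiles(X86)']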
internal/trace/httptrace.go
package trace import ( "context" "fmt" "net/http" "os" "path/filepath" "strconv" "strings" "time" log15 "gopkg.in/inconshreveable/log15.v2" "github.com/felixge/httpsnoop" raven "github.com/getsentry/raven-go" "github.com/gorilla/mux" opentracing "github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go/ext" "github.com/prometheus/client_golang/prometheus" "github.com/sourcegraph/sourcegraph/internal/api" "github.com/sourcegraph/sourcegraph/internal/conf" "github.com/sourcegraph/sourcegraph/internal/repotrackutil" "github.com/sourcegraph/sourcegraph/internal/version" ) type key int const ( routeNameKey key = iota userKey key = iota ) var metricLabels = []string{"route", "method", "code", "repo"} var requestDuration = prometheus.NewHistogramVec(prometheus.HistogramOpts{ Namespace: "src", Subsystem: "http", Name: "request_duration_seconds", Help: "The HTTP request latencies in seconds.", Buckets: UserLatencyBuckets, }, metricLabels) var requestHeartbeat = prometheus.NewGaugeVec(prometheus.GaugeOpts{ Namespace: "src", Subsystem: "http", Name: "requests_last_timestamp_unixtime", Help: "Last time a request finished for a http endpoint.", }, metricLabels) func init() { if err := raven.SetDSN(os.Getenv("SENTRY_DSN_BACKEND")); err != nil { log15.Error("sentry.dsn", "error", err) } raven.SetRelease(version.Version()) raven.SetTagsContext(map[string]string{ "service": filepath.Base(os.Args[0]), }) prometheus.MustRegister(requestDuration) prometheus.MustRegister(requestHeartbeat) go func() { conf.Watch(func() { if conf.Get().Critical.Log == nil { return } if conf.Get().Critical.Log.Sentry == nil { return } // An empty dsn value is ignored: not an error. if err := raven.SetDSN(conf.Get().Critical.Log.Sentry.Dsn); err != nil { log15.Error("sentry.dsn", "error", err) } }) }() } // Middleware captures and exports metrics to Prometheus, etc. // // 🚨 SECURITY: This handler is served to all clients, even on private servers to clients who have // not authenticated. It must not reveal any sensitive information. func Middleware(next http.Handler) http.Handler { return raven.Recoverer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() wireContext, err := opentracing.GlobalTracer().Extract( opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(r.Header)) if err != nil && err != opentracing.ErrSpanContextNotFound { log15.Error("extracting parent span failed", "error", err) } // start new span span := opentracing.StartSpan("", ext.RPCServerOption(wireContext)) ext.HTTPUrl.Set(span, r.URL.String()) ext.HTTPMethod.Set(span, r.Method) span.SetTag("http.referer", r.Header.Get("referer")) defer span.Finish() rw.Header().Set("X-Trace", SpanURL(span)) ctx = opentracing.ContextWithSpan(ctx, span) routeName := "unknown" ctx = context.WithValue(ctx, routeNameKey, &routeName) var userID int32 ctx = context.WithValue(ctx, userKey, &userID) m := httpsnoop.CaptureMetrics(next, rw, r.WithContext(ctx)) if routeName == "graphql" { // We use the query to denote the type of a GraphQL request, e.g. 
/.api/graphql?Repositories if r.URL.RawQuery != "" { routeName = "graphql: " + r.URL.RawQuery } else { routeName = "graphql: unknown" } } // route name is only known after the request has been handled span.SetOperationName("Serve: " + routeName) span.SetTag("Route", routeName) ext.HTTPStatusCode.Set(span, uint16(m.Code)) labels := prometheus.Labels{ "route": routeName, "method": strings.ToLower(r.Method), "code": strconv.Itoa(m.Code), "repo": repotrackutil.GetTrackedRepo(api.RepoName(r.URL.Path)), } requestDuration.With(labels).Observe(m.Duration.Seconds()) requestHeartbeat.With(labels).Set(float64(time.Now().Unix())) // if it's not a graphql request, then this includes graphql_error=false in the log entry gqlErr := false span.Context().ForeachBaggageItem(func(k, v string) bool { if k == "graphql.error" { gqlErr = true } return !gqlErr }) log15.Debug("TRACE HTTP", "method", r.Method, "url", r.URL.String(), "routename", routeName, "trace", SpanURL(span), "userAgent", r.UserAgent(), "user", userID, "xForwardedFor", r.Header.Get("X-Forwarded-For"), "written", m.Written, "code", m.Code, "duration", m.Duration, "graphql_error", strconv.FormatBool(gqlErr), ) // Notify sentry if the status code indicates our system had an error (e.g. 5xx). if m.Code >= 500 { raven.CaptureError(&httpErr{status: m.Code, method: r.Method, path: r.URL.Path}, map[string]string{ "method": r.Method, "url": r.URL.String(), "routename": routeName, "userAgent": r.UserAgent(), "user": fmt.Sprintf("%d", userID), "xForwardedFor": r.Header.Get("X-Forwarded-For"), "written": fmt.Sprintf("%d", m.Written), "duration": m.Duration.String(), }) } })) } func TraceRoute(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { if p, ok := r.Context().Value(routeNameKey).(*string); ok { if routeName := mux.CurrentRoute(r).GetName(); routeName != "" { *p = routeName } } next.ServeHTTP(rw, r) }) } func TraceUser(ctx context.Context, userID int32) { if p, ok := ctx.Value(userKey).(*int32); ok { *p = userID } } // SetRouteName manually sets the name for the route. This should only be used // for non-mux routed routes (ie middlewares). func SetRouteName(r *http.Request, routeName string) { if p, ok := r.Context().Value(routeNameKey).(*string); ok { *p = routeName } } type httpErr struct { status int method string path string } func (e *httpErr) Error() string { return fmt.Sprintf("HTTP status %d, %s %s", e.status, e.method, e.path) }
[ "\"SENTRY_DSN_BACKEND\"" ]
[]
[ "SENTRY_DSN_BACKEND" ]
[]
["SENTRY_DSN_BACKEND"]
go
1
0
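The Go row above is internally consistent: `os.Getenv("SENTRY_DSN_BACKEND")` is the file's only named environment read (the later `raven.SetDSN(conf.Get().Critical.Log.Sentry.Dsn)` call takes its value from site configuration, not the environment), so `constargcount` is 1 and `variableargcount` is 0. A small consistency check over one row might look like the sketch below; the field names follow the schema at the head of this dump, while `check_row` itself is hypothetical.

def check_row(row: dict) -> list:
    """Return schema-consistency problems found in one dataset row."""
    problems = []
    if row["constargcount"] != len(row["constarg"]):
        problems.append("constargcount does not match len(constarg)")
    if row["variableargcount"] != len(row["variablearg"]):
        problems.append("variableargcount does not match len(variablearg)")
    # every extracted name must also appear (quoted) in the environment field
    names = {e.strip('"') for e in row["environment"]}
    for name in row["constarg"]:
        if name not in names:
            problems.append(f"{name!r} missing from environment")
    return problems

row = {
    "environment": ['"SENTRY_DSN_BACKEND"'],
    "variablearg": [],
    "constarg": ["SENTRY_DSN_BACKEND"],
    "constargcount": 1,
    "variableargcount": 0,
}
print(check_row(row))  # []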
config/ranger/commands.py
from ranger.api.commands import Command class fzf_select(Command): """ :fzf_select Find a file using fzf. With a prefix argument to select only directories. See: https://github.com/junegunn/fzf """ def execute(self): import subprocess import os from ranger.ext.get_executables import get_executables if 'fzf' not in get_executables(): self.fm.notify('Could not find fzf in the PATH.', bad=True) return fd = None if 'fdfind' in get_executables(): fd = 'fdfind' elif 'fd' in get_executables(): fd = 'fd' if fd is not None: hidden = ('--hidden' if self.fm.settings.show_hidden else '') exclude = "--no-ignore-vcs --exclude '.git' --exclude '*.py[co]' --exclude '__pycache__'" only_directories = ('--type directory' if self.quantifier else '') fzf_default_command = '{} --follow {} {} {} --color=always'.format( fd, hidden, exclude, only_directories ) else: hidden = ('-false' if self.fm.settings.show_hidden else r"-path '*/\.*' -prune") exclude = r"\( -name '\.git' -o -iname '\.*py[co]' -o -fstype 'dev' -o -fstype 'proc' \) -prune" only_directories = ('-type d' if self.quantifier else '') fzf_default_command = 'find -L . -mindepth 1 {} -o {} -o {} -print | cut -b3-'.format( hidden, exclude, only_directories ) env = os.environ.copy() env['FZF_DEFAULT_COMMAND'] = fzf_default_command env['FZF_DEFAULT_OPTS'] = '--height=40% --layout=reverse --ansi --preview="{}"'.format(''' ( batcat --color=always {} || bat --color=always {} || cat {} || tree -ahpCL 3 -I '.git' -I '*.py[co]' -I '__pycache__' {} ) 2>/dev/null | head -n 100 ''') fzf = self.fm.execute_command('fzf --no-multi', env=env, universal_newlines=True, stdout=subprocess.PIPE) stdout, _ = fzf.communicate() if fzf.returncode == 0: selected = os.path.abspath(stdout.strip()) if os.path.isdir(selected): self.fm.cd(selected) else: self.fm.select_file(selected)
[]
[]
[]
[]
[]
python
0
0
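Every list field of the ranger row is empty even though the command manipulates the environment heavily: `os.environ.copy()` and the `FZF_DEFAULT_COMMAND`/`FZF_DEFAULT_OPTS` assignments are writes into a copied mapping handed to the child process, not reads of a named variable, so there is nothing to extract. A read/write distinction for Python sources could be drawn roughly as below; treating only `os.getenv(...)` and `os.environ.get(...)` with literal arguments as named reads is my assumption, not a documented rule of this dataset.

import ast

def _is_os(node) -> bool:
    # matches a bare reference to the name `os`
    return isinstance(node, ast.Name) and node.id == "os"

def named_env_reads(source: str) -> list:
    """Collect names read via os.getenv("X") or os.environ.get("X");
    subscript writes such as env["X"] = ... are deliberately ignored."""
    reads = []
    for node in ast.walk(ast.parse(source)):
        target = None
        if isinstance(node, ast.Call) and isinstance(node.func, ast.Attribute):
            f = node.func
            if f.attr == "getenv" and _is_os(f.value) and node.args:
                target = node.args[0]
            elif (f.attr == "get" and isinstance(f.value, ast.Attribute)
                  and f.value.attr == "environ" and _is_os(f.value.value)
                  and node.args):
                target = node.args[0]
        if isinstance(target, ast.Constant) and isinstance(target.value, str):
            reads.append(target.value)
    return reads

print(named_env_reads("import os\nenv = os.environ.copy()\nenv['FZF_DEFAULT_OPTS'] = 'x'"))  # []
print(named_env_reads("import os\nhome = os.getenv('HOME')"))  # ['HOME']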
vendor/github.com/containers/podman/v3/libpod/container_log_linux.go
//+build linux //+build systemd package libpod import ( "context" "fmt" "io" "math" "time" "github.com/containers/podman/v3/libpod/define" "github.com/containers/podman/v3/libpod/logs" journal "github.com/coreos/go-systemd/v22/sdjournal" "github.com/hpcloud/tail/watch" "github.com/pkg/errors" "github.com/sirupsen/logrus" ) const ( // journaldLogOut is the journald priority signifying stdout journaldLogOut = "6" // journaldLogErr is the journald priority signifying stderr journaldLogErr = "3" // bufLen is the length of the buffer to read from a k8s-file // formatted log line // let's set it as 2k just to be safe if k8s-file format ever changes bufLen = 16384 ) func (c *Container) readFromJournal(ctx context.Context, options *logs.LogOptions, logChannel chan *logs.LogLine) error { var config journal.JournalReaderConfig if options.Tail < 0 { config.NumFromTail = 0 } else if options.Tail == 0 { config.NumFromTail = math.MaxUint64 } else { config.NumFromTail = uint64(options.Tail) } if options.Multi { config.Formatter = journalFormatterWithID } else { config.Formatter = journalFormatter } defaultTime := time.Time{} if options.Since != defaultTime { // coreos/go-systemd/sdjournal doesn't correctly handle requests for data in the future // return nothing instead of falsely printing if time.Now().Before(options.Since) { return nil } // coreos/go-systemd/sdjournal expects a negative time.Duration for times in the past config.Since = -time.Since(options.Since) } config.Matches = append(config.Matches, journal.Match{ Field: "CONTAINER_ID_FULL", Value: c.ID(), }) options.WaitGroup.Add(1) r, err := journal.NewJournalReader(config) if err != nil { return err } if r == nil { return errors.Errorf("journal reader creation failed") } if options.Tail == math.MaxInt64 { r.Rewind() } state, err := c.State() if err != nil { return err } if options.Follow && state == define.ContainerStateRunning { go func() { done := make(chan bool) until := make(chan time.Time) go func() { select { case <-ctx.Done(): until <- time.Time{} case <-done: // nothing to do anymore } }() go func() { for { state, err := c.State() if err != nil { until <- time.Time{} logrus.Error(err) break } time.Sleep(watch.POLL_DURATION) if state != define.ContainerStateRunning && state != define.ContainerStatePaused { until <- time.Time{} break } } }() follower := FollowBuffer{logChannel} err := r.Follow(until, follower) if err != nil { logrus.Debugf(err.Error()) } r.Close() options.WaitGroup.Done() done <- true return }() return nil } go func() { bytes := make([]byte, bufLen) // /me complains about no do-while in go ec, err := r.Read(bytes) for ec != 0 && err == nil { // because we are reusing bytes, we need to make // sure the old data doesn't get into the new line bytestr := string(bytes[:ec]) logLine, err2 := logs.NewLogLine(bytestr) if err2 != nil { logrus.Error(err2) continue } logChannel <- logLine ec, err = r.Read(bytes) } if err != nil && err != io.EOF { logrus.Error(err) } r.Close() options.WaitGroup.Done() }() return nil } func journalFormatterWithID(entry *journal.JournalEntry) (string, error) { output, err := formatterPrefix(entry) if err != nil { return "", err } id, ok := entry.Fields["CONTAINER_ID_FULL"] if !ok { return "", fmt.Errorf("no CONTAINER_ID_FULL field present in journal entry") } if len(id) > 12 { id = id[:12] } output += fmt.Sprintf("%s ", id) // Append message msg, err := formatterMessage(entry) if err != nil { return "", err } output += msg return output, nil } func journalFormatter(entry *journal.JournalEntry) (string, 
error) { output, err := formatterPrefix(entry) if err != nil { return "", err } // Append message msg, err := formatterMessage(entry) if err != nil { return "", err } output += msg return output, nil } func formatterPrefix(entry *journal.JournalEntry) (string, error) { usec := entry.RealtimeTimestamp tsString := time.Unix(0, int64(usec)*int64(time.Microsecond)).Format(logs.LogTimeFormat) output := fmt.Sprintf("%s ", tsString) priority, ok := entry.Fields["PRIORITY"] if !ok { return "", errors.Errorf("no PRIORITY field present in journal entry") } if priority == journaldLogOut { output += "stdout " } else if priority == journaldLogErr { output += "stderr " } else { return "", errors.Errorf("unexpected PRIORITY field in journal entry") } // if CONTAINER_PARTIAL_MESSAGE is defined, the log type is "P" if _, ok := entry.Fields["CONTAINER_PARTIAL_MESSAGE"]; ok { output += fmt.Sprintf("%s ", logs.PartialLogType) } else { output += fmt.Sprintf("%s ", logs.FullLogType) } return output, nil } func formatterMessage(entry *journal.JournalEntry) (string, error) { // Finally, append the message msg, ok := entry.Fields["MESSAGE"] if !ok { return "", fmt.Errorf("no MESSAGE field present in journal entry") } return msg, nil } type FollowBuffer struct { logChannel chan *logs.LogLine } func (f FollowBuffer) Write(p []byte) (int, error) { bytestr := string(p) logLine, err := logs.NewLogLine(bytestr) if err != nil { return -1, err } f.logChannel <- logLine return len(p), nil }
[]
[]
[]
[]
[]
go
0
0
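The podman row above has no environment access at all: the file reads container logs from the systemd journal and never consults `os.Getenv`, so every list field is empty and both counts resolve to zero. Where counts are missing from a row they can be recovered from the list fields themselves; a defensive loader might normalize them as in this hypothetical sketch.

def normalize_counts(row: dict) -> dict:
    """Fill in missing *count fields from the corresponding list fields."""
    for list_field, count_field in (("constarg", "constargcount"),
                                    ("variablearg", "variableargcount")):
        if row.get(count_field) is None:
            row[count_field] = len(row.get(list_field) or [])
    return row

row = {"environment": [], "variablearg": [], "constarg": [],
       "constargcount": None, "variableargcount": None}
print(normalize_counts(row))  # both counts become 0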
driver/postgres/postgres_test.go
package postgres import ( "database/sql" "os" "testing" "github.com/axiomzen/migrate/file" "github.com/axiomzen/migrate/migrate/direction" pipep "github.com/axiomzen/migrate/pipe" ) // TestMigrate runs some additional tests on Migrate(). // Basic testing is already done in migrate/migrate_test.go func TestMigrate(t *testing.T) { if testing.Short() { t.Skip("skipping test in short mode.") } host := os.Getenv("POSTGRES_PORT_5432_TCP_ADDR") port := os.Getenv("POSTGRES_PORT_5432_TCP_PORT") driverURL := "postgres://postgres@" + host + ":" + port + "/template1?sslmode=disable" // prepare clean database connection, err := sql.Open("postgres", driverURL) if err != nil { t.Fatal(err) } if _, err := connection.Exec(` DROP TABLE IF EXISTS yolo; DROP TABLE IF EXISTS ` + tableName + `;`); err != nil { t.Fatal(err) } d := &Driver{} if err := d.Initialize(driverURL); err != nil { t.Fatal(err) } // testing idempotency: second call should be a no-op, since table already exists if err := d.Initialize(driverURL); err != nil { t.Fatal(err) } files := []file.File{ { Path: "/foobar", FileName: "001_foobar.up.sql", Version: 1, Name: "foobar", Direction: direction.Up, Content: []byte(` CREATE TABLE yolo ( id serial not null primary key ); `), }, { Path: "/foobar", FileName: "002_foobar.down.sql", Version: 1, Name: "foobar", Direction: direction.Down, Content: []byte(` DROP TABLE yolo; `), }, { Path: "/foobar", FileName: "002_foobar.up.sql", Version: 2, Name: "foobar", Direction: direction.Up, Content: []byte(` CREATE TABLE error ( id THIS WILL CAUSE AN ERROR ) `), }, { Path: "/foobar", FileName: "20170118205923_demo.up.sql", Version: 20170118205923, Name: "demo", Direction: direction.Up, Content: []byte(` CREATE TABLE demo ( id serial not null primary key ) `), }, { Path: "/foobar", FileName: "20170118205923_demo.down.sql", Version: 20170118205923, Name: "demo", Direction: direction.Down, Content: []byte(` DROP TABLE demo `), }, } pipe := pipep.New() go d.Migrate(files[0], pipe) errs := pipep.ReadErrors(pipe) if len(errs) > 0 { t.Fatal(errs) } pipe = pipep.New() go d.Migrate(files[1], pipe) errs = pipep.ReadErrors(pipe) if len(errs) > 0 { t.Fatal(errs) } pipe = pipep.New() go d.Migrate(files[2], pipe) errs = pipep.ReadErrors(pipe) if len(errs) == 0 { t.Error("Expected test case to fail") } pipe = pipep.New() go d.Migrate(files[3], pipe) errs = pipep.ReadErrors(pipe) if len(errs) > 0 { t.Fatal(errs) } pipe = pipep.New() go d.Migrate(files[4], pipe) errs = pipep.ReadErrors(pipe) if len(errs) > 0 { t.Fatal(errs) } if err := d.Close(); err != nil { t.Fatal(err) } }
[ "\"POSTGRES_PORT_5432_TCP_ADDR\"", "\"POSTGRES_PORT_5432_TCP_PORT\"" ]
[]
[ "POSTGRES_PORT_5432_TCP_PORT", "POSTGRES_PORT_5432_TCP_ADDR" ]
[]
["POSTGRES_PORT_5432_TCP_PORT", "POSTGRES_PORT_5432_TCP_ADDR"]
go
2
0
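The postgres test row shows the two-literal case: `POSTGRES_PORT_5432_TCP_ADDR` and `POSTGRES_PORT_5432_TCP_PORT` are both read with string-literal arguments, so both land in `constarg` and `constargcount` is 2 while `variablearg` stays empty. For contrast, the sketch below shows what would populate `variablearg` instead: a read whose name is computed at runtime. The Go snippets and the regex are invented for illustration; note that the naive `[^)]` character class only works because these names contain no parentheses, unlike `ProgramFiles(X86)` in the Java row above.

import re

# Hypothetical Go-side classifier: a string-literal argument is a constarg,
# anything else (identifier, call, concatenation) is a variablearg.
GO_GETENV = re.compile(r'os\.Getenv\(\s*([^)]*?)\s*\)')

def classify(source: str):
    constarg, variablearg = [], []
    for arg in GO_GETENV.findall(source):
        if arg.startswith('"') and arg.endswith('"'):
            constarg.append(arg.strip('"'))
        else:
            variablearg.append(arg)
    return constarg, variablearg

literal = 'host := os.Getenv("POSTGRES_PORT_5432_TCP_ADDR")'
computed = 'val := os.Getenv(prefix + "_ADDR")'  # invented variable-arg read
print(classify(literal))   # (['POSTGRES_PORT_5432_TCP_ADDR'], [])
print(classify(computed))  # ([], ['prefix + "_ADDR"'])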
src/ansiblelint/utils.py
# Copyright (c) 2013-2014 Will Thames <[email protected]> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. """Generic utility helpers.""" import contextlib import inspect import logging import os from argparse import Namespace from collections.abc import ItemsView from functools import lru_cache from pathlib import Path from typing import Any, Callable, Dict, Generator, List, Optional, Tuple, Union import yaml from ansible import constants from ansible.errors import AnsibleError, AnsibleParserError from ansible.parsing.dataloader import DataLoader from ansible.parsing.mod_args import ModuleArgsParser from ansible.parsing.splitter import split_args from ansible.parsing.yaml.constructor import AnsibleConstructor from ansible.parsing.yaml.loader import AnsibleLoader from ansible.parsing.yaml.objects import AnsibleSequence from ansible.plugins.loader import add_all_plugin_dirs from ansible.template import Templar try: from ansible.module_utils.parsing.convert_bool import boolean except ImportError: try: from ansible.utils.boolean import boolean except ImportError: try: from ansible.utils import boolean except ImportError: boolean = constants.mk_boolean from yaml.composer import Composer from yaml.representer import RepresenterError from ansiblelint._internal.rules import ( AnsibleParserErrorRule, LoadingFailureRule, RuntimeErrorRule, ) from ansiblelint.constants import FileType from ansiblelint.errors import MatchError from ansiblelint.file_utils import Lintable, get_yaml_files # ansible-lint doesn't need/want to know about encrypted secrets, so we pass a # string as the password to enable such yaml files to be opened and parsed # successfully. 
DEFAULT_VAULT_PASSWORD = 'x' PLAYBOOK_DIR = os.environ.get('ANSIBLE_PLAYBOOK_DIR', None) _logger = logging.getLogger(__name__) def parse_yaml_from_file(filepath: str) -> dict: dl = DataLoader() if hasattr(dl, 'set_vault_password'): dl.set_vault_password(DEFAULT_VAULT_PASSWORD) return dl.load_from_file(filepath) def path_dwim(basedir: str, given: str) -> str: dl = DataLoader() dl.set_basedir(basedir) return dl.path_dwim(given) def ansible_template(basedir: str, varname: Any, templatevars, **kwargs) -> Any: dl = DataLoader() dl.set_basedir(basedir) templar = Templar(dl, variables=templatevars) return templar.template(varname, **kwargs) LINE_NUMBER_KEY = '__line__' FILENAME_KEY = '__file__' VALID_KEYS = [ 'name', 'action', 'when', 'async', 'poll', 'notify', 'first_available_file', 'include', 'include_tasks', 'import_tasks', 'import_playbook', 'tags', 'register', 'ignore_errors', 'delegate_to', 'local_action', 'transport', 'remote_user', 'sudo', 'sudo_user', 'sudo_pass', 'when', 'connection', 'environment', 'args', 'any_errors_fatal', 'changed_when', 'failed_when', 'check_mode', 'delay', 'retries', 'until', 'su', 'su_user', 'su_pass', 'no_log', 'run_once', 'become', 'become_user', 'become_method', FILENAME_KEY, ] BLOCK_NAME_TO_ACTION_TYPE_MAP = { 'tasks': 'task', 'handlers': 'handler', 'pre_tasks': 'task', 'post_tasks': 'task', 'block': 'meta', 'rescue': 'meta', 'always': 'meta', } def tokenize(line: str) -> Tuple[str, List[str], Dict]: tokens = line.lstrip().split(" ") if tokens[0] == '-': tokens = tokens[1:] if tokens[0] == 'action:' or tokens[0] == 'local_action:': tokens = tokens[1:] command = tokens[0].replace(":", "") args = list() kwargs = dict() nonkvfound = False for arg in tokens[1:]: if "=" in arg and not nonkvfound: kv = arg.split("=", 1) kwargs[kv[0]] = kv[1] else: nonkvfound = True args.append(arg) return (command, args, kwargs) def _playbook_items(pb_data: dict) -> ItemsView: if isinstance(pb_data, dict): return pb_data.items() if not pb_data: return [] # "if play" prevents failure if the play sequence contains None, # which is weird but currently allowed by Ansible # https://github.com/ansible-community/ansible-lint/issues/849 return [item for play in pb_data if play for item in play.items()] def _set_collections_basedir(basedir: str): # Sets the playbook directory as playbook_paths for the collection loader try: # Ansible 2.10+ # noqa: # pylint:disable=cyclic-import,import-outside-toplevel from ansible.utils.collection_loader import AnsibleCollectionConfig AnsibleCollectionConfig.playbook_paths = basedir except ImportError: # Ansible 2.8 or 2.9 # noqa: # pylint:disable=cyclic-import,import-outside-toplevel from ansible.utils.collection_loader import set_collection_playbook_paths set_collection_playbook_paths(basedir) def find_children(lintable: Lintable) -> List[Lintable]: # noqa: C901 if not lintable.path.exists(): return [] playbook_dir = str(lintable.path.parent) _set_collections_basedir(playbook_dir or os.path.abspath('.')) add_all_plugin_dirs(playbook_dir or '.') if lintable.kind == 'role': playbook_ds = {'roles': [{'role': str(lintable.path)}]} elif lintable.kind not in ("playbook", "tasks"): return [] else: try: playbook_ds = parse_yaml_from_file(str(lintable.path)) except AnsibleError as e: raise SystemExit(str(e)) results = [] basedir = os.path.dirname(str(lintable.path)) # playbook_ds can be an AnsibleUnicode string, which we consider invalid if isinstance(playbook_ds, str): raise MatchError(filename=str(lintable.path), rule=LoadingFailureRule) for item in 
_playbook_items(playbook_ds):
        for child in play_children(basedir, item, lintable.kind, playbook_dir):
            # We avoid processing parametrized children
            path_str = str(child.path)
            if "$" in path_str or "{{" in path_str:
                continue

            # Repair incorrect paths obtained when old syntax was used, like:
            # - include: simpletask.yml tags=nginx
            valid_tokens = list()
            for token in split_args(path_str):
                if '=' in token:
                    break
                valid_tokens.append(token)
            path = ' '.join(valid_tokens)
            if path != path_str:
                child.path = Path(path)
                child.name = child.path.name

            results.append(child)
    return results


def template(
    basedir: str, value: Any, variables, fail_on_undefined=False, **kwargs
) -> Any:
    try:
        value = ansible_template(
            os.path.abspath(basedir),
            value,
            variables,
            **dict(kwargs, fail_on_undefined=fail_on_undefined),
        )
        # Hack to skip the following exception when using to_json filter on a variable.
        # I guess the filter doesn't like empty vars...
    except (AnsibleError, ValueError, RepresenterError):
        # templating failed, so just keep value as is.
        pass
    return value


def play_children(
    basedir: str, item: Tuple[str, Any], parent_type, playbook_dir
) -> List[Lintable]:
    delegate_map: Dict[str, Callable[[str, Any, Any, FileType], List[Lintable]]] = {
        'tasks': _taskshandlers_children,
        'pre_tasks': _taskshandlers_children,
        'post_tasks': _taskshandlers_children,
        'block': _taskshandlers_children,
        'include': _include_children,
        'import_playbook': _include_children,
        'roles': _roles_children,
        'dependencies': _roles_children,
        'handlers': _taskshandlers_children,
        'include_tasks': _include_children,
        'import_tasks': _include_children,
    }
    (k, v) = item
    add_all_plugin_dirs(os.path.abspath(basedir))

    if k in delegate_map:
        if v:
            v = template(
                os.path.abspath(basedir),
                v,
                dict(playbook_dir=PLAYBOOK_DIR or os.path.abspath(basedir)),
                fail_on_undefined=False,
            )
        return delegate_map[k](basedir, k, v, parent_type)
    return []


def _include_children(basedir: str, k, v, parent_type) -> List[Lintable]:
    # handle special case include_tasks: name=filename.yml
    if k == 'include_tasks' and isinstance(v, dict) and 'file' in v:
        v = v['file']

    # handle include: filename.yml tags=blah
    (command, args, kwargs) = tokenize("{0}: {1}".format(k, v))

    result = path_dwim(basedir, args[0])
    if not os.path.exists(result):
        result = path_dwim(os.path.join(os.path.dirname(basedir)), v)
    return [Lintable(result, kind=parent_type)]


def _taskshandlers_children(basedir, k, v, parent_type: FileType) -> List[Lintable]:
    results: List[Lintable] = []
    if v is None:
        raise MatchError(
            message="A malformed block was encountered while loading a block.",
            rule=RuntimeErrorRule(),
        )
    for th in v:
        # ignore empty tasks, `-`
        if not th:
            continue

        with contextlib.suppress(LookupError):
            children = _get_task_handler_children_for_tasks_or_playbooks(
                th,
                basedir,
                k,
                parent_type,
            )
            results.append(children)
            continue

        if (
            'include_role' in th or 'import_role' in th
        ):  # lgtm [py/unreachable-statement]
            th = normalize_task_v2(th)
            _validate_task_handler_action_for_role(th['action'])
            results.extend(
                _roles_children(
                    basedir,
                    k,
                    [th['action'].get("name")],
                    parent_type,
                    main=th['action'].get('tasks_from', 'main'),
                )
            )
            continue

        if 'block' not in th:
            continue

        results.extend(_taskshandlers_children(basedir, k, th['block'], parent_type))
        if 'rescue' in th:
            results.extend(
                _taskshandlers_children(basedir, k, th['rescue'], parent_type)
            )
        if 'always' in th:
            results.extend(
                _taskshandlers_children(basedir, k, th['always'], parent_type)
            )

    return results


def _get_task_handler_children_for_tasks_or_playbooks(
    task_handler,
    basedir: str,
    k,
    parent_type: FileType,
) -> Lintable:
    """Try to get children of taskhandler for include/import tasks/playbooks."""
    child_type = k if parent_type == 'playbook' else parent_type

    task_include_keys = 'include', 'include_tasks', 'import_playbook', 'import_tasks'
    for task_handler_key in task_include_keys:

        with contextlib.suppress(KeyError):

            # ignore empty tasks
            if not task_handler:
                continue

            return Lintable(
                path_dwim(basedir, task_handler[task_handler_key]), kind=child_type
            )

    raise LookupError(
        f'The node contains none of: {", ".join(task_include_keys)}',
    )


def _validate_task_handler_action_for_role(th_action: dict) -> None:
    """Verify that the task handler action is valid for role include."""
    module = th_action['__ansible_module__']

    if 'name' not in th_action:
        raise MatchError(message=f"Failed to find required 'name' key in {module!s}")

    if not isinstance(th_action['name'], str):
        raise MatchError(
            message=f"Value assigned to 'name' key on '{module!s}' is not a string.",
        )


def _roles_children(
    basedir: str, k, v, parent_type: FileType, main='main'
) -> List[Lintable]:
    results: List[Lintable] = []
    for role in v:
        if isinstance(role, dict):
            if 'role' in role or 'name' in role:
                if 'tags' not in role or 'skip_ansible_lint' not in role['tags']:
                    results.extend(
                        _look_for_role_files(
                            basedir, role.get('role', role.get('name')), main=main
                        )
                    )
            elif k != 'dependencies':
                raise SystemExit(
                    'role dict {0} does not contain a "role" '
                    'or "name" key'.format(role)
                )
        else:
            results.extend(_look_for_role_files(basedir, role, main=main))
    return results


def _rolepath(basedir: str, role: str) -> Optional[str]:
    role_path = None

    possible_paths = [
        # if included from a playbook
        path_dwim(basedir, os.path.join('roles', role)),
        path_dwim(basedir, role),
        # if included from roles/[role]/meta/main.yml
        path_dwim(basedir, os.path.join('..', '..', '..', 'roles', role)),
        path_dwim(basedir, os.path.join('..', '..', role)),
        # if checking a role in the current directory
        path_dwim(basedir, os.path.join('..', role)),
    ]

    if constants.DEFAULT_ROLES_PATH:
        search_locations = constants.DEFAULT_ROLES_PATH
        if isinstance(search_locations, str):
            search_locations = search_locations.split(os.pathsep)
        for loc in search_locations:
            loc = os.path.expanduser(loc)
            possible_paths.append(path_dwim(loc, role))

    possible_paths.append(path_dwim(basedir, ''))

    for path_option in possible_paths:
        if os.path.isdir(path_option):
            role_path = path_option
            break

    if role_path:
        add_all_plugin_dirs(role_path)

    return role_path


def _look_for_role_files(basedir: str, role: str, main='main') -> List[Lintable]:
    role_path = _rolepath(basedir, role)
    if not role_path:
        return []

    results = []

    for kind in ['tasks', 'meta', 'handlers']:
        current_path = os.path.join(role_path, kind)
        for folder, subdirs, files in os.walk(current_path):
            for file in files:
                file_ignorecase = file.lower()
                if file_ignorecase.endswith(('.yml', '.yaml')):
                    thpath = os.path.join(folder, file)
                    # TODO(ssbarnea): Find correct way to pass kind: FileType
                    results.append(Lintable(thpath, kind=kind))  # type: ignore

    return results


def rolename(filepath):
    idx = filepath.find('roles/')
    if idx < 0:
        return ''
    role = filepath[idx + 6:]
    role = role[:role.find('/')]
    return role


def _kv_to_dict(v):
    (command, args, kwargs) = tokenize(v)
    return dict(__ansible_module__=command, __ansible_arguments__=args, **kwargs)


def _sanitize_task(task: dict) -> dict:
    """Return a stripped-off task structure compatible with new Ansible.

    This helper takes a copy of the incoming task and drops any
    internally used keys from it.
    """
    result = task.copy()
    # task is an AnsibleMapping which inherits from OrderedDict, so we need
    # to use `del` to remove unwanted keys.
    for k in ['skipped_rules', FILENAME_KEY, LINE_NUMBER_KEY]:
        if k in result:
            del result[k]
    return result


def normalize_task_v2(task: Dict[str, Any]) -> Dict[str, Any]:
    """Ensure tasks have an action key and strings are converted to python objects."""
    result = dict()

    sanitized_task = _sanitize_task(task)
    mod_arg_parser = ModuleArgsParser(sanitized_task)
    try:
        action, arguments, result['delegate_to'] = mod_arg_parser.parse()
    except AnsibleParserError as e:
        raise MatchError(
            rule=AnsibleParserErrorRule(),
            message=e.message,
            filename=task.get(FILENAME_KEY, "Unknown"),
            linenumber=task.get(LINE_NUMBER_KEY, 0),
        )

    # denormalize shell -> command conversion
    if '_uses_shell' in arguments:
        action = 'shell'
        del arguments['_uses_shell']

    for (k, v) in list(task.items()):
        if k in ('action', 'local_action', 'args', 'delegate_to') or k == action:
            # we don't want to re-assign these values, which were
            # determined by the ModuleArgsParser() above
            continue
        result[k] = v

    result['action'] = dict(__ansible_module__=action)

    if '_raw_params' in arguments:
        result['action']['__ansible_arguments__'] = arguments['_raw_params'].split(' ')
        del arguments['_raw_params']
    else:
        result['action']['__ansible_arguments__'] = list()

    if 'argv' in arguments and not result['action']['__ansible_arguments__']:
        result['action']['__ansible_arguments__'] = arguments['argv']
        del arguments['argv']

    result['action'].update(arguments)
    return result


def normalize_task_v1(task):  # noqa: C901
    result = dict()
    for (k, v) in task.items():
        if k in VALID_KEYS or k.startswith('with_'):
            if k in ('local_action', 'action'):
                if not isinstance(v, dict):
                    v = _kv_to_dict(v)
                v['__ansible_arguments__'] = v.get('__ansible_arguments__', list())
                result['action'] = v
            else:
                result[k] = v
        else:
            if isinstance(v, str):
                v = _kv_to_dict(k + ' ' + v)
            elif not v:
                v = dict(__ansible_module__=k)
            else:
                if isinstance(v, dict):
                    v.update(dict(__ansible_module__=k))
                else:
                    if k == '__line__':
                        # Keep the line number stored
                        result[k] = v
                        continue

                    # Tasks that include playbooks (rather than task files)
                    # can get here
                    # https://github.com/ansible-community/ansible-lint/issues/138
                    raise RuntimeError(
                        "Was not expecting value %s of type %s for key %s\n"
                        "Task: %s. Check the syntax of your playbook using "
                        "ansible-playbook --syntax-check"
                        % (str(v), type(v), k, str(task))
                    )
            v['__ansible_arguments__'] = v.get('__ansible_arguments__', list())
            result['action'] = v

    if 'module' in result['action']:
        # this happens when a task uses
        # local_action:
        #   module: ec2
        #   etc...
        result['action']['__ansible_module__'] = result['action']['module']
        del result['action']['module']
    if 'args' in result:
        result['action'].update(result.get('args'))
        del result['args']
    return result


def normalize_task(task: Dict[str, Any], filename: str) -> Dict[str, Any]:
    ansible_action_type = task.get('__ansible_action_type__', 'task')
    if '__ansible_action_type__' in task:
        del task['__ansible_action_type__']
    task = normalize_task_v2(task)
    task[FILENAME_KEY] = filename
    task['__ansible_action_type__'] = ansible_action_type
    return task


def task_to_str(task: Dict[str, Any]) -> str:
    name = task.get("name")
    if name:
        return str(name)
    action = task.get("action")
    if isinstance(action, str) or not isinstance(action, dict):
        return str(action)
    args = " ".join(
        [
            "{0}={1}".format(k, v)
            for (k, v) in action.items()
            if k
            not in [
                "__ansible_module__",
                "__ansible_arguments__",
                "__line__",
                "__file__",
            ]
        ]
    )
    for item in action.get("__ansible_arguments__", []):
        args += f" {item}"

    return u"{0} {1}".format(action["__ansible_module__"], args)


def extract_from_list(blocks, candidates: List[str]) -> List[Any]:
    results = list()
    for block in blocks:
        for candidate in candidates:
            if isinstance(block, dict) and candidate in block:
                if isinstance(block[candidate], list):
                    results.extend(add_action_type(block[candidate], candidate))
                elif block[candidate] is not None:
                    raise RuntimeError(
                        "Key '%s' defined, but bad value: '%s'"
                        % (candidate, str(block[candidate]))
                    )
    return results


def add_action_type(actions, action_type: str) -> List[Any]:
    results = list()
    for action in actions:
        # ignore empty task
        if not action:
            continue
        action['__ansible_action_type__'] = BLOCK_NAME_TO_ACTION_TYPE_MAP[action_type]
        results.append(action)
    return results


def get_action_tasks(yaml, file: Lintable) -> List[Any]:
    tasks = list()
    if file.kind in ['tasks', 'handlers']:
        tasks = add_action_type(yaml, file.kind)
    else:
        tasks.extend(
            extract_from_list(yaml, ['tasks', 'handlers', 'pre_tasks', 'post_tasks'])
        )

    # Add sub-elements of block/rescue/always to tasks list
    tasks.extend(extract_from_list(tasks, ['block', 'rescue', 'always']))
    # Remove block/rescue/always elements from tasks list
    block_rescue_always = ('block', 'rescue', 'always')
    tasks[:] = [
        task for task in tasks if all(k not in task for k in block_rescue_always)
    ]

    return [
        task
        for task in tasks
        if set(
            ['include', 'include_tasks', 'import_playbook', 'import_tasks']
        ).isdisjoint(task.keys())
    ]


def get_normalized_tasks(yaml, file: Lintable) -> List[Dict[str, Any]]:
    tasks = get_action_tasks(yaml, file)
    res = []
    for task in tasks:
        # An empty `tags` block causes `None` to be returned if
        # the `or []` is not present - `task.get('tags', [])`
        # does not suffice.
        if 'skip_ansible_lint' in (task.get('tags') or []):
            # No need to normalize_task if we are skipping it.
            continue
        res.append(normalize_task(task, str(file.path)))

    return res


@lru_cache(maxsize=128)
def parse_yaml_linenumbers(data, filename):
    """Parse yaml as ansible.utils.parse_yaml but with linenumbers.

    The line numbers are stored in each node's LINE_NUMBER_KEY key.
    """

    def compose_node(parent, index):
        # the line number where the previous token has ended (plus empty lines)
        line = loader.line
        node = Composer.compose_node(loader, parent, index)
        node.__line__ = line + 1
        return node

    def construct_mapping(node, deep=False):
        mapping = AnsibleConstructor.construct_mapping(loader, node, deep=deep)
        if hasattr(node, '__line__'):
            mapping[LINE_NUMBER_KEY] = node.__line__
        else:
            mapping[LINE_NUMBER_KEY] = mapping._line_number
        mapping[FILENAME_KEY] = filename
        return mapping

    try:
        kwargs = {}
        if 'vault_password' in inspect.getfullargspec(AnsibleLoader.__init__).args:
            kwargs['vault_password'] = DEFAULT_VAULT_PASSWORD
        loader = AnsibleLoader(data, **kwargs)
        loader.compose_node = compose_node
        loader.construct_mapping = construct_mapping
        data = loader.get_single_data()
    except (yaml.parser.ParserError, yaml.scanner.ScannerError) as e:
        raise SystemExit("Failed to parse YAML in %s: %s" % (filename, str(e)))
    return data


def get_first_cmd_arg(task: Dict[str, Any]) -> Any:
    try:
        if 'cmd' in task['action']:
            first_cmd_arg = task['action']['cmd'].split()[0]
        else:
            first_cmd_arg = task['action']['__ansible_arguments__'][0]
    except IndexError:
        return None
    return first_cmd_arg


def is_playbook(filename: str) -> bool:
    """
    Check if the file is a playbook.

    Given a filename, it should return true if it looks like a playbook. The
    function is not supposed to raise exceptions.
    """
    # we assume it is a playbook if we loaded a sequence of dictionaries where
    # at least one of these keys is present:
    playbooks_keys = {
        "gather_facts",
        "hosts",
        "import_playbook",
        "post_tasks",
        "pre_tasks",
        "roles",
        "tasks",
    }

    # makes it work with Path objects by converting them to strings
    if not isinstance(filename, str):
        filename = str(filename)

    try:
        f = parse_yaml_from_file(filename)
    except Exception as e:
        _logger.warning(
            "Failed to load %s with %s, assuming it is not a playbook.", filename, e
        )
    else:
        if (
            isinstance(f, AnsibleSequence)
            and hasattr(next(iter(f), {}), 'keys')
            and playbooks_keys.intersection(next(iter(f), {}).keys())
        ):
            return True
    return False


# pylint: disable=too-many-statements
def get_lintables(
    options: Namespace = Namespace(), args: Optional[List[str]] = None
) -> List[Lintable]:
    """Detect files and directories that are lintable."""
    lintables: List[Lintable] = []

    # passing args bypasses auto-detection mode
    if args:
        for arg in args:
            lintable = Lintable(arg)
            if lintable.kind in ("yaml", None):
                _logger.warning(
                    "Overriding detected file kind '%s' with 'playbook' "
                    "for given positional argument: %s",
                    lintable.kind,
                    arg,
                )
                lintable = Lintable(arg, kind="playbook")
            lintables.append(lintable)
    else:
        for filename in get_yaml_files(options):
            p = Path(filename)
            # skip exclusions
            try:
                for file_path in options.exclude_paths:
                    if str(p.resolve()).startswith(str(file_path)):
                        raise FileNotFoundError(
                            f'File {file_path} matched exclusion entry: {p}'
                        )
            except FileNotFoundError as e:
                _logger.debug('Ignored %s due to: %s', p, e)
                continue

            lintables.append(Lintable(p))

    # stage 2: guess roles from current lintables, as there is no unique
    # file that must be present in any kind of role.
    _extend_with_roles(lintables)

    return lintables


def _extend_with_roles(lintables: List[Lintable]) -> None:
    """Detect roles among lintables and add them to the list."""
    for lintable in lintables:
        parts = lintable.path.parent.parts
        if 'roles' in parts:
            role = lintable.path
            while role.parent.name != "roles" and role.name:
                role = role.parent
            if role.exists:
                lintable = Lintable(role, kind="role")
                if lintable not in lintables:
                    _logger.debug("Added role: %s", lintable)
                    lintables.append(lintable)


def convert_to_boolean(value: Any) -> bool:
    """Use Ansible to convert something to a boolean."""
    return bool(boolean(value))


def nested_items(
    data: Union[Dict[Any, Any], List[Any]]
) -> Generator[Tuple[Any, Any], None, None]:
    """Iterate a nested data structure."""
    if isinstance(data, dict):
        for k, v in data.items():
            yield k, v
            for k, v in nested_items(v):
                yield k, v
    if isinstance(data, list):
        for item in data:
            yield "list-item", item
            for k, v in nested_items(item):
                yield k, v
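# A minimal usage sketch of nested_items() above (hypothetical, not part of
# the original module): it yields every key/value pair at every depth and
# uses "list-item" as the key for list elements.
def _nested_items_example():
    task = {"name": "demo", "block": [{"command": "uptime"}]}
    for key, value in nested_items(task):
        print(key, value)
    # prints: name demo / block [{'command': 'uptime'}] /
    #         list-item {'command': 'uptime'} / command uptime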
[]
[]
[ "ANSIBLE_PLAYBOOK_DIR" ]
[]
["ANSIBLE_PLAYBOOK_DIR"]
python
1
0
pkg/in/api.go
package in

import (
    "encoding/json"
    "fmt"
    "net/http"
    "os"
    "strings"

    "github.com/aws/aws-lambda-go/events"
    "github.com/tidwall/gjson"
)

// Incident is a type of ticket
type Incident struct {
    Comment        string `json:"comment,omitempty"`
    CommentID      string `json:"comment_sysid,omitempty"`
    Identifier     string `json:"id,omitempty"`
    IntComment     string `json:"internal_comment,omitempty"`
    IntCommentID   string `json:"internal_comment_sysid,omitempty"`
    Description    string `json:"description,omitempty"`
    ExtID          string `json:"external_identifier,omitempty"`
    IntID          string `json:"internal_identifier,omitempty"`
    Priority       string `json:"priority,omitempty"`
    Reporter       string `json:"reporter_name,omitempty"`
    Resolution     string `json:"resolution,omitempty"`
    ResolutionCode string `json:"resolution_code,omitempty"`
    Service        string `json:"business_service,omitempty"`
    Status         string `json:"status,omitempty"`
    Summary        string `json:"summary,omitempty"`
}

// newIncident initialises an Incident
func newIncident() *Incident {
    return &Incident{}
}

// checkIncidentVars checks that the incoming payload has the required field values
func checkIncidentVars(input string) error {
    vars := []string{
        "DESCRIPTION_FIELD",
        "INTID_FIELD",
        "PRIORITY_FIELD",
        "REPORTER_FIELD",
        "STATUS_FIELD",
        "SUMMARY_FIELD",
    }

    for _, v := range vars {
        field, ok := os.LookupEnv(v)
        if !ok {
            return fmt.Errorf("missing environment variable: %v", v)
        }

        value := gjson.Get(input, field)
        if !value.Exists() {
            return fmt.Errorf("missing value in payload: %v", field)
        }
    }
    return nil
}

// parseIncident gets values from an inbound incident
func parseIncident(input string) (*Incident, error) {
    i := newIncident()

    i.ExtID = gjson.Get(input, os.Getenv("EXTID_FIELD")).Str

    // check for required values in new tickets only
    if i.ExtID == "" {
        err := checkIncidentVars(input)
        if err != nil {
            return nil, err
        }
    }

    i.Description = gjson.Get(input, os.Getenv("DESCRIPTION_FIELD")).Str
    i.Comment = gjson.Get(input, os.Getenv("COMMENT_FIELD")).Str
    i.CommentID = gjson.Get(input, os.Getenv("COMMENT_ID_FIELD")).Str
    i.IntComment = gjson.Get(input, os.Getenv("INTERNAL_COMMENT_FIELD")).Str
    i.IntCommentID = gjson.Get(input, os.Getenv("INTERNAL_COMMENT_ID_FIELD")).Str
    i.IntID = gjson.Get(input, os.Getenv("INTID_FIELD")).Str
    i.Priority = gjson.Get(input, os.Getenv("PRIORITY_FIELD")).Str
    i.Reporter = gjson.Get(input, os.Getenv("REPORTER_FIELD")).Str
    i.Resolution = gjson.Get(input, os.Getenv("RESOLUTION_FIELD")).Str
    i.Service = gjson.Get(input, os.Getenv("SERVICE_FIELD")).Str
    i.Status = gjson.Get(input, os.Getenv("STATUS_FIELD")).Str
    i.Summary = gjson.Get(input, os.Getenv("SUMMARY_FIELD")).Str

    // treat both types of comment as customer-visible comments on JSD;
    // initialise the comment id if nil, as it's being used as a sort key
    switch {
    case i.IntCommentID == "" && i.CommentID == "":
        i.CommentID = "0"
    case i.IntCommentID != "" && i.CommentID == "":
        i.CommentID = i.IntCommentID
        i.Comment = i.IntComment
        i.IntComment = ""
    case i.IntCommentID == "" && i.CommentID != "":
        break
    case i.IntCommentID != "" && i.CommentID != "":
        break
    }

    // assign to an organisation in JSD
    switch i.Service {
    case "CSOC":
        i.Service = "59"
    case "Cyclamen IT Platform Local":
        i.Service = "9"
    case "I-LEAP":
        i.Service = "58"
    case "Semaphore":
        i.Service = "45"
    default:
        i.Service = "65"
    }

    fmt.Printf("parsed incident: %v, status: %v, comment id: %v\n", i.Identifier, i.Status, i.CommentID)

    return i, nil
}

// Handle sends an incoming request to parser and processor, and returns a http response
func Handle(request *events.APIGatewayProxyRequest) (events.APIGatewayProxyResponse, error) {
    inc, err := parseIncident(request.Body)
    if err != nil {
        return events.APIGatewayProxyResponse{
            StatusCode: http.StatusBadRequest,
            Body:       err.Error(),
        }, err
    }

    // assign identifier according to the api endpoint used;
    // this is to differentiate between tickets initially created in JSD vs SNOW
    switch request.Resource {
    case "/v2/add":
        inc.Identifier = inc.ExtID
    case "/v2/in":
        inc.Identifier = inc.IntID
    default:
        // err is nil here since parseIncident succeeded, so construct an
        // explicit error for the unknown endpoint
        err := fmt.Errorf("unknown resource: %v", request.Resource)
        return events.APIGatewayProxyResponse{
            StatusCode: http.StatusBadRequest,
            Body:       err.Error(),
        }, err
    }

    res, err := process(inc)
    if err != nil {
        return events.APIGatewayProxyResponse{
            StatusCode: http.StatusInternalServerError,
            Body:       err.Error(),
        }, err
    }

    msg := struct {
        ExtID string `json:"external_identifier,omitempty"`
    }{
        ExtID: strings.Trim(res, `", \`),
    }

    bmsg, err := json.Marshal(msg)
    if err != nil {
        return events.APIGatewayProxyResponse{
            StatusCode: http.StatusInternalServerError,
            Body:       err.Error(),
        }, err
    }

    return events.APIGatewayProxyResponse{
        StatusCode: http.StatusOK,
        Body:       string(bmsg),
    }, nil
}
[ "\"EXTID_FIELD\"", "\"DESCRIPTION_FIELD\"", "\"COMMENT_FIELD\"", "\"COMMENT_ID_FIELD\"", "\"INTERNAL_COMMENT_FIELD\"", "\"INTERNAL_COMMENT_ID_FIELD\"", "\"INTID_FIELD\"", "\"PRIORITY_FIELD\"", "\"REPORTER_FIELD\"", "\"RESOLUTION_FIELD\"", "\"SERVICE_FIELD\"", "\"STATUS_FIELD\"", "\"SUMMARY_FIELD\"" ]
[]
[ "COMMENT_ID_FIELD", "COMMENT_FIELD", "INTERNAL_COMMENT_FIELD", "SERVICE_FIELD", "STATUS_FIELD", "DESCRIPTION_FIELD", "INTID_FIELD", "RESOLUTION_FIELD", "EXTID_FIELD", "PRIORITY_FIELD", "REPORTER_FIELD", "INTERNAL_COMMENT_ID_FIELD", "SUMMARY_FIELD" ]
[]
["COMMENT_ID_FIELD", "COMMENT_FIELD", "INTERNAL_COMMENT_FIELD", "SERVICE_FIELD", "STATUS_FIELD", "DESCRIPTION_FIELD", "INTID_FIELD", "RESOLUTION_FIELD", "EXTID_FIELD", "PRIORITY_FIELD", "REPORTER_FIELD", "INTERNAL_COMMENT_ID_FIELD", "SUMMARY_FIELD"]
go
13
0
bot.go
package main

import (
    "fmt"
    "io/ioutil"
    "log"
    "os"
    "strconv"
    "time"

    "github.com/tidwall/gjson"
    tele "gopkg.in/tucnak/telebot.v2"
)

const dataFile = "data.json"
const timeout = 5

var isTest = false

func main() {
    t := os.Getenv("lgbtcntest")
    if t == "" {
        isTest = false
    }
    b, err := strconv.ParseBool(t)
    if err == nil {
        isTest = b
    }
    fmt.Printf("[I] TEST MODE: %v\n", isTest)

    bot, err := tele.NewBot(
        tele.Settings{
            Token: os.Getenv("Token"),
            Poller: &tele.LongPoller{
                Timeout: timeout * time.Second,
            },
        },
    )
    if err != nil {
        log.Fatal(err)
        os.Exit(1)
    }
    fmt.Println("[I] BOT CREATED WITHOUT ANY ERR.")

    month := time.Now().Format("01")
    day := time.Now().Format("02")

    c := os.Getenv("Chat_ID")
    ChatID, errC := strconv.ParseInt(c, 10, 64)
    if errC != nil {
        fmt.Println("[E] CAN NOT PARSE CHAT_ID TO INT64! EXIT!")
        os.Exit(1)
    }
    fmt.Println("[I] GET CHAT_ID SUCCESSFULLY")

    // bot.Start()
    _, errS := bot.Send(tele.ChatID(ChatID), historyToday(month, day), tele.NoPreview, "Markdown")
    fmt.Println("[I] MSG SENT. EXITING.")
    // After sending the msg, exit, since the bot is driven by CI/FaaS & cron
    if errS == nil {
        os.Exit(0)
    }
    fmt.Println(errS)
    os.Exit(1)
}

func historyToday(month, day string) string {
    data, _ := ioutil.ReadFile(dataFile)
    tip := gjson.Get(string(data), month+".tip")
    today := month + "-" + day
    if tip.String() != "" {
        return tip.String() + "\n=====\n\n" + eventList(month, day) + "\n" + today
    }
    if isTest {
        return "[TEST]\n" + eventList(month, day) + "\n" + today
    }
    return eventList(month, day) + "\n" + today
}

func eventList(month, day string) string {
    data, _ := ioutil.ReadFile(dataFile)
    count, _ := strconv.Atoi((gjson.Get(string(data), month+"."+day+".#")).String())
    event := ""
    if (gjson.Get(string(data), month+"."+day+".0")).String() != "" {
        for i := 0; i < count; i++ {
            event = event + "\n" + (gjson.Get(string(data), month+"."+day+"."+strconv.Itoa(i))).String()
        }
        return event
    }
    event = "暂无历史今天的性少数群体历程\n你可以[前往 GitHub 提交数据](https://github.com/LGBT-CN/HistoryToday/edit/master/data.json)"
    return event
}
[ "\"lgbtcntest\"", "\"Token\"", "\"Chat_ID\"" ]
[]
[ "Token", "lgbtcntest", "Chat_ID" ]
[]
["Token", "lgbtcntest", "Chat_ID"]
go
3
0
cmd/malwareExecuteScan_generated.go
// Code generated by piper's step-generator. DO NOT EDIT.

package cmd

import (
    "fmt"
    "os"
    "time"

    "github.com/SAP/jenkins-library/pkg/config"
    "github.com/SAP/jenkins-library/pkg/log"
    "github.com/SAP/jenkins-library/pkg/telemetry"
    "github.com/spf13/cobra"
)

type malwareExecuteScanOptions struct {
    Host     string `json:"host,omitempty"`
    Username string `json:"username,omitempty"`
    Password string `json:"password,omitempty"`
    File     string `json:"file,omitempty"`
    Timeout  string `json:"timeout,omitempty"`
}

// MalwareExecuteScanCommand Performs a malware scan
func MalwareExecuteScanCommand() *cobra.Command {
    const STEP_NAME = "malwareExecuteScan"

    metadata := malwareExecuteScanMetadata()
    var stepConfig malwareExecuteScanOptions
    var startTime time.Time

    var createMalwareExecuteScanCmd = &cobra.Command{
        Use:   STEP_NAME,
        Short: "Performs a malware scan",
        Long:  `Performs a malware scan`,
        PreRunE: func(cmd *cobra.Command, _ []string) error {
            startTime = time.Now()
            log.SetStepName(STEP_NAME)
            log.SetVerbose(GeneralConfig.Verbose)

            path, _ := os.Getwd()
            fatalHook := &log.FatalHook{CorrelationID: GeneralConfig.CorrelationID, Path: path}
            log.RegisterHook(fatalHook)

            err := PrepareConfig(cmd, &metadata, STEP_NAME, &stepConfig, config.OpenPiperFile)
            if err != nil {
                log.SetErrorCategory(log.ErrorConfiguration)
                return err
            }
            log.RegisterSecret(stepConfig.Username)
            log.RegisterSecret(stepConfig.Password)

            if len(GeneralConfig.HookConfig.SentryConfig.Dsn) > 0 {
                sentryHook := log.NewSentryHook(GeneralConfig.HookConfig.SentryConfig.Dsn, GeneralConfig.CorrelationID)
                log.RegisterHook(&sentryHook)
            }

            return nil
        },
        Run: func(_ *cobra.Command, _ []string) {
            telemetryData := telemetry.CustomData{}
            telemetryData.ErrorCode = "1"
            handler := func() {
                config.RemoveVaultSecretFiles()
                telemetryData.Duration = fmt.Sprintf("%v", time.Since(startTime).Milliseconds())
                telemetryData.ErrorCategory = log.GetErrorCategory().String()
                telemetry.Send(&telemetryData)
            }
            log.DeferExitHandler(handler)
            defer handler()
            telemetry.Initialize(GeneralConfig.NoTelemetry, STEP_NAME)
            malwareExecuteScan(stepConfig, &telemetryData)
            telemetryData.ErrorCode = "0"
            log.Entry().Info("SUCCESS")
        },
    }

    addMalwareExecuteScanFlags(createMalwareExecuteScanCmd, &stepConfig)
    return createMalwareExecuteScanCmd
}

func addMalwareExecuteScanFlags(cmd *cobra.Command, stepConfig *malwareExecuteScanOptions) {
    cmd.Flags().StringVar(&stepConfig.Host, "host", os.Getenv("PIPER_host"), "malware scanning host.")
    cmd.Flags().StringVar(&stepConfig.Username, "username", os.Getenv("PIPER_username"), "User")
    cmd.Flags().StringVar(&stepConfig.Password, "password", os.Getenv("PIPER_password"), "Password")
    cmd.Flags().StringVar(&stepConfig.File, "file", os.Getenv("PIPER_file"), "The file which is scanned for malware")
    cmd.Flags().StringVar(&stepConfig.Timeout, "timeout", `600`, "timeout for http layer in seconds")

    cmd.MarkFlagRequired("host")
    cmd.MarkFlagRequired("username")
    cmd.MarkFlagRequired("password")
    cmd.MarkFlagRequired("file")
}

// retrieve step metadata
func malwareExecuteScanMetadata() config.StepData {
    var theMetaData = config.StepData{
        Metadata: config.StepMetadata{
            Name:        "malwareExecuteScan",
            Aliases:     []config.Alias{},
            Description: "Performs a malware scan",
        },
        Spec: config.StepSpec{
            Inputs: config.StepInputs{
                Parameters: []config.StepParameters{
                    {
                        Name:        "host",
                        ResourceRef: []config.ResourceReference{},
                        Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:        "string",
                        Mandatory:   true,
                        Aliases:     []config.Alias{},
                    },
                    {
                        Name: "username",
                        ResourceRef: []config.ResourceReference{
                            {
                                Name:  "malwareScanCredentialsId",
                                Param: "username",
                                Type:  "secret",
                            },
                        },
                        Scope:     []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:      "string",
                        Mandatory: true,
                        Aliases:   []config.Alias{},
                    },
                    {
                        Name: "password",
                        ResourceRef: []config.ResourceReference{
                            {
                                Name:  "malwareScanCredentialsId",
                                Param: "password",
                                Type:  "secret",
                            },
                        },
                        Scope:     []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:      "string",
                        Mandatory: true,
                        Aliases:   []config.Alias{},
                    },
                    {
                        Name:        "file",
                        ResourceRef: []config.ResourceReference{},
                        Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:        "string",
                        Mandatory:   true,
                        Aliases:     []config.Alias{},
                    },
                    {
                        Name:        "timeout",
                        ResourceRef: []config.ResourceReference{},
                        Scope:       []string{"PARAMETERS", "STAGES", "STEPS"},
                        Type:        "string",
                        Mandatory:   false,
                        Aliases:     []config.Alias{},
                    },
                },
            },
        },
    }
    return theMetaData
}
[ "\"PIPER_host\"", "\"PIPER_username\"", "\"PIPER_password\"", "\"PIPER_file\"" ]
[]
[ "PIPER_password", "PIPER_username", "PIPER_file", "PIPER_host" ]
[]
["PIPER_password", "PIPER_username", "PIPER_file", "PIPER_host"]
go
4
0
insolar/jetcoordinator/utils_test.go
// Copyright 2020 Insolar Network Ltd.
// All rights reserved.
// This material is licensed under the Insolar License version 1.0,
// available at https://github.com/insolar/insolar/blob/master/LICENSE.md.

package jetcoordinator

import (
    "crypto/rand"
    "fmt"
    "os"
    "strings"
    "testing"

    "github.com/insolar/insolar/insolar"
    "github.com/insolar/insolar/insolar/gen"
    "github.com/insolar/insolar/platformpolicy"
    "github.com/insolar/insolar/utils/entropy"
)

// In reality this compares no-sort vs with-sort plus in/out conversions of an
// array of empty interfaces. These benchmark results are suitable for
// analyzing how much we lose on input/output conversion alone after the
// sorting removal.
//
// prepare benchmark results:
// go test -v ./ledger/jetcoordinator/ -bench=SelectByEntropy -cpu=1 -benchmem -run=NONE > wrapped.txt
// SelectByEntropyBench=orig go test -v ./ledger/jetcoordinator/ -bench=SelectByEntropy -cpu=1 -benchmem -run=NONE > orig.txt
//
// measure overhead:
// benchcmp orig.txt wrapped.txt
func BenchmarkSelectByEntropy(b *testing.B) {
    benchtype := strings.ToLower(os.Getenv("SelectByEntropyBench"))
    switch benchtype {
    case "orig", "wrapped":
        // all ok
    case "":
        benchtype = "wrapped"
    default:
        panic(fmt.Sprintf("Unknown benchtype %v", benchtype))
    }

    benches := []struct {
        values int
        count  int
    }{
        {10, 1}, {10, 5}, {10, 10},
        {100, 1}, {100, 50}, {100, 100},
        {1000, 1}, {1000, 500}, {1000, 1000},
    }
    fmt.Printf("# Bench: %v\n", benchtype)
    for _, bench := range benches {
        b.Run(
            fmt.Sprintf("%v_from_%v", bench.count, bench.values),
            func(b *testing.B) {
                if benchtype == "orig" {
                    benchSelectByEntropy(b, bench.values, bench.count)
                    return
                }
                benchSelectByEntropyWrapped(b, bench.values, bench.count)
            })
    }
}

// sinks so the compiler does not optimize away calls to the benched function
var resultsI []interface{}
var resultsB [][]byte

func benchSelectByEntropy(b *testing.B, valuescount int, count int) {
    scheme := platformpolicy.NewPlatformCryptographyScheme()
    entropybytes := randslice(64)

    values := make([]interface{}, 0, valuescount)
    for i := 0; i < valuescount; i++ {
        values = append(values, interface{}(randslice(64)))
    }

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        // sort.SliceStable(valuesB, )
        // bytes.Compare(a, b) < 0
        resultsI, _ = entropy.SelectByEntropy(scheme, entropybytes, values, count)
    }
}

// sink so the compiler does not optimize away calls to the benched function
var refresults []insolar.Reference

func benchSelectByEntropyWrapped(b *testing.B, valuescount int, count int) {
    scheme := platformpolicy.NewPlatformCryptographyScheme()
    var e insolar.Entropy
    copy(e[:], randslice(64))

    values := make([]insolar.Node, 0, valuescount)
    for i := 0; i < valuescount; i++ {
        values = append(values, insolar.Node{ID: gen.Reference()})
    }

    b.ResetTimer()
    for i := 0; i < b.N; i++ {
        refresults, _ = getRefs(scheme, e[:], values, count)
    }
}

func randslice(size int) []byte {
    b := make([]byte, size)
    rand.Read(b)
    return b
}
[ "\"SelectByEntropyBench\"" ]
[]
[ "SelectByEntropyBench" ]
[]
["SelectByEntropyBench"]
go
1
0
core/main.go
package autospotting

import (
    "io/ioutil"
    "log"
    "os"
    "strings"
    "sync"

    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/ec2"
    "github.com/aws/aws-sdk-go/service/ec2/ec2iface"
)

var logger, debug *log.Logger

var hourlySavings float64
var savingsMutex = &sync.RWMutex{}

// Run starts processing all AWS regions looking for AutoScaling groups
// enabled and taking action by replacing pricier on-demand instances with
// compatible and cheaper spot instances.
func Run(cfg *Config) {
    setupLogging(cfg)

    debug.Println(*cfg)

    // use this only to list all the other regions
    ec2Conn := connectEC2(cfg.MainRegion)

    addDefaultFilteringMode(cfg)
    addDefaultFilter(cfg)

    allRegions, err := getRegions(ec2Conn)
    if err != nil {
        logger.Println(err.Error())
        return
    }

    processRegions(allRegions, cfg)
}

func addDefaultFilteringMode(cfg *Config) {
    if cfg.TagFilteringMode != "opt-out" {
        debug.Printf("Configured filtering mode: '%s', considering it as 'opt-in'(default)\n",
            cfg.TagFilteringMode)
        cfg.TagFilteringMode = "opt-in"
    } else {
        debug.Println("Configured filtering mode: 'opt-out'")
    }
}

func addDefaultFilter(cfg *Config) {
    if len(strings.TrimSpace(cfg.FilterByTags)) == 0 {
        switch cfg.TagFilteringMode {
        case "opt-out":
            cfg.FilterByTags = "spot-enabled=false"
        default:
            cfg.FilterByTags = "spot-enabled=true"
        }
    }
}

func disableLogging() {
    setupLogging(&Config{LogFile: ioutil.Discard})
}

func setupLogging(cfg *Config) {
    logger = log.New(cfg.LogFile, "", cfg.LogFlag)
    if os.Getenv("AUTOSPOTTING_DEBUG") == "true" {
        debug = log.New(cfg.LogFile, "", cfg.LogFlag)
    } else {
        debug = log.New(ioutil.Discard, "", 0)
    }
}

// processRegions iterates all regions in parallel and replaces instances for
// each of the ASGs tagged with the tags specified by cfg.FilterByTags; by
// default this is all ASGs with the tag 'spot-enabled=true'.
func processRegions(regions []string, cfg *Config) {
    var wg sync.WaitGroup

    for _, r := range regions {
        wg.Add(1)
        r := region{name: r, conf: cfg}

        go func() {
            if r.enabled() {
                logger.Printf("Enabled to run in %s, processing region.\n", r.name)
                r.processRegion()
            } else {
                debug.Println("Not enabled to run in", r.name)
                debug.Println("List of enabled regions:", cfg.Regions)
            }
            wg.Done()
        }()
    }
    wg.Wait()
}

func connectEC2(region string) *ec2.EC2 {
    sess, err := session.NewSession()
    if err != nil {
        panic(err)
    }

    return ec2.New(sess,
        aws.NewConfig().WithRegion(region))
}

// getRegions generates a list of AWS regions.
func getRegions(ec2conn ec2iface.EC2API) ([]string, error) {
    var output []string

    logger.Println("Scanning for available AWS regions")

    resp, err := ec2conn.DescribeRegions(&ec2.DescribeRegionsInput{})
    if err != nil {
        logger.Println(err.Error())
        return nil, err
    }

    debug.Println(resp)

    for _, r := range resp.Regions {
        if r != nil && r.RegionName != nil {
            debug.Println("Found region", *r.RegionName)
            output = append(output, *r.RegionName)
        }
    }
    return output, nil
}
[ "\"AUTOSPOTTING_DEBUG\"" ]
[]
[ "AUTOSPOTTING_DEBUG" ]
[]
["AUTOSPOTTING_DEBUG"]
go
1
0
app/drinks_api.py
import requests
import os
import logging
from random import choice
from app.exceptions import *

MAX_INGREDIENTS_IN_DRINK = 16

logger = logging.getLogger(__name__)

try:
    api_key = os.environ['COCKTAIL_DB_API_KEY']
    api_url = f'https://www.thecocktaildb.com/api/json/v2/{api_key}/'
except KeyError:
    api_key = 1
    api_url = f'https://www.thecocktaildb.com/api/json/v1/{api_key}/'

is_empty = lambda s: (s is None or len(s) == 0)


def get_all_ingredients():
    """
    Return list of all available ingredients from API

    Returns:
        list

    Raises:
        APIError: error connecting to the API
    """
    url = f'{api_url}list.php?i=list'
    try:
        r = requests.get(url)
    except requests.exceptions.RequestException as e:
        logger.error('Error connecting to the API')
        raise APIConnectionError(url)
    data = r.json()
    ingredients = [d['strIngredient1'] for d in data['drinks']]
    logger.info(ingredients)
    return ingredients


def get_drink_by_id(drink_id):
    """
    Return drink info by id

    Args:
        drink_id (str)

    Returns:
        drink dict from API

    Raises:
        APIError: error connecting to the API
        NoDrinksFound: no drinks with the given id
    """
    url = f'{api_url}/lookup.php?i={drink_id}'
    try:
        r = requests.get(url)
    except requests.exceptions.RequestException as e:
        logger.error('Error connecting to the API')
        raise APIConnectionError(url)
    try:
        data = r.json()
        return data['drinks'][0]
    except ValueError:
        raise NoDrinksFound(f'Invalid drink id {drink_id}')


def get_drink_by_name(drink_name):
    """
    Return drink info by drink name

    Args:
        drink_name (str)

    Returns:
        drink dict from API

    Raises:
        APIError: error connecting to the API
        NoDrinksFound: no drinks with the given name
    """
    url = f'{api_url}/search.php?s={drink_name}'
    try:
        r = requests.get(url)
    except requests.exceptions.RequestException as e:
        logger.error('Error connecting to the API')
        raise APIConnectionError(url)
    try:
        data = r.json()
        return data['drinks'][0]
    except TypeError:
        raise NoDrinksFound(f'Drink `{drink_name}` not found')


def get_random_drink():
    """
    Return a random drink

    Returns:
        dict
    """
    url = f'{api_url}/random.php'
    try:
        r = requests.get(url)
        return r.json()['drinks'][0]
    except requests.exceptions.RequestException as e:
        logger.error('Error connecting to the API')
        raise APIConnectionError(url)


def get_drinks_by_ingredients(ingredients):
    """
    Return list of drinks which contain the ingredient

    Args:
        ingredients (str)

    Returns:
        list[dicts]: list of dictionaries that contain info about the drink
                     dict keys: strDrink, strDrinkThumb, idDrink

    Raises:
        APIError: error connecting to the API
        NoDrinksFound: no drinks with the ingredients
    """
    url = f'{api_url}/filter.php?i={ingredients}'
    try:
        r = requests.get(url)
    except requests.exceptions.RequestException as e:
        logger.error('Error connecting to the API')
        raise APIConnectionError(url)
    data = r.json()['drinks']
    if data == 'None Found':
        raise NoDrinksFound
    else:
        return data


def get_random_drink_id_by_ingredients(ingredients):
    """
    Get random drink from all drinks that contain the ingredients

    Args:
        ingredients (str)

    Returns:
        str - id of the random drink
    """
    drinks = get_drinks_by_ingredients(ingredients)
    random_drink = choice(drinks)
    return random_drink['idDrink']


def clean_up_ingredients(drink_dict):
    """
    Return dict with all ingredients (measure + name) in `ingredients` key.

    Args:
        drink_dict (dict): drink dict from API

    Returns:
        dict
    """
    ingredients_list = []
    for i in range(1, MAX_INGREDIENTS_IN_DRINK + 1):
        ingredient = f'strIngredient{i}'
        measure = f'strMeasure{i}'
        if is_empty(drink_dict[ingredient]):
            break
        else:
            if is_empty(drink_dict[measure]):
                ingredients_list.append(drink_dict[ingredient])
            else:
                ingredients_list.append(f"{drink_dict[measure]} {drink_dict[ingredient]}")
    drink_dict['ingredients'] = '\n'.join(ingredients_list)
    return drink_dict


if __name__ == '__main__':
    from exceptions import *
    x = get_drink_by_name('adfsjklsadl')
    print(x)
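# A minimal usage sketch of the helpers above (hypothetical, not part of
# the original module): fetch a drink by name and flatten its ingredients.
# Assumes the API is reachable; 'margarita' is only an example name.
def _drink_example():
    drink = clean_up_ingredients(get_drink_by_name('margarita'))
    print(drink['strDrink'])
    print(drink['ingredients'])  # one "measure ingredient" per line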
[]
[]
[ "COCKTAIL_DB_API_KEY" ]
[]
["COCKTAIL_DB_API_KEY"]
python
1
0
common/flogging/logging.go
/*
Copyright IBM Corp. All Rights Reserved.

SPDX-License-Identifier: Apache-2.0
*/

package flogging

import (
    "fmt"
    "io"
    "os"
    "sync"

    "github.com/paul-lee-attorney/fabric-2.1-gm/common/flogging/fabenc"
    zaplogfmt "github.com/sykesm/zap-logfmt"
    "go.uber.org/zap"
    "go.uber.org/zap/zapcore"
)

// Config is used to provide dependencies to a Logging instance.
type Config struct {
    // Format is the log record format specifier for the Logging instance. If the
    // spec is the string "json", log records will be formatted as JSON. Any
    // other string will be provided to the FormatEncoder. Please see
    // fabenc.ParseFormat for details on the supported verbs.
    //
    // If Format is not provided, a default format that provides basic information will
    // be used.
    Format string

    // LogSpec determines the log levels that are enabled for the logging system. The
    // spec must be in a format that can be processed by ActivateSpec.
    //
    // If LogSpec is not provided, loggers will be enabled at the INFO level.
    LogSpec string

    // Writer is the sink for encoded and formatted log records.
    //
    // If a Writer is not provided, os.Stderr will be used as the log sink.
    Writer io.Writer
}

// Logging maintains the state associated with the fabric logging system. It is
// intended to bridge between the legacy logging infrastructure built around
// go-logging and the structured, level logging provided by zap.
type Logging struct {
    *LoggerLevels

    mutex          sync.RWMutex
    encoding       Encoding
    encoderConfig  zapcore.EncoderConfig
    multiFormatter *fabenc.MultiFormatter
    writer         zapcore.WriteSyncer
    observer       Observer
}

// New creates a new logging system and initializes it with the provided
// configuration.
func New(c Config) (*Logging, error) {
    encoderConfig := zap.NewProductionEncoderConfig()
    encoderConfig.NameKey = "name"

    l := &Logging{
        LoggerLevels: &LoggerLevels{
            defaultLevel: defaultLevel,
        },
        encoderConfig:  encoderConfig,
        multiFormatter: fabenc.NewMultiFormatter(),
    }

    err := l.Apply(c)
    if err != nil {
        return nil, err
    }
    return l, nil
}

// Apply applies the provided configuration to the logging system.
func (l *Logging) Apply(c Config) error {
    err := l.SetFormat(c.Format)
    if err != nil {
        return err
    }

    if c.LogSpec == "" {
        c.LogSpec = os.Getenv("FABRIC_LOGGING_SPEC")
    }
    if c.LogSpec == "" {
        c.LogSpec = defaultLevel.String()
    }

    err = l.LoggerLevels.ActivateSpec(c.LogSpec)
    if err != nil {
        return err
    }

    if c.Writer == nil {
        c.Writer = os.Stderr
    }
    l.SetWriter(c.Writer)

    return nil
}

// SetFormat updates how log records are formatted and encoded. Log entries
// created after this method has completed will use the new format.
//
// An error is returned if the log format specification cannot be parsed.
func (l *Logging) SetFormat(format string) error {
    l.mutex.Lock()
    defer l.mutex.Unlock()

    if format == "" {
        format = defaultFormat
    }

    if format == "json" {
        l.encoding = JSON
        return nil
    }

    if format == "logfmt" {
        l.encoding = LOGFMT
        return nil
    }

    formatters, err := fabenc.ParseFormat(format)
    if err != nil {
        return err
    }
    l.multiFormatter.SetFormatters(formatters)
    l.encoding = CONSOLE

    return nil
}

// SetWriter controls which writer formatted log records are written to.
// Writers, with the exception of an *os.File, need to be safe for concurrent
// use by multiple go routines.
func (l *Logging) SetWriter(w io.Writer) io.Writer {
    var sw zapcore.WriteSyncer
    switch t := w.(type) {
    case *os.File:
        sw = zapcore.Lock(t)
    case zapcore.WriteSyncer:
        sw = t
    default:
        sw = zapcore.AddSync(w)
    }

    l.mutex.Lock()
    ow := l.writer
    l.writer = sw
    l.mutex.Unlock()

    return ow
}

// SetObserver is used to provide a log observer that will be called as log
// levels are checked or written. Only a single observer is supported.
func (l *Logging) SetObserver(observer Observer) Observer {
    l.mutex.Lock()
    so := l.observer
    l.observer = observer
    l.mutex.Unlock()

    return so
}

// Write satisfies the io.Write contract. It delegates to the writer argument
// of SetWriter or the Writer field of Config. The Core uses this when encoding
// log records.
func (l *Logging) Write(b []byte) (int, error) {
    l.mutex.RLock()
    w := l.writer
    l.mutex.RUnlock()

    return w.Write(b)
}

// Sync satisfies the zapcore.WriteSyncer interface. It is used by the Core to
// flush log records before terminating the process.
func (l *Logging) Sync() error {
    l.mutex.RLock()
    w := l.writer
    l.mutex.RUnlock()

    return w.Sync()
}

// Encoding satisfies the Encoding interface. It determines whether the JSON or
// CONSOLE encoder should be used by the Core when log records are written.
func (l *Logging) Encoding() Encoding {
    l.mutex.RLock()
    e := l.encoding
    l.mutex.RUnlock()
    return e
}

// ZapLogger instantiates a new zap.Logger with the specified name. The name is
// used to determine which log levels are enabled.
func (l *Logging) ZapLogger(name string) *zap.Logger {
    if !isValidLoggerName(name) {
        panic(fmt.Sprintf("invalid logger name: %s", name))
    }

    l.mutex.RLock()
    core := &Core{
        LevelEnabler: l.LoggerLevels,
        Levels:       l.LoggerLevels,
        Encoders: map[Encoding]zapcore.Encoder{
            JSON:    zapcore.NewJSONEncoder(l.encoderConfig),
            CONSOLE: fabenc.NewFormatEncoder(l.multiFormatter),
            LOGFMT:  zaplogfmt.NewEncoder(l.encoderConfig),
        },
        Selector: l,
        Output:   l,
        Observer: l,
    }
    l.mutex.RUnlock()

    return NewZapLogger(core).Named(name)
}

func (l *Logging) Check(e zapcore.Entry, ce *zapcore.CheckedEntry) {
    l.mutex.RLock()
    observer := l.observer
    l.mutex.RUnlock()

    if observer != nil {
        observer.Check(e, ce)
    }
}

func (l *Logging) WriteEntry(e zapcore.Entry, fields []zapcore.Field) {
    l.mutex.RLock()
    observer := l.observer
    l.mutex.RUnlock()

    if observer != nil {
        observer.WriteEntry(e, fields)
    }
}

// Logger instantiates a new FabricLogger with the specified name. The name is
// used to determine which log levels are enabled.
func (l *Logging) Logger(name string) *FabricLogger {
    zl := l.ZapLogger(name)
    return NewFabricLogger(zl)
}
[ "\"FABRIC_LOGGING_SPEC\"" ]
[]
[ "FABRIC_LOGGING_SPEC" ]
[]
["FABRIC_LOGGING_SPEC"]
go
1
0
pyctest_tomopy.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
PyCTest driver for TomoPy
"""

import os
import sys
import glob
import shutil
import argparse
import platform
import warnings
import traceback
import subprocess as sp
import multiprocessing as mp

import pyctest.pyctest as pyctest
import pyctest.pycmake as pycmake
import pyctest.helpers as helpers

from benchmarking import (
    default_nitr,
    default_phantom_size,
    find_python_nosetest,
    find_python_coverage,
    find_ctest_token,
    build_option_append,
    build_name_append,
    create_correct_module_test,
    create_nosetest_test,
    create_coverage_test,
    create_phantom_test,
    create_globus_test
)


def cleanup(path=None, exclude=[]):
    """
    route for cleaning up testing files
    """
    sp.call((sys.executable, os.path.join(os.getcwd(), "setup.py"), "clean"))
    helpers.RemovePath(os.path.join(os.getcwd(), "tomopy.egg-info"))
    helpers.RemovePath(os.path.join(os.getcwd(), "dist"))
    helpers.RemovePath(os.path.join(os.getcwd(), "MANIFEST"))
    helpers.Cleanup(path, exclude=exclude)


def configure():
    # set site if set in environ
    if os.environ.get("CTEST_SITE") is not None:
        pyctest.CTEST_SITE = os.environ.get("CTEST_SITE")

    # Get pyctest argument parser that includes PyCTest arguments
    parser = helpers.ArgumentParser(project_name="TomoPy",
                                    source_dir=os.getcwd(),
                                    binary_dir=os.getcwd(),
                                    python_exe=sys.executable,
                                    submit=False,
                                    ctest_args=["-V"])

    # available algorithm choices
    available_algorithms = ['gridrec', 'art', 'fbp', 'bart', 'mlem', 'osem',
                            'sirt', 'ospml_hybrid', 'ospml_quad', 'pml_hybrid',
                            'pml_quad', 'tv', 'grad']

    # available phantom choices
    available_phantoms = ["baboon", "cameraman", "barbara", "checkerboard",
                          "lena", "peppers", "shepp2d", "shepp3d"]

    # choices for algorithms
    algorithm_choices = ['gridrec', 'art', 'fbp', 'bart', 'mlem', 'osem',
                         'sirt', 'ospml_hybrid', 'ospml_quad', 'pml_hybrid',
                         'pml_quad', 'tv', 'grad', 'none', 'all']

    # phantom choices
    phantom_choices = ["baboon", "cameraman", "barbara", "checkerboard",
                       "lena", "peppers", "shepp2d", "shepp3d", "none", "all"]

    # number of cores
    default_ncores = mp.cpu_count()

    # default algorithm choices
    default_algorithms = ['gridrec', 'art', 'fbp', 'bart', 'mlem', 'osem',
                          'sirt', 'ospml_hybrid', 'ospml_quad', 'pml_hybrid',
                          'pml_quad', 'tv', 'grad']

    # default phantom choices
    default_phantoms = ["baboon", "cameraman", "barbara", "checkerboard",
                        "lena", "peppers", "shepp2d", "shepp3d"]

    # default globus phantoms
    default_globus_phantoms = ["tomo_00001"]

    parser.add_argument("-n", "--ncores",
                        help="number of cores",
                        type=int,
                        default=default_ncores)
    parser.add_argument("-i", "--num-iter",
                        help="number of iterations",
                        type=int,
                        default=default_nitr)
    parser.add_argument("--phantoms",
                        help="Phantoms to simulate",
                        type=str,
                        nargs='*',
                        choices=phantom_choices,
                        default=default_phantoms)
    parser.add_argument("--exclude-phantoms",
                        help="Phantoms to exclude from simulation",
                        type=str,
                        nargs='*',
                        choices=default_phantoms,
                        default=[])
    parser.add_argument("--phantom-size",
                        type=int,
                        help="Size parameter for the phantom reconstructions",
                        default=default_phantom_size)
    parser.add_argument("--algorithms",
                        help="Algorithms to use",
                        type=str,
                        nargs='*',
                        choices=algorithm_choices,
                        default=default_algorithms)
    parser.add_argument("--globus-path",
                        help="Path to tomobank datasets",
                        type=str,
                        default=None)
    parser.add_argument("--globus-phantoms",
                        help="Globus phantom files (without extension)",
                        type=str,
                        default=default_globus_phantoms,
                        nargs='*')
    parser.add_argument("--skip-cleanup",
                        help="Skip cleanup of any old pyctest files",
                        action='store_true',
                        default=False)
    parser.add_argument("--cleanup",
                        help="Cleanup of any old pyctest files and exit",
                        action='store_true',
                        default=False)
    parser.add_argument("--coverage",
                        help="Enable coverage for compiled code",
                        action='store_true',
                        default=False)
    parser.add_argument("--disable-phantom-tests",
                        help="Disable running phantom tests",
                        action='store_true',
                        default=False)
    parser.add_argument("--customize-build-name",
                        help="Customize the build name",
                        type=str,
                        default=None)
    parser.add_argument("--cmake-args",
                        help="CMake arguments passed to build",
                        type=str,
                        default=[])
    parser.add_argument("--cuda-arch",
                        help="CUDA architecture flag",
                        type=int,
                        default=53)

    def add_bool_opt(args, opt, enable_opt, disable_opt):
        if enable_opt and disable_opt:
            msg = """\nWarning! python options for CMake argument '{}' was enabled \
AND disabled.\nGiving priority to disable...\n""".format(opt)
            warnings.warn(msg)
            enable_opt = False

        if enable_opt:
            args.cmake_args.append("-D{}:BOOL={}".format(opt, "ON"))
        if disable_opt:
            args.cmake_args.append("-D{}:BOOL={}".format(opt, "OFF"))

    def add_option(parser, lc_name, disp_name):
        # enable option
        parser.add_argument("--enable-{}".format(lc_name),
                            action='store_true',
                            help="Explicitly enable {} build".format(disp_name))
        # disable option
        parser.add_argument("--disable-{}".format(lc_name),
                            action='store_true',
                            help="Explicitly disable {} build".format(disp_name))

    add_option(parser, "cuda", "CUDA")
    add_option(parser, "nvtx", "NVTX (NVIDIA Nsight)")
    add_option(parser, "arch", "Hardware optimized")
    add_option(parser, "avx512", "AVX-512 optimized")
    add_option(parser, "gperf", "gperftools")
    add_option(parser, "timemory", "TiMemory")
    add_option(parser, "sanitizer", "Enable sanitizer (default=leak)")
    add_option(parser, "tasking", "Tasking library (PTL)")

    parser.add_argument("--sanitizer-type",
                        default="leak",
                        help="Set the sanitizer type",
                        type=str,
                        choices=["leak", "thread", "address", "memory"])

    args = parser.parse_args()

    add_bool_opt(args, "TOMOPY_USE_CUDA", args.enable_cuda, args.disable_cuda)
    add_bool_opt(args, "TOMOPY_USE_NVTX", args.enable_nvtx, args.disable_nvtx)
    if args.enable_avx512 and not args.enable_arch:
        args.enable_arch = True
        args.disable_arch = False
    add_bool_opt(args, "TOMOPY_USE_ARCH", args.enable_arch, args.disable_arch)
    add_bool_opt(args, "TOMOPY_USE_AVX512", args.enable_avx512,
                 args.disable_avx512)
    add_bool_opt(args, "TOMOPY_USE_GPERF", args.enable_gperf, args.disable_gperf)
    add_bool_opt(args, "TOMOPY_USE_TIMEMORY", args.enable_timemory,
                 args.disable_timemory)
    add_bool_opt(args, "TOMOPY_USE_SANITIZER", args.enable_sanitizer,
                 args.disable_sanitizer)
    add_bool_opt(args, "TOMOPY_USE_PTL", args.enable_tasking, args.disable_tasking)

    if args.enable_sanitizer:
        args.cmake_args.append("-DSANITIZER_TYPE:STRING={}".format(
            args.sanitizer_type))

    if args.enable_cuda:
        args.cmake_args.append("-DCUDA_ARCH={}".format(args.cuda_arch))

    if len(args.cmake_args) > 0:
        print("\n\n\tCMake arguments set via command line: {}\n".format(
            args.cmake_args))

    if args.cleanup:
        cleanup(pyctest.BINARY_DIRECTORY)
        sys.exit(0)

    if not args.skip_cleanup:
        cleanup(pyctest.BINARY_DIRECTORY)

    def remove_entry(entry, container):
        if entry in container:
            container.remove(entry)

    def remove_duplicates(container):
        container = list(set(container))

    if "all" in args.algorithms:
        remove_entry("all", args.algorithms)
        args.algorithms.extend(available_algorithms)

    if "all" in args.phantoms:
        remove_entry("all", args.phantoms)
        args.phantoms.extend(available_phantoms)

    if "none" in args.algorithms:
        args.algorithms = []

    if "none" in args.phantoms:
        args.phantoms = []

    for p in args.exclude_phantoms:
        if p in args.phantoms:
            args.phantoms.remove(p)

    remove_duplicates(args.algorithms)
    remove_duplicates(args.phantoms)

    git_exe = helpers.FindExePath("git")
    if git_exe is not None:
        pyctest.UPDATE_COMMAND = "{}".format(git_exe)
        pyctest.set("CTEST_UPDATE_TYPE", "git")

    if args.enable_sanitizer:
        pyctest.set("CTEST_MEMORYCHECK_TYPE", "{}Sanitizer".format(
            args.sanitizer_type.lower().capitalize()))

    return args


def run_pyctest():
    '''
    Configure PyCTest and execute
    '''
    # run argparse, checkout source, copy over files
    args = configure()

    # shorthand directories
    source_dir = pyctest.SOURCE_DIRECTORY

    # executables
    pyexe = pyctest.PYTHON_EXECUTABLE
    pycoverage = find_python_coverage()
    gcovcommand = helpers.FindExePath("gcov")
    if gcovcommand is None:
        args.coverage = False

    # properties
    bench_props = {
        "WORKING_DIRECTORY": pyctest.SOURCE_DIRECTORY,
        "DEPENDS": "nosetests",
        "TIMEOUT": "10800"
    }

    # CTEST_TOKEN
    find_ctest_token()

    # BUILD_NAME
    pyctest.BUILD_NAME = "[{}]".format(pyctest.GetGitBranch(source_dir))
    build_name_append(platform.uname()[0], separate=False, suffix="")
    build_name_append(helpers.GetSystemVersionInfo(), prefix="(", suffix=")")
    build_name_append(platform.uname()[4], separate=False, prefix="")
    build_name_append(platform.python_version(), prefix="[Python ")
    build_name_append(args.sanitizer_type.lower(), check=args.enable_sanitizer)
    build_name_append("PTL", check=args.enable_tasking)
    build_name_append(args.customize_build_name)
    build_name_append("coverage", check=args.coverage)

    # BUILD_COMMAND
    pyctest.BUILD_COMMAND = "{} setup.py --hide-listing -q install".format(pyexe)
    pyctest.BUILD_COMMAND += " --build-type=Debug" if args.coverage else ""
    pyctest.BUILD_COMMAND += " -- {}".format(" ".join(args.cmake_args))

    build_option_append(args.enable_sanitizer, "TOMOPY_USE_SANITIZER", "ON")
    build_option_append(args.enable_sanitizer, "SANITIZER_TYPE",
                        args.sanitizer_type)
    build_option_append(args.coverage, "TOMOPY_USE_COVERAGE", "ON")

    print("TomoPy BUILD_COMMAND: '{}'...".format(pyctest.BUILD_COMMAND))

    # COVERAGE_COMMAND
    pyctest.COVERAGE_COMMAND = "{};xml".format(pycoverage)
    if args.coverage:
        pyctest.COVERAGE_COMMAND = "{}".format(gcovcommand)
        pyctest.set("CTEST_COVERAGE_EXTRA_FLAGS", "-m")
        pyctest.set("CTEST_EXTRA_COVERAGE_GLOB", "{}/*.gcno".format(source_dir))

    # unit tests
    create_correct_module_test()
    create_nosetest_test(args)
    create_coverage_test(args)

    # globus tests
    for phantom in args.globus_phantoms:
        for algorithm in args.algorithms:
            create_globus_test(args, bench_props, algorithm, phantom)

    # phantom tests
    for phantom in args.phantoms:
        create_phantom_test(args, bench_props, phantom)

    print('Running PyCTest:\n\n\t{}\n\n'.format(pyctest.BUILD_NAME))

    pyctest.run()


if __name__ == "__main__":
    try:
        run_pyctest()
    except Exception as e:
        print('Error running pyctest - {}'.format(e))
        exc_type, exc_value, exc_trback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_trback, limit=10)
        sys.exit(1)
    sys.exit(0)
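# A self-contained sketch of the precedence rule applied by add_bool_opt()
# inside configure() above (that helper is local to configure(), so this
# copy exists only for illustration): when a CMake option is both enabled
# and disabled, disable wins.
def _bool_opt_example():
    cmake_args = []
    enable_opt, disable_opt = True, True
    if enable_opt and disable_opt:
        enable_opt = False  # disable takes priority
    if enable_opt:
        cmake_args.append("-DTOMOPY_USE_CUDA:BOOL=ON")
    if disable_opt:
        cmake_args.append("-DTOMOPY_USE_CUDA:BOOL=OFF")
    print(cmake_args)  # ['-DTOMOPY_USE_CUDA:BOOL=OFF']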
[]
[]
[ "CTEST_SITE" ]
[]
["CTEST_SITE"]
python
1
0
twitch/types.go
package twitch

type Channel struct {
    Status string `json:"status"`
}

type Stream struct {
    Viewers    int64   `json:"viewers"`
    Game       string  `json:"game"`
    StreamType string  `json:"stream_type"`
    Channel    Channel `json:"channel"`
}
[]
[]
[]
[]
[]
go
null
null
null
geocamUtil/management/commandUtil.py
#!/usr/bin/env python

# __BEGIN_LICENSE__
# Copyright (C) 2008-2010 United States Government as represented by
# the Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# __END_LICENSE__

import os
import sys
import imp
import re
import itertools
import subprocess

from django.core.management.base import BaseCommand

STATUS_DIR_TEMPLATE = '%(siteDir)s/build/management/commandStatus/'


def getSiteDir():
    # if DJANGO_SETTINGS_MODULE='geocamShare.settings', modImpPath='geocamShare'
    modImpPath = re.sub(r'\..*$', '', os.environ['DJANGO_SETTINGS_MODULE'])
    d = imp.find_module(modImpPath)[1]
    if d != '' and not d.endswith('/'):
        d += '/'
    return d


def getConfirmation(description, default=True, auto=False):
    if default is True:
        choices = '[Y/n]'
    else:
        choices = '[y/N]'
    prompt = '%s? %s ' % (description, choices)
    if auto:
        sys.stdout.write(prompt)
        sys.stdout.flush()
        if default:
            print 'y'
        else:
            print 'n'
        return default
    else:
        while 1:
            sys.stdout.write(prompt)
            sys.stdout.flush()
            response = raw_input().strip().lower()
            if not response:
                return default
            elif response == 'y':
                return True
            elif response == 'n':
                return False


def getCommandStatusFileName(commandName):
    statusDir = STATUS_DIR_TEMPLATE % dict(siteDir=getSiteDir())
    return '%s%sStatus.txt' % (statusDir, commandName)


def getCommandStatus(commandName):
    statusName = getCommandStatusFileName(commandName)
    if os.path.exists(statusName):
        return file(statusName, 'r').read()[:-1]
    else:
        return None


def writeStatusFile(path, text):
    d = os.path.dirname(path)
    if not os.path.exists(d):
        os.makedirs(d)
    f = file(path, 'w')
    f.write(text + '\n')
    f.close()


def writeCommandStatus(commandName, text):
    statusName = getCommandStatusFileName(commandName)
    writeStatusFile(statusName, text)


def getConfirmationUseStatus(commandName, description):
    if getCommandStatus(commandName) is None:
        return True
    else:
        print ('Looks like command %s has finished already, based on file %s'
               % (commandName, getCommandStatusFileName(commandName)))
        return getConfirmation('%s (%s) anyway' % (description, commandName),
                               default=False)


class PathCommand(BaseCommand):
    def handle(self, *args, **options):
        if args:
            # user specified apps to prep
            impPaths = args
        else:
            # user did not specify, default to all apps in INSTALLED_APPS
            from django.conf import settings
            impPaths = settings.INSTALLED_APPS
        self.handleImportPaths(impPaths, options)

    def handleImportPaths(self, impPaths, options):
        pass  # override in derived classes


DEFAULT_LINT_IGNORE_PATTERNS = ['/external/', '/build/', '/doc_src/',
                                '/attic/', '/jquery']
DEFAULT_LINT_IGNORE_REGEXES = [re.compile(pat)
                               for pat in DEFAULT_LINT_IGNORE_PATTERNS]


def parseLintignoreLine(x):
    if x.startswith('#'):
        return []
    elif x.strip() == '':
        return []
    elif x.startswith('\\'):
        return [x[1:]]
    else:
        return [x]


def joinLists(lists):
    return itertools.chain(*lists)


def parseLintignoreText(lintignoreText):
    """
    *lintignoreText* should be the text of a 'lintignore' file. Return the
    corresponding list of regexes to match against file paths.
    """
    lintignoreLines = joinLists([parseLintignoreLine(x)
                                 for x in lintignoreText.splitlines()])
    return [re.compile(x) for x in lintignoreLines]


def pathIsNotIgnored(path, lintignoreRegexes):
    return all([not r.search(path) for r in lintignoreRegexes])


def lintignore(pathsText):
    """
    *pathsText* should be a string containing paths separated by newlines
    (like output from the UNIX 'find' command). Return a string in the same
    format, filtering out any paths that should be ignored according to the
    'lintignore' file.

    Look for the 'lintignore' file at '<site>/management/lintignore'. If that
    file does not exist, use DEFAULT_LINT_IGNORE_PATTERNS.
    """
    lintignorePath = os.path.join(getSiteDir(), 'management', 'lintignore')
    if os.path.exists(lintignorePath):
        lintignoreRegexes = parseLintignoreText(open(lintignorePath, 'r').read())
    else:
        lintignoreRegexes = DEFAULT_LINT_IGNORE_REGEXES

    paths = pathsText.splitlines()
    unignoredFiles = [p for p in paths if pathIsNotIgnored(p, lintignoreRegexes)]
    return '\n'.join(unignoredFiles)


def pipeToCommand(cmd, text, verbosity):
    if verbosity > 1:
        print >> sys.stderr, 'piping input to: %s' % cmd
    proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE)
    proc.communicate(text)
    ret = proc.returncode
    if verbosity > 1:
        if ret != 0:
            print >> sys.stderr, 'warning: command exited with non-zero return value %d' % ret
    return ret


def dosys(cmd, verbosity):
    if verbosity > 1:
        print >> sys.stderr, cmd
    ret = os.system(cmd)
    if ret != 0:
        if verbosity > 1:
            print >> sys.stderr, 'command exited with non-zero return value %s' % ret
    return ret
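# A minimal sketch of the lintignore helpers above (hypothetical paths, not
# part of the original module): build regexes from lintignore-style text
# and test candidate paths against them. Written in the module's Python 2.
def _lintignore_example():
    regexes = parseLintignoreText('# a comment\n/build/\n')
    print pathIsNotIgnored('apps/views.py', regexes)            # True
    print pathIsNotIgnored('site/build/generated.py', regexes)  # False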
[]
[]
[ "DJANGO_SETTINGS_MODULE" ]
[]
["DJANGO_SETTINGS_MODULE"]
python
1
0
compotes/wsgi.py
""" WSGI config for compotes project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "compotes.settings") application = get_wsgi_application()
[]
[]
[]
[]
[]
python
0
0
flask/app/db.py
import os

from pymongo import MongoClient

MONGODB_URI = (
    "mongodb://"
    "{user}:{password}@{hostname}:{port}/{db}?authSource={authSource}".format(
        user=os.environ.get("MONGO_INITDB_ROOT_USERNAME"),
        password=os.environ.get("MONGO_INITDB_ROOT_PASSWORD"),
        hostname=os.environ.get("HOSTNAME"),
        port=27017,
        db=os.environ.get("MONGO_DB"),
        authSource=os.environ.get("AUTH_SOURCE"),
    )
)

client = MongoClient(MONGODB_URI)
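# A minimal connectivity check for the client above (hypothetical helper,
# not part of the original module). `ping` is a standard MongoDB admin
# command; this assumes the env vars are set and the server is reachable.
def _ping_example():
    print(client.admin.command("ping"))  # {'ok': 1.0} on success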
[]
[]
[ "MONGO_INITDB_ROOT_USERNAME", "HOSTNAME", "MONGO_DB", "AUTH_SOURCE", "MONGO_INITDB_ROOT_PASSWORD" ]
[]
["MONGO_INITDB_ROOT_USERNAME", "HOSTNAME", "MONGO_DB", "AUTH_SOURCE", "MONGO_INITDB_ROOT_PASSWORD"]
python
5
0
services/searchengine/bleveengine/bleve_test.go
// Copyright (c) 2015-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

package bleveengine

import (
    "io/ioutil"
    "os"
    "testing"

    "github.com/blevesearch/bleve"
    "github.com/stretchr/testify/require"
    "github.com/stretchr/testify/suite"

    "github.com/mattermost/mattermost-server/v5/model"
    "github.com/mattermost/mattermost-server/v5/services/searchengine"
    "github.com/mattermost/mattermost-server/v5/store/searchlayer"
    "github.com/mattermost/mattermost-server/v5/store/searchtest"
    "github.com/mattermost/mattermost-server/v5/store/sqlstore"
    "github.com/mattermost/mattermost-server/v5/store/storetest"
    "github.com/mattermost/mattermost-server/v5/testlib"
)

type BleveEngineTestSuite struct {
    suite.Suite

    SQLSettings  *model.SqlSettings
    SQLSupplier  *sqlstore.SqlSupplier
    SearchEngine *searchengine.Broker
    Store        *searchlayer.SearchStore
    BleveEngine  *BleveEngine
    IndexDir     string
}

func TestBleveEngineTestSuite(t *testing.T) {
    suite.Run(t, new(BleveEngineTestSuite))
}

func (s *BleveEngineTestSuite) setupIndexes() {
    indexDir, err := ioutil.TempDir("", "mmbleve")
    if err != nil {
        s.Require().FailNow("Cannot setup bleveengine tests: %s", err.Error())
    }
    s.IndexDir = indexDir
}

func (s *BleveEngineTestSuite) setupStore() {
    driverName := os.Getenv("MM_SQLSETTINGS_DRIVERNAME")
    if driverName == "" {
        driverName = model.DATABASE_DRIVER_POSTGRES
    }
    s.SQLSettings = storetest.MakeSqlSettings(driverName)
    s.SQLSupplier = sqlstore.NewSqlSupplier(*s.SQLSettings, nil)

    cfg := &model.Config{}
    cfg.SetDefaults()
    cfg.BleveSettings.EnableIndexing = model.NewBool(true)
    cfg.BleveSettings.EnableSearching = model.NewBool(true)
    cfg.BleveSettings.EnableAutocomplete = model.NewBool(true)
    cfg.BleveSettings.IndexDir = model.NewString(s.IndexDir)
    cfg.SqlSettings.DisableDatabaseSearch = model.NewBool(true)

    s.SearchEngine = searchengine.NewBroker(cfg, nil)
    s.Store = searchlayer.NewSearchLayer(&testlib.TestStore{Store: s.SQLSupplier}, s.SearchEngine, cfg)

    s.BleveEngine = NewBleveEngine(cfg, nil)
    s.BleveEngine.indexSync = true
    s.SearchEngine.RegisterBleveEngine(s.BleveEngine)
    if err := s.BleveEngine.Start(); err != nil {
        s.Require().FailNow("Cannot start bleveengine: %s", err.Error())
    }
}

func (s *BleveEngineTestSuite) SetupSuite() {
    s.setupIndexes()
    s.setupStore()
}

func (s *BleveEngineTestSuite) TearDownSuite() {
    os.RemoveAll(s.IndexDir)
    s.SQLSupplier.Close()
    storetest.CleanupSqlSettings(s.SQLSettings)
}

func (s *BleveEngineTestSuite) TestBleveSearchStoreTests() {
    searchTestEngine := &searchtest.SearchTestEngine{
        Driver: searchtest.ENGINE_BLEVE,
    }

    s.Run("TestSearchChannelStore", func() {
        searchtest.TestSearchChannelStore(s.T(), s.Store, searchTestEngine)
    })

    s.Run("TestSearchUserStore", func() {
        searchtest.TestSearchUserStore(s.T(), s.Store, searchTestEngine)
    })

    s.Run("TestSearchPostStore", func() {
        searchtest.TestSearchPostStore(s.T(), s.Store, searchTestEngine)
    })
}

func (s *BleveEngineTestSuite) TestDeleteChannelPosts() {
    s.Run("Should remove all the posts that belong to a channel", func() {
        s.BleveEngine.PurgeIndexes()
        teamID := model.NewId()
        userID := model.NewId()
        channelID := model.NewId()
        channelToAvoidID := model.NewId()
        posts := make([]*model.Post, 0)
        for i := 0; i < 10; i++ {
            post := createPost(userID, channelID, "test one two three")
            appErr := s.SearchEngine.BleveEngine.IndexPost(post, teamID)
            require.Nil(s.T(), appErr)
            posts = append(posts, post)
        }
        postToAvoid := createPost(userID, channelToAvoidID, "test one two three")
        appErr := s.SearchEngine.BleveEngine.IndexPost(postToAvoid, teamID)
        require.Nil(s.T(), appErr)

        s.SearchEngine.BleveEngine.DeleteChannelPosts(channelID)

        doc, err := s.BleveEngine.PostIndex.Document(postToAvoid.Id)
        require.Nil(s.T(), err)
        require.Equal(s.T(), postToAvoid.Id, doc.ID)
        numberDocs, err := s.BleveEngine.PostIndex.DocCount()
        require.Nil(s.T(), err)
        require.Equal(s.T(), 1, int(numberDocs))
    })

    s.Run("Shouldn't do anything if there are no posts for the selected channel", func() {
        s.BleveEngine.PurgeIndexes()
        teamID := model.NewId()
        userID := model.NewId()
        channelID := model.NewId()
        channelToDeleteID := model.NewId()
        post := createPost(userID, channelID, "test one two three")
        appErr := s.SearchEngine.BleveEngine.IndexPost(post, teamID)
        require.Nil(s.T(), appErr)

        s.SearchEngine.BleveEngine.DeleteChannelPosts(channelToDeleteID)

        _, err := s.BleveEngine.PostIndex.Document(post.Id)
        require.Nil(s.T(), err)
        numberDocs, err := s.BleveEngine.PostIndex.DocCount()
        require.Nil(s.T(), err)
        require.Equal(s.T(), 1, int(numberDocs))
    })
}

func (s *BleveEngineTestSuite) TestDeleteUserPosts() {
    s.Run("Should remove all the posts that belong to a user", func() {
        s.BleveEngine.PurgeIndexes()
        teamID := model.NewId()
        userID := model.NewId()
        userToAvoidID := model.NewId()
        channelID := model.NewId()
        posts := make([]*model.Post, 0)
        for i := 0; i < 10; i++ {
            post := createPost(userID, channelID, "test one two three")
            appErr := s.SearchEngine.BleveEngine.IndexPost(post, teamID)
            require.Nil(s.T(), appErr)
            posts = append(posts, post)
        }
        postToAvoid := createPost(userToAvoidID, channelID, "test one two three")
        appErr := s.SearchEngine.BleveEngine.IndexPost(postToAvoid, teamID)
        require.Nil(s.T(), appErr)

        s.SearchEngine.BleveEngine.DeleteUserPosts(userID)

        doc, err := s.BleveEngine.PostIndex.Document(postToAvoid.Id)
        require.Nil(s.T(), err)
        require.Equal(s.T(), postToAvoid.Id, doc.ID)
        numberDocs, err := s.BleveEngine.PostIndex.DocCount()
        require.Nil(s.T(), err)
        require.Equal(s.T(), 1, int(numberDocs))
    })

    s.Run("Shouldn't do anything if there are no posts for the selected user", func() {
        s.BleveEngine.PurgeIndexes()
        teamID := model.NewId()
        userID := model.NewId()
        userToDeleteID := model.NewId()
        channelID := model.NewId()
        post := createPost(userID, channelID, "test one two three")
        appErr := s.SearchEngine.BleveEngine.IndexPost(post, teamID)
        require.Nil(s.T(), appErr)

        s.SearchEngine.BleveEngine.DeleteUserPosts(userToDeleteID)

        _, err := s.BleveEngine.PostIndex.Document(post.Id)
        require.Nil(s.T(), err)
        numberDocs, err := s.BleveEngine.PostIndex.DocCount()
        require.Nil(s.T(), err)
        require.Equal(s.T(), 1, int(numberDocs))
    })
}

func (s *BleveEngineTestSuite) TestDeletePosts() {
    s.BleveEngine.PurgeIndexes()
    teamID := model.NewId()
    userID := model.NewId()
    userToAvoidID := model.NewId()
    channelID := model.NewId()
    posts := make([]*model.Post, 0)
    for i := 0; i < 10; i++ {
        post := createPost(userID, channelID, "test one two three")
        appErr := s.SearchEngine.BleveEngine.IndexPost(post, teamID)
        require.Nil(s.T(), appErr)
        posts = append(posts, post)
    }
    postToAvoid := createPost(userToAvoidID, channelID, "test one two three")
    appErr := s.SearchEngine.BleveEngine.IndexPost(postToAvoid, teamID)
    require.Nil(s.T(), appErr)

    query := bleve.NewTermQuery(userID)
    query.SetField("UserId")
    search := bleve.NewSearchRequest(query)
    count, err := s.BleveEngine.deletePosts(search, 1)
    require.Nil(s.T(), err)
    require.Equal(s.T(), 10, int(count))

    doc, err := s.BleveEngine.PostIndex.Document(postToAvoid.Id)
    require.Nil(s.T(), err)
    require.Equal(s.T(), postToAvoid.Id, doc.ID)
    numberDocs, err := s.BleveEngine.PostIndex.DocCount()
    require.Nil(s.T(), err)
    require.Equal(s.T(), 1, int(numberDocs))
}
[ "\"MM_SQLSETTINGS_DRIVERNAME\"" ]
[]
[ "MM_SQLSETTINGS_DRIVERNAME" ]
[]
["MM_SQLSETTINGS_DRIVERNAME"]
go
1
0
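The suite above selects its test database through MM_SQLSETTINGS_DRIVERNAME and falls back to Postgres when the variable is empty. A minimal sketch of that fallback in Python, where `resolve_test_driver` and the default string are our own stand-ins for the Go code's logic and `model.DATABASE_DRIVER_POSTGRES`:

```python
import os

DATABASE_DRIVER_POSTGRES = "postgres"  # stand-in for model.DATABASE_DRIVER_POSTGRES


def resolve_test_driver() -> str:
    # Empty and unset are treated the same way, exactly as in setupStore.
    driver = os.getenv("MM_SQLSETTINGS_DRIVERNAME", "")
    return driver or DATABASE_DRIVER_POSTGRES


if __name__ == "__main__":
    print(resolve_test_driver())  # "postgres" unless the variable is exported
```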
src/ZODB/tests/testSerialize.py
############################################################################## # # Copyright (c) 2004 Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import doctest import sys import unittest from persistent import Persistent from persistent.wref import WeakRef import zope.testing.setupstack import ZODB.tests.util from ZODB import serialize from ZODB._compat import Pickler, PersistentUnpickler, BytesIO, _protocol, IS_JYTHON class PersistentObject(Persistent): pass class ClassWithNewargs(int): def __new__(cls, value): return int.__new__(cls, value) def __getnewargs__(self): return int(self), class ClassWithoutNewargs(object): def __init__(self, value): self.value = value def make_pickle(ob): sio = BytesIO() p = Pickler(sio, _protocol) p.dump(ob) return sio.getvalue() def _factory(conn, module_name, name): return globals()[name] class SerializerTestCase(unittest.TestCase): # old format: (module, name), None old_style_without_newargs = make_pickle( ((__name__, "ClassWithoutNewargs"), None)) # old format: (module, name), argtuple old_style_with_newargs = make_pickle( ((__name__, "ClassWithNewargs"), (1,))) # new format: klass new_style_without_newargs = make_pickle( ClassWithoutNewargs) # new format: klass, argtuple new_style_with_newargs = make_pickle( (ClassWithNewargs, (1,))) def test_getClassName(self): r = serialize.ObjectReader(factory=_factory) eq = self.assertEqual eq(r.getClassName(self.old_style_with_newargs), __name__ + ".ClassWithNewargs") eq(r.getClassName(self.new_style_with_newargs), __name__ + ".ClassWithNewargs") eq(r.getClassName(self.old_style_without_newargs), __name__ + ".ClassWithoutNewargs") eq(r.getClassName(self.new_style_without_newargs), __name__ + ".ClassWithoutNewargs") def test_getGhost(self): # Use a TestObjectReader since we need _get_class() to be # implemented; otherwise this is just a BaseObjectReader. 
class TestObjectReader(serialize.ObjectReader): # A production object reader would optimize this, but we # don't need to in a test def _get_class(self, module, name): __import__(module) return getattr(sys.modules[module], name) r = TestObjectReader(factory=_factory) g = r.getGhost(self.old_style_with_newargs) self.assertTrue(isinstance(g, ClassWithNewargs)) self.assertEqual(g, 1) g = r.getGhost(self.old_style_without_newargs) self.assertTrue(isinstance(g, ClassWithoutNewargs)) g = r.getGhost(self.new_style_with_newargs) self.assertTrue(isinstance(g, ClassWithNewargs)) g = r.getGhost(self.new_style_without_newargs) self.assertTrue(isinstance(g, ClassWithoutNewargs)) def test_myhasattr(self): class OldStyle(object): bar = "bar" def __getattr__(self, name): if name == "error": raise ValueError("whee!") else: raise AttributeError(name) class NewStyle(object): bar = "bar" def _raise(self): raise ValueError("whee!") error = property(_raise) self.assertRaises(ValueError, serialize.myhasattr, OldStyle(), "error") self.assertRaises(ValueError, serialize.myhasattr, NewStyle(), "error") self.assertTrue(serialize.myhasattr(OldStyle(), "bar")) self.assertTrue(serialize.myhasattr(NewStyle(), "bar")) self.assertTrue(not serialize.myhasattr(OldStyle(), "rat")) self.assertTrue(not serialize.myhasattr(NewStyle(), "rat")) def test_persistent_id_noload(self): # make sure we can noload weak references and other list-based # references like we expect. Protect explicitly against the # breakage in CPython 2.7 and zodbpickle < 0.6.0 o = PersistentObject() o._p_oid = b'abcd' top = PersistentObject() top._p_oid = b'efgh' top.ref = WeakRef(o) pickle = serialize.ObjectWriter().serialize(top) refs = [] u = PersistentUnpickler(None, refs.append, BytesIO(pickle)) u.noload() u.noload() self.assertEqual(refs, [['w', (b'abcd',)]]) class SerializerFunctestCase(unittest.TestCase): def setUp(self): import tempfile self._tempdir = tempfile.mkdtemp(suffix='serializerfunc') def tearDown(self): import shutil shutil.rmtree(self._tempdir) def test_funky_datetime_serialization(self): import os import subprocess fqn = os.path.join(self._tempdir, 'Data.fs') prep_args = [sys.executable, '-c', 'from ZODB.tests.testSerialize import _functest_prep; ' '_functest_prep(%s)' % repr(fqn)] # buildout doesn't arrange for the sys.path to be exported, # so force it ourselves environ = os.environ.copy() if IS_JYTHON: # Jython 2.7rc2 has a bug; if its Lib directory is # specifically put on the PYTHONPATH, then it doesn't add # it itself, which means it fails to 'import site' because # it can't import '_jythonlib' and the whole process fails # We would use multiprocessing here, but it doesn't exist on jython sys_path = [x for x in sys.path if not x.endswith('Lib') and x != '__classpath__' and x!= '__pyclasspath__/'] else: sys_path = sys.path environ['PYTHONPATH'] = os.pathsep.join(sys_path) subprocess.check_call(prep_args, env=environ) load_args = [sys.executable, '-c', 'from ZODB.tests.testSerialize import _functest_load; ' '_functest_load(%s)' % repr(fqn)] subprocess.call(load_args, env=environ) def _working_failing_datetimes(): import datetime WORKING = datetime.datetime(5375, 12, 31, 23, 59, 59) # Any date after 5375 A.D. appears to trigger this bug. FAILING = datetime.datetime(5376, 12, 31, 23, 59, 59) return WORKING, FAILING def _functest_prep(fqn): # Prepare the database with a BTree which won't deserialize # if the bug is present. 
# run in separate process) import transaction from BTrees.OOBTree import OOBTree from ZODB import DB WORKING, FAILING = _working_failing_datetimes() db = DB(fqn) conn = db.open() try: root = conn.root() tree = root['tree'] = OOBTree() tree[WORKING] = 'working' tree[FAILING] = 'failing' transaction.commit() finally: # Windoze conn.close() db.close() def _functest_load(fqn): # Open the database and attempt to deserialize the tree # (run in separate process) from ZODB import DB WORKING, FAILING = _working_failing_datetimes() db = DB(fqn) conn = db.open() try: root = conn.root() tree = root['tree'] assert tree[WORKING] == 'working' assert tree[FAILING] == 'failing' finally: # Windoze conn.close() db.close() def test_suite(): return unittest.TestSuite(( unittest.makeSuite(SerializerTestCase), unittest.makeSuite(SerializerFunctestCase), doctest.DocTestSuite("ZODB.serialize", checker=ZODB.tests.util.checker), ))
[]
[]
[]
[]
[]
python
0
0
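The SerializerTestCase above builds four pickles to cover ZODB's two class-reference layouts, the old `(module, name)` tuple and the new direct class reference, each with or without a `__getnewargs__` tuple. A standalone sketch of those layouts using the stdlib pickle module; the protocol value is an assumption, since ZODB._compat supplies its own `_protocol`:

```python
import pickle
from io import BytesIO


def make_pickle(ob, protocol=2):  # ZODB's _protocol assumed to be 2 here
    sio = BytesIO()
    pickle.Pickler(sio, protocol).dump(ob)
    return sio.getvalue()


class WithNewargs(int):
    def __getnewargs__(self):
        return (int(self),)


class WithoutNewargs(object):
    def __init__(self, value):
        self.value = value


# old format: (module, name) tuple, with and without an argument tuple
old_with = make_pickle(((__name__, "WithNewargs"), (1,)))
old_without = make_pickle(((__name__, "WithoutNewargs"), None))

# new format: the class object itself, with and without an argument tuple
new_with = make_pickle((WithNewargs, (1,)))
new_without = make_pickle(WithoutNewargs)
```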
plugins/tuling.py
# -*- coding:utf-8 -*-
__author__ = 'fansly'
import os
import json

import requests

# API key is read from the environment at import time.
TULING_KEY = os.getenv('TULING_KEY')


def get_response(openid, msg):
    """Send msg to the Tuling v2 chatbot API and return its text reply."""
    api = 'http://openapi.tuling123.com/openapi/api/v2'
    dat = {
        "perception": {
            "inputText": {"text": msg},
            "inputImage": {"url": "imageUrl"},
            "selfInfo": {
                "location": {
                    "city": "北京",
                    "province": "北京",
                    "street": "信息路"
                }
            }
        },
        "userInfo": {
            "apiKey": TULING_KEY,
            "userId": openid
        }
    }
    dat = json.dumps(dat)
    r = requests.post(api, data=dat).json()
    message = r['results'][0]['values']['text']
    print(message)
    return message
[]
[]
[ "TULING_KEY" ]
[]
["TULING_KEY"]
python
1
0
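A usage sketch for the module above. Because TULING_KEY is captured at import time, it has to be in the environment before `plugins.tuling` is imported; the key and openid below are placeholders, and a real reply needs network access plus a valid Tuling account:

```python
import os

# Export the key before importing the module, which reads it at import time.
os.environ.setdefault("TULING_KEY", "your-tuling-api-key")  # placeholder

from plugins.tuling import get_response  # assumes the plugins/ package layout

# openid is any stable per-user identifier the bot should remember.
print(get_response("demo-openid", "你好"))
```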
examples/url_shortener/main.go
package main import ( "context" "io/ioutil" "log" "net/http" "os" "github.com/matthewpi/pgx/v4" "github.com/matthewpi/pgx/v4/log/testingadapter" "github.com/matthewpi/pgx/v4/pgxpool" ) var db *pgxpool.Pool func getUrlHandler(w http.ResponseWriter, req *http.Request) { var url string err := db.QueryRow(context.Background(), "select url from shortened_urls where id=$1", req.URL.Path).Scan(&url) switch err { case nil: http.Redirect(w, req, url, http.StatusSeeOther) case pgx.ErrNoRows: http.NotFound(w, req) default: http.Error(w, "Internal server error", http.StatusInternalServerError) } } func putUrlHandler(w http.ResponseWriter, req *http.Request) { id := req.URL.Path var url string if body, err := ioutil.ReadAll(req.Body); err == nil { url = string(body) } else { http.Error(w, "Internal server error", http.StatusInternalServerError) return } if _, err := db.Exec(context.Background(), `insert into shortened_urls(id, url) values ($1, $2) on conflict (id) do update set url=excluded.url`, id, url); err == nil { w.WriteHeader(http.StatusOK) } else { http.Error(w, "Internal server error", http.StatusInternalServerError) } } func deleteUrlHandler(w http.ResponseWriter, req *http.Request) { if _, err := db.Exec(context.Background(), "delete from shortened_urls where id=$1", req.URL.Path); err == nil { w.WriteHeader(http.StatusOK) } else { http.Error(w, "Internal server error", http.StatusInternalServerError) } } func urlHandler(w http.ResponseWriter, req *http.Request) { switch req.Method { case "GET": getUrlHandler(w, req) case "PUT": putUrlHandler(w, req) case "DELETE": deleteUrlHandler(w, req) default: w.Header().Add("Allow", "GET, PUT, DELETE") w.WriteHeader(http.StatusMethodNotAllowed) } } type logger struct{} func (*logger) Log(args ...interface{}) { log.Println(args...) } func main() { logger := testingadapter.NewLogger(&logger{}) poolConfig, err := pgxpool.ParseConfig(os.Getenv("DATABASE_URL")) if err != nil { log.Fatal("Unable to parse DATABASE_URL", "error", err) os.Exit(1) } poolConfig.ConnConfig.Logger = logger db, err = pgxpool.ConnectConfig(context.Background(), poolConfig) if err != nil { log.Fatal("Unable to create connection pool", "error", err) os.Exit(1) } http.HandleFunc("/", urlHandler) log.Println("Starting URL shortener on localhost:8080") err = http.ListenAndServe("localhost:8080", nil) if err != nil { log.Fatal("Unable to start web server", "error", err) os.Exit(1) } }
[ "\"DATABASE_URL\"" ]
[]
[ "DATABASE_URL" ]
[]
["DATABASE_URL"]
go
1
0
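The handlers above define a small REST contract: PUT upserts the request body as the URL for the path, GET answers with a 303 redirect to it, and DELETE removes the mapping. A client-side sketch against a locally running instance, assuming DATABASE_URL pointed at a database with a `shortened_urls(id, url)` table when the server started:

```python
import requests

BASE = "http://localhost:8080"

# PUT upserts (the handler runs insert ... on conflict (id) do update).
requests.put(f"{BASE}/example", data="https://example.com")

# GET redirects with 303 See Other; the stored URL is in the Location header.
r = requests.get(f"{BASE}/example", allow_redirects=False)
assert r.status_code == 303
assert r.headers["Location"] == "https://example.com"

# DELETE removes the mapping, so a later GET is a 404.
requests.delete(f"{BASE}/example")
assert requests.get(f"{BASE}/example", allow_redirects=False).status_code == 404
```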
Train/simplified_trainer_example_cheplike.py
''' Compatible with the dataset here: /eos/cms/store/cmst3/group/hgcal/CMG_studies/pepr/Oct2021_production/Gun20Part_CHEPDef_NoPropagate/NanoML and (soon) /eos/cms/store/cmst3/group/hgcal/CMG_studies/pepr/Oct2021_production/Gun20Part_CHEPDef_NoPropagate/NanoMLTracks On flatiron: /mnt/ceph/users/jkieseler/HGCalML_data/OctProd/NanoML not compatible with datasets before end of October 2021 ''' from callback_wrappers import build_callbacks from experiment_database_manager import ExperimentDatabaseManager import tensorflow as tf from argparse import ArgumentParser # from K import Layer import numpy as np from tensorflow.keras.layers import Reshape,BatchNormalization, Dropout, Add from LayersRagged import RaggedConstructTensor from GravNetLayersRagged import WeightFeatures,WeightedNeighbourMeans,DownSample, CreateIndexFromMajority, ProcessFeatures, SoftPixelCNN, RaggedGravNet, DistanceWeightedMessagePassing from initializers import EyeInitializer from tensorflow.keras.layers import Multiply, Dense, Concatenate, GaussianDropout from datastructures import TrainData_NanoML from plotting_callbacks import plotEventDuringTraining, plotGravNetCoordsDuringTraining, plotClusteringDuringTraining, plotClusterSummary from DeepJetCore.DJCLayers import StopGradient,ScalarMultiply, SelectFeatures, ReduceSumEntirely from clr_callback import CyclicLR from model_blocks import create_outputs from GravNetLayersRagged import MultiBackScatter,EdgeCreator, EdgeSelector from GravNetLayersRagged import GroupScoreFromEdgeScores,NoiseFilter from GravNetLayersRagged import ProcessFeatures,SoftPixelCNN, RaggedGravNet from GravNetLayersRagged import DistanceWeightedMessagePassing,MultiBackScatterOrGather from GravNetLayersRagged import NeighbourGroups,AccumulateNeighbours,SelectFromIndices from GravNetLayersRagged import RecalcDistances, ElementScaling, RemoveSelfRef, CastRowSplits from Layers import CreateTruthSpectatorWeights, ManualCoordTransform,RaggedGlobalExchange,LocalDistanceScaling,CheckNaN,NeighbourApproxPCA, SortAndSelectNeighbours, LLLocalClusterCoordinates,DistanceWeightedMessagePassing,CreateGlobalIndices, SelectFromIndices, MultiBackScatter, KNN, MessagePassing, RobustModel from Layers import GausActivation,GooeyBatchNorm #make a new line from model_blocks import create_outputs, noise_pre_filter from Regularizers import AverageDistanceRegularizer from model_blocks import first_coordinate_adjustment, reduce, pre_selection_model_full from lossLayers import LLNeighbourhoodClassifier, LLNotNoiseClassifier from lossLayers import LLFullObjectCondensation, LLClusterCoordinates,LLEdgeClassifier from debugLayers import PlotCoordinates ''' make this about coordinate shifts ''' def gravnet_model(Inputs, td, viscosity=0.8, print_viscosity=False, fluidity_decay=5e-4, # reaches after about 7k batches max_viscosity=0.95, debug_outdir=None ): # Input preprocessing below. 
Not much to change here orig_inputs = td.interpretAllModelInputs(Inputs,returndict=True) orig_t_spectator_weight = CreateTruthSpectatorWeights(threshold=3., minimum=1e-1, active=True )([orig_inputs['t_spectator'], orig_inputs['t_idx']]) #can be loaded - or use pre-selected dataset (to be made) pre_selection = pre_selection_model_full(orig_inputs, debug_outdir, reduction_threshold=0.5, use_edges=True, trainable=False, #use this as a static model debugplots_after=-1, #no debug plots omit_reduction=False ) ''' pre_selection has the following dict items: ['features'] = selfeat ['coords'] = coords ['addfeat'] = x (pre-processed features) ['energy'] = energy (energy sums in case there was a grouping) ['group_backgather']=group_backgather ['noise_backscatter_N']=noise_backscatter[0] ['noise_backscatter_idx']=noise_backscatter[1] ['rs']=rs Selected truth information: ['t_idx'], ['t_energy'], ['t_pos'], ['t_time'], ['t_pid'], ['t_spectator'], ['t_fully_contained'] full N_hits dimension!! out['orig_t_idx'] = orig_inputs['t_idx'] out['orig_t_energy'] = orig_inputs['t_energy'] #for validation ''' ########## from here on everything is based on the pre-selection; only extend at the very end for the loss t_spectator_weight = CreateTruthSpectatorWeights(threshold=3., minimum=1e-1, active=True )([pre_selection['t_spectator'], pre_selection['t_idx']]) rs = pre_selection['rs'] x = Concatenate()([pre_selection['coords'], pre_selection['features'], pre_selection['addfeat']]) energy = pre_selection['energy'] coords = pre_selection['coords'] t_idx = pre_selection['t_idx'] scatterids = [pre_selection['group_backgather'], [ pre_selection['noise_backscatter_N'],pre_selection['noise_backscatter_idx'] ]] #add them here directly allfeat = [MultiBackScatterOrGather()([x, scatterids])] allcoords= [MultiBackScatterOrGather()([coords, scatterids])] n_cluster_space_coordinates = 3 total_iterations=5 for i in range(total_iterations): # derive new coordinates for clustering x = RaggedGlobalExchange()([x, rs]) x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = GooeyBatchNorm(viscosity=viscosity, max_viscosity=max_viscosity, fluidity_decay=fluidity_decay)(x) ### reduction done #exchange information, create coordinates x = Concatenate()([coords,x]) x, gncoords, gnnidx, gndist = RaggedGravNet(n_neighbours=64, n_dimensions=7, n_filters=128, n_propagate=64, )([x, rs]) x = DistanceWeightedMessagePassing([64,64,32,32,16,16])([x,gnnidx,gndist]) x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = GooeyBatchNorm(viscosity=viscosity, max_viscosity=max_viscosity, fluidity_decay=fluidity_decay)(x) allfeat.append(MultiBackScatterOrGather()([x, scatterids])) ####### back to non-reduced space #x = MultiBackScatterOrGather()([x, scatterids]) x = Concatenate()(allfeat+allcoords) x = GooeyBatchNorm(viscosity=viscosity, max_viscosity=max_viscosity, fluidity_decay=fluidity_decay)(x) #do one more exchange with all x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = Dense(64,activation='relu')(x) x = GooeyBatchNorm(viscosity=viscosity, max_viscosity=max_viscosity, fluidity_decay=fluidity_decay)(x) x = Concatenate()(allcoords+[x]) pred_beta, pred_ccoords, pred_dist, pred_energy, \ pred_pos, pred_time, pred_id = create_outputs(x, orig_inputs['features'], fix_distance_scale=False, n_ccoords=n_cluster_space_coordinates) row_splits = CastRowSplits()(orig_inputs['row_splits']) # loss pred_beta = 
LLFullObjectCondensation(print_loss=True, scale=1., energy_loss_weight=1e-2, position_loss_weight=1e-2, timing_loss_weight=1e-2, beta_loss_scale=1., too_much_beta_scale=.01, use_energy_weights=True, q_min=2.5, #div_repulsion=True, # cont_beta_loss=True, # beta_gradient_damping=0.999, # phase_transition=1, huber_energy_scale=3, use_average_cc_pos=0.5, # smoothen it out a bit name="FullOCLoss" )( # oc output and payload [pred_beta, pred_ccoords, pred_dist, pred_energy, pred_pos, pred_time, pred_id] + # truth information [orig_inputs['t_idx'] , orig_inputs['t_energy'] , orig_inputs['t_pos'] , orig_inputs['t_time'] , orig_inputs['t_pid'] , orig_t_spectator_weight , row_splits]) model_outputs = [('pred_beta', pred_beta), ('pred_ccoords', pred_ccoords), ('pred_energy', pred_energy), ('pred_pos', pred_pos), ('pred_time', pred_time), ('pred_id', pred_id), ('pred_dist', pred_dist), ('row_splits', row_splits)] return RobustModel(model_inputs=Inputs, model_outputs=model_outputs) import training_base_hgcal train = training_base_hgcal.HGCalTraining(testrun=False, resumeSilently=True, renewtokens=False) if not train.modelSet(): train.setModel(gravnet_model, td=train.train_data.dataclass(), debug_outdir=train.outputDir+'/intplots') train.setCustomOptimizer(tf.keras.optimizers.Adam( #larger->slower forgetting #beta_1: linear #beta_2: sq #make it slower for our weird fluctuating batches #beta_1=0.99, #0.9 #beta_2=0.99999 #0.999 #clipnorm=0.001 #amsgrad=True, #epsilon=1e-2 )) #get pretrained preselection weights from model_tools import apply_weights_from_path import os path_to_pretrained = os.getenv("HGCALML")+'/models/pre_selection/KERAS_check_model_last.h5' train.keras_model = apply_weights_from_path(path_to_pretrained,train.keras_model) # train.compileModel(learningrate=1e-4, loss=None) verbosity = 2 import os samplepath=train.val_data.getSamplePath(train.val_data.samples[0]) # publishpath = '[email protected]:/eos/home-j/jkiesele/www/files/HGCalML_trainings/'+os.path.basename(os.path.normpath(train.outputDir)) cb = [] #cb += [plotClusteringDuringTraining( # use_backgather_idx=8 + i, # outputfile=train.outputDir + "/localclust/cluster_" + str(i) + '_', # samplefile=samplepath, # after_n_batches=500, # on_epoch_end=False, # publish=None, # use_event=0) # for i in [0, 2, 4]] # cb += [ plotEventDuringTraining( outputfile=train.outputDir + "/condensation/c_"+str(i), samplefile=samplepath, after_n_batches=500, batchsize=200000, on_epoch_end=False, publish=None, use_event=i) for i in range(5) ] #cb += [ # plotGravNetCoordsDuringTraining( # outputfile=train.outputDir + "/localcoords/coord_" + str(i), # samplefile=samplepath, # after_n_batches=500, # batchsize=200000, # on_epoch_end=False, # publish=None, # use_event=0, # use_prediction_idx=8+i, # ) # for i in [1,3,5] # between 16 and 21 #] # #cb += build_callbacks(train) #by hand from plotting_callbacks import plotClusterSummary cb += [ plotClusterSummary( outputfile=train.outputDir + "/clustering/", samplefile=train.val_data.getSamplePath(train.val_data.samples[0]), after_n_batches=800 ) ] #cb=[] learningrate = 1e-4 nbatch = 120000 train.compileModel(learningrate=learningrate, #gets overwritten by CyclicLR callback anyway loss=None, metrics=None, ) model, history = train.trainModel(nepochs=3, run_eagerly=True, batchsize=nbatch, extend_truth_list_by = len(train.keras_model.outputs_keys), #just adapt truth list to avoid keras error (no effect on model) batchsize_use_sum_of_squares=False, checkperiod=1, # saves a checkpoint model every N epochs verbose=verbosity, 
backup_after_batches=500, additional_callbacks=cb) print("freeze BN") # Note the submodel here its not just train.keras_model for l in train.keras_model.model.layers: if 'gooey_batch_norm' in l.name: l.max_viscosity = 0.99 l.fluidity_decay= 1e-4 #reaches constant 1 after about one epoch if 'FullOCLoss' in l.name: l.use_average_cc_pos = 0.1 l.q_min = 2. l.cont_beta_loss=False l.energy_loss_weight=1e-2 #etc l.position_loss_weight=1e-2 if 'edge_selector' in l.name: l.use_truth=False#IMPORTANT #also stop GravNetLLLocalClusterLoss* from being evaluated learningrate/=10. nbatch = 120000 train.compileModel(learningrate=learningrate, loss=None, metrics=None) model, history = train.trainModel(nepochs=121, run_eagerly=True, batchsize=nbatch, extend_truth_list_by = len(train.keras_model.outputs_keys), #just adapt truth list to avoid keras error (no effect on model) batchsize_use_sum_of_squares=False, checkperiod=1, # saves a checkpoint model every N epochs verbose=verbosity, backup_after_batches=500, additional_callbacks=cb) #
[]
[]
[ "HGCALML" ]
[]
["HGCALML"]
python
1
0
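The trainer above builds its pretrained pre-selection weights path as `os.getenv("HGCALML") + '/models/...'`, which raises a bare TypeError when the variable is unset. A small defensive sketch of the same lookup; the function name and error text are ours:

```python
import os


def pretrained_path() -> str:
    # HGCALML should point at the root of the HGCalML checkout.
    root = os.getenv("HGCALML")
    if root is None:
        raise RuntimeError("HGCALML is unset; export it before training")
    return os.path.join(root, "models", "pre_selection",
                        "KERAS_check_model_last.h5")
```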
chunair/kicad-footprint-generator-master/scripts/TerminalBlock_MetzConnect/make_TerminalBlock_MetzConnect.py
#!/usr/bin/env python import sys import os import math # ensure that the kicad-footprint-generator directory is available #sys.path.append(os.environ.get('KIFOOTPRINTGENERATOR')) # enable package import from parent directory #sys.path.append("D:\hardware\KiCAD\kicad-footprint-generator") # enable package import from parent directory sys.path.append(os.path.join(sys.path[0],"..","..","kicad_mod")) # load kicad_mod path sys.path.append(os.path.join(sys.path[0],"..","..")) # load kicad_mod path sys.path.append(os.path.join(sys.path[0],"..","tools")) # load kicad_mod path from KicadModTree import * # NOQA from footprint_scripts_terminal_blocks import * if __name__ == '__main__': script_generated_note="script-generated using https://github.com/pointhi/kicad-footprint-generator/scripts/TerminalBlock_MetzConnect"; classname="TerminalBlock_MetzConnect" pins=[2,3,4,5,6] rm=5 package_height=12.5 leftbottom_offset=[rm/2, 6.5] ddrill=1.4 pad=[2.7,2.7] screw_diameter=2.2 bevel_height=[5.4,9.2] vsegment_lines_offset=[] opening=[3.5,4] opening_xoffset=0 opening_yoffset=1 opening_elliptic=False secondDrillDiameter=0 secondDrillOffset=[2.5,-5] secondDrillPad=pad secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[1.25,0] fourthHoleDiameter=0 fourthHoleOffset=[1.25,-5.75] fifthHoleDiameter=0 fifthHoleOffset=[1.25,-0.75] secondEllipseSize=[3.6,2.8] secondEllipseOffset=[0,-4.4] fabref_offset=[0,-1] nibbleSize=[] nibblePos=[] for p in pins: name="Type205_RT045{0:02}UBLC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_312051_RT045xxUBLC_OFF-022759T.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_45Degree".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlock45Degree(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, vsegment_lines_offset=vsegment_lines_offset, opening=opening, opening_xoffset=opening_xoffset, opening_yoffset=opening_yoffset, opening_elliptic=opening_elliptic, bevel_height=bevel_height, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, fifthHoleDiameter=fifthHoleDiameter,fifthHoleOffset=fifthHoleOffset, secondDrillDiameter=secondDrillDiameter,secondDrillOffset=secondDrillOffset,secondDrillPad=secondDrillPad, secondEllipseSize=secondEllipseSize,secondEllipseOffset=secondEllipseOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name="${KICAD6_3DMODEL_DIR}/"+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=3.81 package_height=7.3 leftbottom_offset=[1.85, 3.6] ddrill=0.7 pad=[1.4,1.4] screw_diameter=2.5 bevel_height=[0.6,1.9,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=[2,0.5] thirdHoleOffset=[0,-(3.6-0.5/2)] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,2.45] nibbleSize=[] nibblePos=[] for p in pins: name="Type086_RT034{0:02}HBLC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310861_RT034xxHBLC_OFF-026114K.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) 
classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=5.0 package_height=10.5 leftbottom_offset=[2.5, 4] ddrill=1.4 pad=[2.8,2.8] screw_diameter=3.2 bevel_height=[2,package_height-4.5,package_height-3.5] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,4.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type011_RT055{0:02}HBWC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310111_RT055xxHBLC_OFF-022717S.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,5+1) rm=10 package_height=8.2 leftbottom_offset=[2.9, 4.1] ddrill=1.3 pad=[2.6,2.6] screw_diameter=3 bevel_height=[2,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3] nibbleSize=[] nibblePos=[] for p in pins: name="Type067_RT019{0:02}HDWC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310671_RT019xxHDWC_OFF-023605N.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, 
nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,3+1) rm=9.52 package_height=12.5 leftbottom_offset=[4.76, 8] ddrill=1.3 pad=[2.6,2.6] screw_diameter=4 bevel_height=[0.5,4.5,package_height-2] slit_screw=True screw_pin_offset=[0,0.5] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=[2,1] thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type703_RT10N{0:02}HGLU".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_317031_RT10NxxHGLU_OFF-022897S.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=5.0 package_height=8.3 leftbottom_offset=[2.5, 4] ddrill=1.3 pad=[2.6,2.6] screw_diameter=3 bevel_height=[0.5,2,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=[2,1] thirdHoleOffset=[0,-(4.3-0.5)] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,2.9] nibbleSize=[] nibblePos=[] for p in pins: name="Type094_RT035{0:02}HBLU".format(p) webpage="http://www.metz-connect.com/ru/system/files/productfiles/Data_sheet_310941_RT035xxHBLU_OFF-022742T.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=5.08 package_height=8 leftbottom_offset=[2.54, 4] ddrill=1.3 pad=[2.5,2.5] screw_diameter=3 bevel_height=[2,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3] 
nibbleSize=[] nibblePos=[] for p in pins: name="Type101_RT016{0:02}HBWC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_311011_RT016xxHBWC_OFF-022771S.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=3.5 package_height=6.5 leftbottom_offset=[rm/2, 3.7] ddrill=1.2 pad=[2.3,2.3] screw_diameter=2.75 bevel_height=[1.5] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,2.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type059_RT063{0:02}HBWC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310591_RT063xxHBWC_OFF-022684T.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,3+1) rm=5.08 package_height=11 leftbottom_offset=[2.54, 5.5] ddrill=1.4 pad=[2.6,2.6] screw_diameter=3.5 bevel_height=[0.5,3,package_height-1.8] slit_screw=True screw_pin_offset=[0,-0.3] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type073_RT026{0:02}HBLU".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310731_RT026xxHBLU_OFF-022792U.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, 
screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,3+1) rm=6.35 package_height=12.5 leftbottom_offset=[3.175, 8] ddrill=1.3 pad=[2.5,2.5] screw_diameter=4 bevel_height=[0.5,5.5,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=[2,1] thirdHoleOffset=[0,-4] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type701_RT11L{0:02}HGLU".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_317011_RT11LxxHGLU_OFF-022798U.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=7.5 package_height=9 leftbottom_offset=[3.75, 4.5] ddrill=1.3 pad=[2.5,2.5] screw_diameter=3 bevel_height=[2,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[rm/2,0] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3.5] nibbleSize=[] nibblePos=[] for p in pins: name="Type171_RT137{0:02}HBWC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_311711_RT137xxHBWC_OFF-022811Q.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,6+1) rm=7.5 package_height=11 leftbottom_offset=[3.75, 5.5] ddrill=1.4 
pad=[2.6,2.6] screw_diameter=3 bevel_height=[0.6,2.5,package_height-1.9] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[rm/2,0] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,4] nibbleSize=[] nibblePos=[] for p in pins: name="Type175_RT027{0:02}HBLC".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_311751_RT027xxHBLC_OFF-022814U.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note) pins=range(2,4+1) rm=5 package_height=8 leftbottom_offset=[2.5,4] ddrill=1.3 pad=[2.5,2.5] screw_diameter=3 bevel_height=[2,package_height-2] slit_screw=True screw_pin_offset=[0,0] secondHoleDiameter=0 secondHoleOffset=[0,0] thirdHoleDiameter=0 thirdHoleOffset=[rm/2,0] fourthHoleDiameter=0 fourthHoleOffset=[0,0] fabref_offset=[0,3] nibbleSize=[] nibblePos=[] for p in pins: name="Type055_RT015{0:02}HDWU".format(p) webpage="http://www.metz-connect.com/de/system/files/productfiles/Datenblatt_310551_RT015xxHDWU_OFF-022723S.pdf" footprint_name="TerminalBlock_MetzConnect_{0}_1x{2:02}_P{1:3.2f}mm_Horizontal".format(name, rm, p) classname_description="terminal block Metz Connect {0}".format(name, rm) makeTerminalBlockStd(footprint_name=footprint_name, pins=p, rm=rm, package_height=package_height, leftbottom_offset=leftbottom_offset, ddrill=ddrill, pad=pad, screw_diameter=screw_diameter, bevel_height=bevel_height, slit_screw=slit_screw, screw_pin_offset=screw_pin_offset, secondHoleDiameter=secondHoleDiameter, secondHoleOffset=secondHoleOffset, thirdHoleDiameter=thirdHoleDiameter, thirdHoleOffset=thirdHoleOffset, fourthHoleDiameter=fourthHoleDiameter, fourthHoleOffset=fourthHoleOffset, nibbleSize=nibbleSize, nibblePos=nibblePos, fabref_offset=fabref_offset, tags_additional=[], lib_name='${KICAD6_3DMODEL_DIR}/'+classname, classname=classname, classname_description=classname_description, webpage=webpage, script_generated_note=script_generated_note)
[]
[]
[ "KIFOOTPRINTGENERATOR" ]
[]
["KIFOOTPRINTGENERATOR"]
python
1
0
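The commented-out bootstrap at the top of this script appends `os.environ.get('KIFOOTPRINTGENERATOR')` to sys.path; when the variable is unset that appends None, and the later `KicadModTree` import fails with an unhelpful error. A guarded sketch of the same idea:

```python
import os
import sys

# Only extend sys.path when the variable is set and points at a real checkout.
gen_root = os.environ.get("KIFOOTPRINTGENERATOR")
if gen_root and os.path.isdir(gen_root):
    sys.path.append(gen_root)
```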
subnet/kube/kube.go
// Copyright 2016 flannel authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package kube import ( "encoding/json" "errors" "fmt" "io/ioutil" "net" "os" "time" "github.com/coreos/flannel/pkg/ip" "github.com/coreos/flannel/subnet" "github.com/golang/glog" "golang.org/x/net/context" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/util/strategicpatch" "k8s.io/apimachinery/pkg/util/wait" "k8s.io/apimachinery/pkg/watch" clientset "k8s.io/client-go/kubernetes" listers "k8s.io/client-go/listers/core/v1" "k8s.io/client-go/pkg/api" "k8s.io/client-go/pkg/api/v1" "k8s.io/client-go/rest" "k8s.io/client-go/tools/cache" "k8s.io/client-go/tools/clientcmd" ) var ( ErrUnimplemented = errors.New("unimplemented") ) const ( resyncPeriod = 5 * time.Minute nodeControllerSyncTimeout = 10 * time.Minute subnetKubeManagedAnnotation = "flannel.alpha.coreos.com/kube-subnet-manager" backendDataAnnotation = "flannel.alpha.coreos.com/backend-data" backendTypeAnnotation = "flannel.alpha.coreos.com/backend-type" backendPublicIPAnnotation = "flannel.alpha.coreos.com/public-ip" backendPublicIPOverwriteAnnotation = "flannel.alpha.coreos.com/public-ip-overwrite" netConfPath = "/etc/kube-flannel/net-conf.json" ) type kubeSubnetManager struct { client clientset.Interface nodeName string nodeStore listers.NodeLister nodeController cache.Controller subnetConf *subnet.Config events chan subnet.Event } func NewSubnetManager(apiUrl, kubeconfig string) (subnet.Manager, error) { var cfg *rest.Config var err error // Use out of cluster config if the URL or kubeconfig have been specified. Otherwise use incluster config. if apiUrl != "" || kubeconfig != "" { cfg, err = clientcmd.BuildConfigFromFlags(apiUrl, kubeconfig) if err != nil { return nil, fmt.Errorf("unable to create k8s config: %v", err) } } else { cfg, err = rest.InClusterConfig() if err != nil { return nil, fmt.Errorf("unable to initialize inclusterconfig: %v", err) } } c, err := clientset.NewForConfig(cfg) if err != nil { return nil, fmt.Errorf("unable to initialize client: %v", err) } // The kube subnet mgr needs to know the k8s node name that it's running on so it can annotate it. // If we're running as a pod then the POD_NAME and POD_NAMESPACE will be populated and can be used to find the node // name. Otherwise, the environment variable NODE_NAME can be passed in. 
nodeName := os.Getenv("NODE_NAME") if nodeName == "" { podName := os.Getenv("POD_NAME") podNamespace := os.Getenv("POD_NAMESPACE") if podName == "" || podNamespace == "" { return nil, fmt.Errorf("env variables POD_NAME and POD_NAMESPACE must be set") } pod, err := c.Pods(podNamespace).Get(podName, metav1.GetOptions{}) if err != nil { return nil, fmt.Errorf("error retrieving pod spec for '%s/%s': %v", podNamespace, podName, err) } nodeName = pod.Spec.NodeName if nodeName == "" { return nil, fmt.Errorf("node name not present in pod spec '%s/%s'", podNamespace, podName) } } netConf, err := ioutil.ReadFile(netConfPath) if err != nil { return nil, fmt.Errorf("failed to read net conf: %v", err) } sc, err := subnet.ParseConfig(string(netConf)) if err != nil { return nil, fmt.Errorf("error parsing subnet config: %s", err) } sm, err := newKubeSubnetManager(c, sc, nodeName) if err != nil { return nil, fmt.Errorf("error creating network manager: %s", err) } go sm.Run(context.Background()) glog.Infof("Waiting %s for node controller to sync", nodeControllerSyncTimeout) err = wait.Poll(time.Second, nodeControllerSyncTimeout, func() (bool, error) { return sm.nodeController.HasSynced(), nil }) if err != nil { return nil, fmt.Errorf("error waiting for nodeController to sync state: %v", err) } glog.Infof("Node controller sync successful") return sm, nil } func newKubeSubnetManager(c clientset.Interface, sc *subnet.Config, nodeName string) (*kubeSubnetManager, error) { var ksm kubeSubnetManager ksm.client = c ksm.nodeName = nodeName ksm.subnetConf = sc ksm.events = make(chan subnet.Event, 5000) indexer, controller := cache.NewIndexerInformer( &cache.ListWatch{ ListFunc: func(options metav1.ListOptions) (runtime.Object, error) { return ksm.client.CoreV1().Nodes().List(options) }, WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) { return ksm.client.CoreV1().Nodes().Watch(options) }, }, &v1.Node{}, resyncPeriod, cache.ResourceEventHandlerFuncs{ AddFunc: func(obj interface{}) { ksm.handleAddLeaseEvent(subnet.EventAdded, obj) }, UpdateFunc: ksm.handleUpdateLeaseEvent, DeleteFunc: func(obj interface{}) { ksm.handleAddLeaseEvent(subnet.EventRemoved, obj) }, }, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, ) ksm.nodeController = controller ksm.nodeStore = listers.NewNodeLister(indexer) return &ksm, nil } func (ksm *kubeSubnetManager) handleAddLeaseEvent(et subnet.EventType, obj interface{}) { n := obj.(*v1.Node) if s, ok := n.Annotations[subnetKubeManagedAnnotation]; !ok || s != "true" { return } l, err := nodeToLease(*n) if err != nil { glog.Infof("Error turning node %q to lease: %v", n.ObjectMeta.Name, err) return } ksm.events <- subnet.Event{et, l} } func (ksm *kubeSubnetManager) handleUpdateLeaseEvent(oldObj, newObj interface{}) { o := oldObj.(*v1.Node) n := newObj.(*v1.Node) if s, ok := n.Annotations[subnetKubeManagedAnnotation]; !ok || s != "true" { return } if o.Annotations[backendDataAnnotation] == n.Annotations[backendDataAnnotation] && o.Annotations[backendTypeAnnotation] == n.Annotations[backendTypeAnnotation] && o.Annotations[backendPublicIPAnnotation] == n.Annotations[backendPublicIPAnnotation] { return // No change to lease } l, err := nodeToLease(*n) if err != nil { glog.Infof("Error turning node %q to lease: %v", n.ObjectMeta.Name, err) return } ksm.events <- subnet.Event{subnet.EventAdded, l} } func (ksm *kubeSubnetManager) GetNetworkConfig(ctx context.Context) (*subnet.Config, error) { return ksm.subnetConf, nil } func (ksm *kubeSubnetManager) 
AcquireLease(ctx context.Context, attrs *subnet.LeaseAttrs) (*subnet.Lease, error) { cachedNode, err := ksm.nodeStore.Get(ksm.nodeName) if err != nil { return nil, err } nobj, err := api.Scheme.DeepCopy(cachedNode) if err != nil { return nil, err } n := nobj.(*v1.Node) if n.Spec.PodCIDR == "" { return nil, fmt.Errorf("node %q pod cidr not assigned", ksm.nodeName) } bd, err := attrs.BackendData.MarshalJSON() if err != nil { return nil, err } _, cidr, err := net.ParseCIDR(n.Spec.PodCIDR) if err != nil { return nil, err } if n.Annotations[backendDataAnnotation] != string(bd) || n.Annotations[backendTypeAnnotation] != attrs.BackendType || n.Annotations[backendPublicIPAnnotation] != attrs.PublicIP.String() || n.Annotations[subnetKubeManagedAnnotation] != "true" || (n.Annotations[backendPublicIPOverwriteAnnotation] != "" && n.Annotations[backendPublicIPOverwriteAnnotation] != attrs.PublicIP.String()) { n.Annotations[backendTypeAnnotation] = attrs.BackendType n.Annotations[backendDataAnnotation] = string(bd) if n.Annotations[backendPublicIPOverwriteAnnotation] != "" { if n.Annotations[backendPublicIPAnnotation] != n.Annotations[backendPublicIPOverwriteAnnotation] { glog.Infof("Overriding public ip with '%s' from node annotation '%s'", n.Annotations[backendPublicIPOverwriteAnnotation], backendPublicIPOverwriteAnnotation) n.Annotations[backendPublicIPAnnotation] = n.Annotations[backendPublicIPOverwriteAnnotation] } } else { n.Annotations[backendPublicIPAnnotation] = attrs.PublicIP.String() } n.Annotations[subnetKubeManagedAnnotation] = "true" oldData, err := json.Marshal(cachedNode) if err != nil { return nil, err } newData, err := json.Marshal(n) if err != nil { return nil, err } patchBytes, err := strategicpatch.CreateTwoWayMergePatch(oldData, newData, v1.Node{}) if err != nil { return nil, fmt.Errorf("failed to create patch for node %q: %v", ksm.nodeName, err) } _, err = ksm.client.CoreV1().Nodes().Patch(ksm.nodeName, types.StrategicMergePatchType, patchBytes, "status") if err != nil { return nil, err } } return &subnet.Lease{ Subnet: ip.FromIPNet(cidr), Attrs: *attrs, Expiration: time.Now().Add(24 * time.Hour), }, nil } func (ksm *kubeSubnetManager) WatchLeases(ctx context.Context, cursor interface{}) (subnet.LeaseWatchResult, error) { select { case event := <-ksm.events: return subnet.LeaseWatchResult{ Events: []subnet.Event{event}, }, nil case <-ctx.Done(): return subnet.LeaseWatchResult{}, nil } } func (ksm *kubeSubnetManager) Run(ctx context.Context) { glog.Infof("Starting kube subnet manager") ksm.nodeController.Run(ctx.Done()) } func nodeToLease(n v1.Node) (l subnet.Lease, err error) { l.Attrs.PublicIP, err = ip.ParseIP4(n.Annotations[backendPublicIPAnnotation]) if err != nil { return l, err } l.Attrs.BackendType = n.Annotations[backendTypeAnnotation] l.Attrs.BackendData = json.RawMessage(n.Annotations[backendDataAnnotation]) _, cidr, err := net.ParseCIDR(n.Spec.PodCIDR) if err != nil { return l, err } l.Subnet = ip.FromIPNet(cidr) return l, nil } // unimplemented func (ksm *kubeSubnetManager) RenewLease(ctx context.Context, lease *subnet.Lease) error { return ErrUnimplemented } func (ksm *kubeSubnetManager) WatchLease(ctx context.Context, sn ip.IP4Net, cursor interface{}) (subnet.LeaseWatchResult, error) { return subnet.LeaseWatchResult{}, ErrUnimplemented } func (ksm *kubeSubnetManager) Name() string { return fmt.Sprintf("Kubernetes Subnet Manager - %s", ksm.nodeName) }
[ "\"NODE_NAME\"", "\"POD_NAME\"", "\"POD_NAMESPACE\"" ]
[]
[ "POD_NAMESPACE", "NODE_NAME", "POD_NAME" ]
[]
["POD_NAMESPACE", "NODE_NAME", "POD_NAME"]
go
3
0
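NewSubnetManager above resolves the node name in two steps: NODE_NAME wins outright, otherwise POD_NAME and POD_NAMESPACE must both be set so the pod spec can supply the name. A sketch of that resolution order, where `lookup_node_from_pod` stands in for the Kubernetes API call:

```python
import os


def resolve_node_name(lookup_node_from_pod) -> str:
    node = os.getenv("NODE_NAME", "")
    if node:
        return node
    pod = os.getenv("POD_NAME", "")
    namespace = os.getenv("POD_NAMESPACE", "")
    if not pod or not namespace:
        raise RuntimeError("env variables POD_NAME and POD_NAMESPACE must be set")
    node = lookup_node_from_pod(namespace, pod)  # pod.Spec.NodeName in the Go code
    if not node:
        raise RuntimeError(f"node name not present in pod spec '{namespace}/{pod}'")
    return node
```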
test/acceptance/testing.go
package acceptance import ( "bytes" "encoding/json" "fmt" "io" "io/ioutil" "os" "os/exec" "path" "strings" "testing" "github.com/cloudskiff/driftctl/pkg/analyser" cmderrors "github.com/cloudskiff/driftctl/pkg/cmd/errors" "github.com/pkg/errors" "github.com/sirupsen/logrus" "github.com/cloudskiff/driftctl/test" "github.com/spf13/cobra" "github.com/cloudskiff/driftctl/logger" "github.com/cloudskiff/driftctl/pkg/cmd" ) type AccCheck struct { PreExec func() PostExec func() Env map[string]string Check func(result *ScanResult, stdout string, err error) } type AccTestCase struct { Path string Args []string OnStart func() OnEnd func() Checks []AccCheck tmpResultFilePath string } func (c *AccTestCase) createResultFile(t *testing.T) error { tmpDir := t.TempDir() file, err := ioutil.TempFile(tmpDir, "result") if err != nil { return err } defer file.Close() c.tmpResultFilePath = file.Name() return nil } func (c *AccTestCase) validate() error { if c.Checks == nil || len(c.Checks) == 0 { return fmt.Errorf("checks attribute must be defined") } if c.Path == "" { return fmt.Errorf("path attribute must be defined") } for _, arg := range c.Args { if arg == "--output" || arg == "-o" { return fmt.Errorf("--output flag should not be defined in test case, it is automatically tested") } } return nil } func (c *AccTestCase) getResultFilePath() string { return c.tmpResultFilePath } func (c *AccTestCase) getResult(t *testing.T) *ScanResult { analysis := analyser.Analysis{} result, err := ioutil.ReadFile(c.getResultFilePath()) if err != nil { return nil } if err := json.Unmarshal(result, &analysis); err != nil { return nil } return NewScanResult(t, analysis) } /** * Retrieve env from os.Environ() but override every variable prefixed with ACC_ * e.g. ACC_AWS_PROFILE will override AWS_PROFILE */ func (c *AccTestCase) resolveTerraformEnv() []string { environMap := make(map[string]string, len(os.Environ())) const PREFIX string = "ACC_" for _, e := range os.Environ() { envKeyValue := strings.SplitN(e, "=", 2) if strings.HasPrefix(envKeyValue[0], PREFIX) { varName := strings.TrimPrefix(envKeyValue[0], PREFIX) environMap[varName] = envKeyValue[1] continue } if _, exist := environMap[envKeyValue[0]]; !exist { environMap[envKeyValue[0]] = envKeyValue[1] } } results := make([]string, 0, len(environMap)) for k, v := range environMap { results = append(results, fmt.Sprintf("%s=%s", k, v)) } return results } func (c *AccTestCase) terraformInit() error { _, err := os.Stat(path.Join(c.Path, ".terraform")) if os.IsNotExist(err) { logrus.Debug("Running terraform init ...") cmd := exec.Command("terraform", "init", "-input=false") cmd.Dir = c.Path cmd.Env = c.resolveTerraformEnv() out, err := cmd.CombinedOutput() if err != nil { return errors.Wrap(err, string(out)) } logrus.Debug("Terraform init done") } return nil } func (c *AccTestCase) terraformApply() error { logrus.Debug("Running terraform apply ...") cmd := exec.Command("terraform", "apply", "-auto-approve") cmd.Dir = c.Path cmd.Env = c.resolveTerraformEnv() out, err := cmd.CombinedOutput() if err != nil { return errors.Wrap(err, string(out)) } logrus.Debug("Terraform apply done") return nil } func (c *AccTestCase) terraformDestroy() error { logrus.Debug("Running terraform destroy ...") cmd := exec.Command("terraform", "destroy", "-auto-approve") cmd.Dir = c.Path cmd.Env = c.resolveTerraformEnv() out, err := cmd.CombinedOutput() if err != nil { return errors.Wrap(err, string(out)) } logrus.Debug("Terraform destroy done") return nil } func runDriftCtlCmd(driftctlCmd 
*cmd.DriftctlCmd) (*cobra.Command, string, error) { old := os.Stdout // keep backup of the real stdout r, w, _ := os.Pipe() os.Stdout = w cmd, cmdErr := driftctlCmd.ExecuteC() // Ignore not in sync errors in acceptance test context if _, isNotInSyncErr := cmdErr.(cmderrors.InfrastructureNotInSync); isNotInSyncErr { cmdErr = nil } outC := make(chan string) // copy the output in a separate goroutine so printing can't block indefinitely go func() { var buf bytes.Buffer _, _ = io.Copy(&buf, r) outC <- buf.String() }() // back to normal state w.Close() os.Stdout = old // restoring the real stdout out := <-outC return cmd, out, cmdErr } func Run(t *testing.T, c AccTestCase) { if os.Getenv("DRIFTCTL_ACC") == "" { t.Skip() } if err := c.validate(); err != nil { t.Fatal(err) } if c.OnStart != nil { c.OnStart() } // Disable terraform version checks // @link https://www.terraform.io/docs/commands/index.html#upgrade-and-security-bulletin-checks checkpoint := os.Getenv("CHECKPOINT_DISABLE") os.Setenv("CHECKPOINT_DISABLE", "true") // Execute terraform init if .terraform folder is not found in test folder err := c.terraformInit() if err != nil { t.Fatal(err) } err = c.terraformApply() if err != nil { t.Fatal(err) } defer func() { err := c.terraformDestroy() os.Setenv("CHECKPOINT_DISABLE", checkpoint) if err != nil { t.Fatal(err) } }() logger.Init(logger.GetConfig()) driftctlCmd := cmd.NewDriftctlCmd(test.Build{}) err = c.createResultFile(t) if err != nil { t.Fatal(err) } if c.Args != nil { c.Args = append([]string{""}, c.Args...) c.Args = append(c.Args, "--from", fmt.Sprintf("tfstate://%s", path.Join(c.Path, "terraform.tfstate")), "--output", fmt.Sprintf("json://%s", c.getResultFilePath()), ) } os.Args = c.Args for _, check := range c.Checks { if check.Check == nil { t.Fatal("Check attribute must be defined") } if check.PreExec != nil { check.PreExec() } if len(check.Env) > 0 { for key, value := range check.Env { os.Setenv(key, value) } } _, out, cmdErr := runDriftCtlCmd(driftctlCmd) if len(check.Env) > 0 { for key := range check.Env { _ = os.Unsetenv(key) } } check.Check(c.getResult(t), out, cmdErr) if check.PostExec != nil { check.PostExec() } } if c.OnEnd != nil { c.OnEnd() } }
[ "\"DRIFTCTL_ACC\"", "\"CHECKPOINT_DISABLE\"" ]
[]
[ "DRIFTCTL_ACC", "CHECKPOINT_DISABLE" ]
[]
["DRIFTCTL_ACC", "CHECKPOINT_DISABLE"]
go
2
0
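The resolveTerraformEnv helper in testing.go above overrides every variable that has an ACC_-prefixed counterpart (e.g. ACC_AWS_PROFILE overrides AWS_PROFILE). A minimal Python sketch of the same resolution rule; the function name and sample values are illustrative, not part of driftctl:

```python
import os

def resolve_acc_env(environ=None, prefix="ACC_"):
    """Mirror resolveTerraformEnv: a PREFIX-ed variable overrides its
    unprefixed counterpart; everything else passes through unchanged."""
    environ = dict(os.environ) if environ is None else dict(environ)
    resolved = {}
    for key, value in environ.items():
        if key.startswith(prefix):
            resolved[key[len(prefix):]] = value   # ACC_FOO always wins over FOO
        else:
            resolved.setdefault(key, value)       # FOO kept only if not already overridden
    return [f"{k}={v}" for k, v in resolved.items()]

print(resolve_acc_env({"AWS_PROFILE": "default", "ACC_AWS_PROFILE": "acc"}))
# ['AWS_PROFILE=acc']
```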
train.py
import os import argparse import datetime import tensorflow as tf import yolo.config as cfg from yolo.yolo_net import YOLONet from utils.timer import Timer from utils.pascal_voc import pascal_voc slim = tf.contrib.slim class Solver(object): def __init__(self, net, data): self.net = net self.data = data self.weights_file = cfg.WEIGHTS_FILE self.max_iter = cfg.MAX_ITER self.initial_learning_rate = cfg.LEARNING_RATE self.decay_steps = cfg.DECAY_STEPS self.decay_rate = cfg.DECAY_RATE self.staircase = cfg.STAIRCASE self.summary_iter = cfg.SUMMARY_ITER self.save_iter = cfg.SAVE_ITER self.output_dir = os.path.join( cfg.OUTPUT_DIR, datetime.datetime.now().strftime('%Y_%m_%d_%H_%M')) if not os.path.exists(self.output_dir): os.makedirs(self.output_dir) self.save_cfg() self.variable_to_restore = tf.global_variables() self.saver = tf.train.Saver(self.variable_to_restore, max_to_keep=None) self.ckpt_file = os.path.join(self.output_dir, 'yolo') self.summary_op = tf.summary.merge_all() self.writer = tf.summary.FileWriter(self.output_dir, flush_secs=60) self.global_step = tf.train.create_global_step() self.learning_rate = tf.train.exponential_decay( self.initial_learning_rate, self.global_step, self.decay_steps, self.decay_rate, self.staircase, name='learning_rate') self.optimizer = tf.train.GradientDescentOptimizer( learning_rate=self.learning_rate) self.train_op = slim.learning.create_train_op( self.net.total_loss, self.optimizer, global_step=self.global_step) gpu_options = tf.GPUOptions() config = tf.ConfigProto(gpu_options=gpu_options) self.sess = tf.Session(config=config) self.sess.run(tf.global_variables_initializer()) # if self.weights_file is not None: # print('Restoring weights from: ' + self.weights_file) # self.saver.restore(self.sess, self.weights_file) self.writer.add_graph(self.sess.graph) def train(self): train_timer = Timer() load_timer = Timer() for step in range(1, self.max_iter + 1): load_timer.tic() images, labels = self.data.get() load_timer.toc() feed_dict = {self.net.images: images, self.net.labels: labels} if step % self.summary_iter == 0: if step % (self.summary_iter * 1) == 0: train_timer.tic() summary_str, loss, _ = self.sess.run( [self.summary_op, self.net.total_loss, self.train_op], feed_dict=feed_dict) train_timer.toc() # log_str = '''{} Epoch: {}, Step: {}, Learning rate: {},''' # ''' Loss: {:5.3f}\nSpeed: {:.3f}s/iter,''' # '''' Load: {:.3f}s/iter, Remain: {}'''.format( # datetime.datetime.now().strftime('%m-%d %H:%M:%S'), # self.data.epoch, # int(step), # round(self.learning_rate.eval(session=self.sess), 6), # loss, # train_timer.average_time, # load_timer.average_time, # train_timer.remain(step, self.max_iter)) log_str = '{} Epoch: {}, Step: {}, Learning rate: {}, Loss: {:5.3f}\nSpeed: {:.3f}s/iter,Load: {:.3f}s/iter, Remain: {}'.format( datetime.datetime.now().strftime('%m-%d %H:%M:%S'), self.data.epoch, int(step), round(self.learning_rate.eval(session=self.sess), 6), loss, train_timer.average_time, load_timer.average_time, train_timer.remain(step, self.max_iter)) print(log_str) else: train_timer.tic() summary_str, _ = self.sess.run( [self.summary_op, self.train_op], feed_dict=feed_dict) train_timer.toc() self.writer.add_summary(summary_str, step) else: train_timer.tic() self.sess.run(self.train_op, feed_dict=feed_dict) train_timer.toc() if step % self.save_iter == 0: print('{} Saving checkpoint file to: {}'.format( datetime.datetime.now().strftime('%m-%d %H:%M:%S'), self.output_dir)) self.saver.save( self.sess, self.ckpt_file, global_step=self.global_step) def 
save_cfg(self): with open(os.path.join(self.output_dir, 'config.txt'), 'w') as f: cfg_dict = cfg.__dict__ for key in sorted(cfg_dict.keys()): if key[0].isupper(): cfg_str = '{}: {}\n'.format(key, cfg_dict[key]) f.write(cfg_str) def update_config_paths(data_dir, weights_file): cfg.DATA_PATH = data_dir cfg.PASCAL_PATH = os.path.join(data_dir, 'pascal_voc') cfg.CACHE_PATH = os.path.join(cfg.PASCAL_PATH, 'cache') cfg.OUTPUT_DIR = os.path.join(cfg.PASCAL_PATH, 'output') cfg.WEIGHTS_DIR = os.path.join(cfg.PASCAL_PATH, 'weights') cfg.WEIGHTS_FILE = os.path.join(cfg.WEIGHTS_DIR, weights_file) parser = argparse.ArgumentParser() parser.add_argument('--weights', default="YOLO_small.ckpt", type=str) parser.add_argument('--data_dir', default="data", type=str) parser.add_argument('--threshold', default=0.2, type=float) parser.add_argument('--iou_threshold', default=0.5, type=float) parser.add_argument('--gpu', default='', type=str) args = parser.parse_args() if args.gpu is not None: cfg.GPU = args.gpu if args.data_dir != cfg.DATA_PATH: update_config_paths(args.data_dir, args.weights) os.environ['CUDA_VISIBLE_DEVICES'] = cfg.GPU yolo = YOLONet() pascal = pascal_voc('train') solver = Solver(yolo, pascal) print('Start training ...') solver.train() print('Done training.')
[]
[]
[ "CUDA_VISIBLE_DEVICES" ]
[]
["CUDA_VISIBLE_DEVICES"]
python
1
0
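The Solver in train.py above delegates its schedule to tf.train.exponential_decay. For reference, the formula that op documents is lr0 * decay_rate ** (global_step / decay_steps), with the exponent floored in staircase mode; a standalone sketch:

```python
import math

def exponential_decay(lr0, global_step, decay_steps, decay_rate, staircase=False):
    """Same schedule tf.train.exponential_decay documents."""
    exponent = global_step / decay_steps
    if staircase:
        exponent = math.floor(exponent)  # decay in discrete jumps instead of continuously
    return lr0 * decay_rate ** exponent

# e.g. lr0=0.1, decay_rate=0.9, decay_steps=1000: after 2000 steps -> 0.1 * 0.81
print(exponential_decay(0.1, 2000, 1000, 0.9))
```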
tensorflow_data_validation/utils/profile_util_test.py
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for profile utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import absltest import apache_beam as beam import numpy as np import six from tensorflow_data_validation.utils import profile_util class ProfileUtilTest(absltest.TestCase): def test_profile_input(self): examples = [ { 'a': np.array([1.0, 2.0], dtype=np.floating), 'b': np.array(['a', 'b', 'c', 'e'], dtype=np.object), }, { 'a': np.array([3.0, 4.0, np.NaN, 5.0], dtype=np.floating), }, { 'b': np.array(['d', 'e', 'f'], dtype=np.object), 'd': np.array([10, 20, 30], dtype=np.integer), }, { 'b': np.array(['a', 'b', 'c'], dtype=np.object), }, { 'c': np.array(['d', 'e', 'f'], dtype=np.object), }, ] expected_distributions = { 'int_feature_values_count': [3L, 3L, 3L, 1L], 'float_feature_values_count': [2L, 4L, 6L, 2L], 'string_feature_values_count': [3L, 4L, 13L, 4L], } p = beam.Pipeline() _ = ( p | 'Create' >> beam.Create(examples) | 'Profile' >> profile_util.Profile()) runner = p.run() runner.wait_until_finish() result_metrics = runner.metrics() num_metrics = len( result_metrics.query(beam.metrics.metric.MetricsFilter().with_namespace( profile_util.METRICS_NAMESPACE))['counters']) self.assertEqual(num_metrics, 1) counter = result_metrics.query(beam.metrics.metric.MetricsFilter() .with_name('num_instances'))['counters'] self.assertEqual(len(counter), 1) self.assertEqual(counter[0].committed, 5L) for distribution_name, expected_value in six.iteritems( expected_distributions): metric_filter = beam.metrics.metric.MetricsFilter().with_name( distribution_name) distribution = result_metrics.query(metric_filter)['distributions'] self.assertEqual(len(distribution), 1) self.assertEqual([ distribution[0].committed.min, distribution[0].committed.max, distribution[0].committed.sum, distribution[0].committed.count ], expected_value) if __name__ == '__main__': absltest.main()
[]
[]
[]
[]
[]
python
null
null
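profile_util_test.py above asserts on committed Beam metric values. For orientation, a minimal sketch of how such counters and distributions are emitted on the pipeline side; the namespace and DoFn are illustrative stand-ins, not the real profile_util implementation:

```python
import apache_beam as beam
from apache_beam.metrics import Metrics

class CountValues(beam.DoFn):
    """Illustrative DoFn emitting the two metric kinds the test queries."""
    def __init__(self):
        self.num_instances = Metrics.counter("illustrative_ns", "num_instances")
        self.values_count = Metrics.distribution("illustrative_ns", "int_feature_values_count")

    def process(self, example):
        self.num_instances.inc()                 # aggregates into counter[0].committed
        self.values_count.update(len(example))   # folds into committed.min/max/sum/count
        yield example
```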
src/app.py
from bottle import route, run, static_file import os import math import random import time import json def get_number_of_prime_bits(): """ The number of bits to pull from rng for prime candidates :return: number of bits as int """ return int(os.environ.get('N_BITS', '40')) def get_search_time(): """ The number of seconds to search for primes :return: number of seconds as int """ return int(os.environ.get('SEARCH_TIME', '2')) def get_static_root(): """ Return root directory for static content :return static content root directory as string """ return os.environ.get('STATIC_CONTENT_ROOT', '/opt/app-root/src/static') @route('/') def root_page(): """ Route for root path, just try to return index.html """ return static_file('/index.html', get_static_root()) @route('/static/<path:path>') def server_static(path): """ Any path not defined more specifically is handled as static content """ return static_file(path, get_static_root()) @route('/healthz') def health(): """ Status endpoint for health checks """ return 'OK' @route('/api/v1/primes') def primes(): # algorithm from https://stackoverflow.com/questions/4114167/ def is_prime(n): if n == 2: return True if n % 2 == 0 or n <= 1: return False sqr = int(math.sqrt(n)) + 1 for divisor in range(3, sqr, 2): if n % divisor == 0: return False return True # calculate the end time end_ts = time.time() + get_search_time() # loop until end time (will actually go over, but that's life) prime_list = [] while time.time() < end_ts: candidate = random.getrandbits(get_number_of_prime_bits()) if is_prime(candidate): prime_list.append(candidate) prime_list.sort(reverse=True) # return the prime list as JSON res = json.dumps(prime_list) print(res) return res if __name__ == '__main__': # Here we simply start the server process run(host='0.0.0.0', port=8080, reloader=True)
[]
[]
[ "N_BITS", "STATIC_CONTENT_ROOT", "SEARCH_TIME" ]
[]
["N_BITS", "STATIC_CONTENT_ROOT", "SEARCH_TIME"]
python
3
0
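By the prime number theorem, a random n-bit candidate is prime with probability roughly 1/(n ln 2), so with the default N_BITS=40 the two-second loop in app.py above lands on a prime about once every 28 candidates. A quick back-of-the-envelope check:

```python
import math

n_bits = 40
hit_rate = 1 / (n_bits * math.log(2))  # ~0.036: roughly 1 in 28 candidates is prime
print(f"expected prime density at {n_bits} bits: {hit_rate:.3f}")
```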
sessionctx/binloginfo/binloginfo_test.go
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. package binloginfo_test import ( "context" "net" "os" "strconv" "sync" "testing" "time" . "github.com/pingcap/check" "github.com/pingcap/errors" "github.com/pingcap/failpoint" "github.com/pingcap/parser/model" "github.com/pingcap/parser/mysql" "github.com/pingcap/parser/terror" pumpcli "github.com/pingcap/tidb-tools/tidb-binlog/pump_client" "github.com/pingcap/tidb/ddl" "github.com/pingcap/tidb/domain" "github.com/pingcap/tidb/kv" "github.com/pingcap/tidb/session" "github.com/pingcap/tidb/sessionctx" "github.com/pingcap/tidb/sessionctx/binloginfo" "github.com/pingcap/tidb/store/mockstore" "github.com/pingcap/tidb/table" "github.com/pingcap/tidb/table/tables" "github.com/pingcap/tidb/types" "github.com/pingcap/tidb/util/codec" "github.com/pingcap/tidb/util/collate" "github.com/pingcap/tidb/util/logutil" "github.com/pingcap/tidb/util/testkit" binlog "github.com/pingcap/tipb/go-binlog" "google.golang.org/grpc" ) func TestT(t *testing.T) { CustomVerboseFlag = true logLevel := os.Getenv("log_level") logutil.InitLogger(logutil.NewLogConfig(logLevel, logutil.DefaultLogFormat, "", logutil.EmptyFileLogConfig, false)) TestingT(t) } type mockBinlogPump struct { mu struct { sync.Mutex payloads [][]byte mockFail bool } } func (p *mockBinlogPump) WriteBinlog(ctx context.Context, req *binlog.WriteBinlogReq) (*binlog.WriteBinlogResp, error) { p.mu.Lock() defer p.mu.Unlock() if p.mu.mockFail { return &binlog.WriteBinlogResp{}, errors.New("mock fail") } p.mu.payloads = append(p.mu.payloads, req.Payload) return &binlog.WriteBinlogResp{}, nil } // PullBinlogs implements PumpServer interface. 
func (p *mockBinlogPump) PullBinlogs(req *binlog.PullBinlogReq, srv binlog.Pump_PullBinlogsServer) error { return nil } var _ = Suite(&testBinlogSuite{}) type testBinlogSuite struct { store kv.Storage domain *domain.Domain unixFile string serv *grpc.Server pump *mockBinlogPump client *pumpcli.PumpsClient ddl ddl.DDL } const maxRecvMsgSize = 64 * 1024 func (s *testBinlogSuite) SetUpSuite(c *C) { store, err := mockstore.NewMockTikvStore() c.Assert(err, IsNil) s.store = store session.SetSchemaLease(0) s.unixFile = "/tmp/mock-binlog-pump" + strconv.FormatInt(time.Now().UnixNano(), 10) l, err := net.Listen("unix", s.unixFile) c.Assert(err, IsNil) s.serv = grpc.NewServer(grpc.MaxRecvMsgSize(maxRecvMsgSize)) s.pump = new(mockBinlogPump) binlog.RegisterPumpServer(s.serv, s.pump) go s.serv.Serve(l) opt := grpc.WithDialer(func(addr string, timeout time.Duration) (net.Conn, error) { return net.DialTimeout("unix", addr, timeout) }) clientCon, err := grpc.Dial(s.unixFile, opt, grpc.WithInsecure()) c.Assert(err, IsNil) c.Assert(clientCon, NotNil) tk := testkit.NewTestKit(c, s.store) s.domain, err = session.BootstrapSession(store) c.Assert(err, IsNil) tk.MustExec("use test") sessionDomain := domain.GetDomain(tk.Se.(sessionctx.Context)) s.ddl = sessionDomain.DDL() s.client = binloginfo.MockPumpsClient(binlog.NewPumpClient(clientCon)) s.ddl.SetBinlogClient(s.client) } func (s *testBinlogSuite) TearDownSuite(c *C) { s.ddl.Stop() s.serv.Stop() os.Remove(s.unixFile) s.domain.Close() s.store.Close() } func (s *testBinlogSuite) TestBinlog(c *C) { tk := testkit.NewTestKit(c, s.store) tk.MustExec("use test") tk.Se.GetSessionVars().BinlogClient = s.client pump := s.pump tk.MustExec("drop table if exists local_binlog") ddlQuery := "create table local_binlog (id int unique key, name varchar(10)) shard_row_id_bits=1" binlogDDLQuery := "create table local_binlog (id int unique key, name varchar(10)) /*!90000 shard_row_id_bits=1 */" tk.MustExec(ddlQuery) var matched bool // got matched pre DDL and commit DDL for i := 0; i < 10; i++ { preDDL, commitDDL, _ := getLatestDDLBinlog(c, pump, binlogDDLQuery) if preDDL != nil && commitDDL != nil { if preDDL.DdlJobId == commitDDL.DdlJobId { c.Assert(commitDDL.StartTs, Equals, preDDL.StartTs) c.Assert(commitDDL.CommitTs, Greater, commitDDL.StartTs) matched = true break } } time.Sleep(time.Millisecond * 10) } c.Assert(matched, IsTrue) tk.MustExec("insert local_binlog values (1, 'abc'), (2, 'cde')") prewriteVal := getLatestBinlogPrewriteValue(c, pump) c.Assert(prewriteVal.SchemaVersion, Greater, int64(0)) c.Assert(prewriteVal.Mutations[0].TableId, Greater, int64(0)) expected := [][]types.Datum{ {types.NewIntDatum(1), types.NewCollationStringDatum("abc", mysql.DefaultCollationName, collate.DefaultLen)}, {types.NewIntDatum(2), types.NewCollationStringDatum("cde", mysql.DefaultCollationName, collate.DefaultLen)}, } gotRows := mutationRowsToRows(c, prewriteVal.Mutations[0].InsertedRows, 2, 4) c.Assert(gotRows, DeepEquals, expected) tk.MustExec("update local_binlog set name = 'xyz' where id = 2") prewriteVal = getLatestBinlogPrewriteValue(c, pump) oldRow := [][]types.Datum{ {types.NewIntDatum(2), types.NewCollationStringDatum("cde", mysql.DefaultCollationName, collate.DefaultLen)}, } newRow := [][]types.Datum{ {types.NewIntDatum(2), types.NewCollationStringDatum("xyz", mysql.DefaultCollationName, collate.DefaultLen)}, } gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].UpdatedRows, 1, 3) c.Assert(gotRows, DeepEquals, oldRow) gotRows = mutationRowsToRows(c, 
prewriteVal.Mutations[0].UpdatedRows, 7, 9) c.Assert(gotRows, DeepEquals, newRow) tk.MustExec("delete from local_binlog where id = 1") prewriteVal = getLatestBinlogPrewriteValue(c, pump) gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].DeletedRows, 1, 3) expected = [][]types.Datum{ {types.NewIntDatum(1), types.NewCollationStringDatum("abc", mysql.DefaultCollationName, collate.DefaultLen)}, } c.Assert(gotRows, DeepEquals, expected) // Test table primary key is not integer. tk.MustExec("create table local_binlog2 (name varchar(64) primary key, age int)") tk.MustExec("insert local_binlog2 values ('abc', 16), ('def', 18)") tk.MustExec("delete from local_binlog2 where name = 'def'") prewriteVal = getLatestBinlogPrewriteValue(c, pump) c.Assert(prewriteVal.Mutations[0].Sequence[0], Equals, binlog.MutationType_DeleteRow) expected = [][]types.Datum{ {types.NewStringDatum("def"), types.NewIntDatum(18), types.NewIntDatum(-1), types.NewIntDatum(2)}, } gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].DeletedRows, 1, 3, 4, 5) c.Assert(gotRows, DeepEquals, expected) // Test Table don't have primary key. tk.MustExec("create table local_binlog3 (c1 int, c2 int)") tk.MustExec("insert local_binlog3 values (1, 2), (1, 3), (2, 3)") tk.MustExec("update local_binlog3 set c1 = 3 where c1 = 2") prewriteVal = getLatestBinlogPrewriteValue(c, pump) // The encoded update row is [oldColID1, oldColVal1, oldColID2, oldColVal2, -1, handle, // newColID1, newColVal2, newColID2, newColVal2, -1, handle] gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].UpdatedRows, 7, 9) expected = [][]types.Datum{ {types.NewIntDatum(3), types.NewIntDatum(3)}, } c.Assert(gotRows, DeepEquals, expected) expected = [][]types.Datum{ {types.NewIntDatum(-1), types.NewIntDatum(3), types.NewIntDatum(-1), types.NewIntDatum(3)}, } gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].UpdatedRows, 4, 5, 10, 11) c.Assert(gotRows, DeepEquals, expected) tk.MustExec("delete from local_binlog3 where c1 = 3 and c2 = 3") prewriteVal = getLatestBinlogPrewriteValue(c, pump) c.Assert(prewriteVal.Mutations[0].Sequence[0], Equals, binlog.MutationType_DeleteRow) gotRows = mutationRowsToRows(c, prewriteVal.Mutations[0].DeletedRows, 1, 3, 4, 5) expected = [][]types.Datum{ {types.NewIntDatum(3), types.NewIntDatum(3), types.NewIntDatum(-1), types.NewIntDatum(3)}, } c.Assert(gotRows, DeepEquals, expected) // Test Mutation Sequence. tk.MustExec("create table local_binlog4 (c1 int primary key, c2 int)") tk.MustExec("insert local_binlog4 values (1, 1), (2, 2), (3, 2)") tk.MustExec("begin") tk.MustExec("delete from local_binlog4 where c1 = 1") tk.MustExec("insert local_binlog4 values (1, 1)") tk.MustExec("update local_binlog4 set c2 = 3 where c1 = 3") tk.MustExec("commit") prewriteVal = getLatestBinlogPrewriteValue(c, pump) c.Assert(prewriteVal.Mutations[0].Sequence, DeepEquals, []binlog.MutationType{ binlog.MutationType_DeleteRow, binlog.MutationType_Insert, binlog.MutationType_Update, }) // Test statement rollback. tk.MustExec("create table local_binlog5 (c1 int primary key)") tk.MustExec("begin") tk.MustExec("insert into local_binlog5 value (1)") // This statement execute fail and should not write binlog. 
_, err := tk.Exec("insert into local_binlog5 value (4),(3),(1),(2)") c.Assert(err, NotNil) tk.MustExec("commit") prewriteVal = getLatestBinlogPrewriteValue(c, pump) c.Assert(prewriteVal.Mutations[0].Sequence, DeepEquals, []binlog.MutationType{ binlog.MutationType_Insert, }) checkBinlogCount(c, pump) pump.mu.Lock() originBinlogLen := len(pump.mu.payloads) pump.mu.Unlock() tk.MustExec("set @@global.autocommit = 0") tk.MustExec("set @@global.autocommit = 1") pump.mu.Lock() newBinlogLen := len(pump.mu.payloads) pump.mu.Unlock() c.Assert(newBinlogLen, Equals, originBinlogLen) } func (s *testBinlogSuite) TestMaxRecvSize(c *C) { info := &binloginfo.BinlogInfo{ Data: &binlog.Binlog{ Tp: binlog.BinlogType_Prewrite, PrewriteValue: make([]byte, maxRecvMsgSize+1), }, Client: s.client, } binlogWR := info.WriteBinlog(1) err := binlogWR.GetError() c.Assert(err, NotNil) c.Assert(terror.ErrCritical.Equal(err), IsFalse, Commentf("%v", err)) } func getLatestBinlogPrewriteValue(c *C, pump *mockBinlogPump) *binlog.PrewriteValue { var bin *binlog.Binlog pump.mu.Lock() for i := len(pump.mu.payloads) - 1; i >= 0; i-- { payload := pump.mu.payloads[i] bin = new(binlog.Binlog) bin.Unmarshal(payload) if bin.Tp == binlog.BinlogType_Prewrite { break } } pump.mu.Unlock() c.Assert(bin, NotNil) preVal := new(binlog.PrewriteValue) preVal.Unmarshal(bin.PrewriteValue) return preVal } func getLatestDDLBinlog(c *C, pump *mockBinlogPump, ddlQuery string) (preDDL, commitDDL *binlog.Binlog, offset int) { pump.mu.Lock() for i := len(pump.mu.payloads) - 1; i >= 0; i-- { payload := pump.mu.payloads[i] bin := new(binlog.Binlog) bin.Unmarshal(payload) if bin.Tp == binlog.BinlogType_Commit && bin.DdlJobId > 0 { commitDDL = bin } if bin.Tp == binlog.BinlogType_Prewrite && bin.DdlJobId != 0 { preDDL = bin } if preDDL != nil && commitDDL != nil { offset = i break } } pump.mu.Unlock() c.Assert(preDDL.DdlJobId, Greater, int64(0)) c.Assert(preDDL.StartTs, Greater, int64(0)) c.Assert(preDDL.CommitTs, Equals, int64(0)) c.Assert(string(preDDL.DdlQuery), Equals, ddlQuery) return } func checkBinlogCount(c *C, pump *mockBinlogPump) { var bin *binlog.Binlog prewriteCount := 0 ddlCount := 0 pump.mu.Lock() length := len(pump.mu.payloads) for i := length - 1; i >= 0; i-- { payload := pump.mu.payloads[i] bin = new(binlog.Binlog) bin.Unmarshal(payload) if bin.Tp == binlog.BinlogType_Prewrite { if bin.DdlJobId != 0 { ddlCount++ } else { prewriteCount++ } } } pump.mu.Unlock() c.Assert(ddlCount, Greater, 0) match := false for i := 0; i < 10; i++ { pump.mu.Lock() length = len(pump.mu.payloads) pump.mu.Unlock() if (prewriteCount+ddlCount)*2 == length { match = true break } time.Sleep(time.Millisecond * 10) } c.Assert(match, IsTrue) } func mutationRowsToRows(c *C, mutationRows [][]byte, columnValueOffsets ...int) [][]types.Datum { var rows = make([][]types.Datum, 0) for _, mutationRow := range mutationRows { datums, err := codec.Decode(mutationRow, 5) c.Assert(err, IsNil) for i := range datums { if datums[i].Kind() == types.KindBytes { datums[i].SetBytesAsString(datums[i].GetBytes(), mysql.DefaultCollationName, collate.DefaultLen) } } row := make([]types.Datum, 0, len(columnValueOffsets)) for _, colOff := range columnValueOffsets { row = append(row, datums[colOff]) } rows = append(rows, row) } return rows } func (s *testBinlogSuite) TestBinlogForSequence(c *C) { c.Assert(failpoint.Enable("github.com/pingcap/tidb/store/tikv/mockSyncBinlogCommit", `return(true)`), IsNil) defer func() { 
c.Assert(failpoint.Disable("github.com/pingcap/tidb/store/tikv/mockSyncBinlogCommit"), IsNil) }() tk := testkit.NewTestKit(c, s.store) tk.MustExec("use test") s.pump.mu.Lock() s.pump.mu.payloads = s.pump.mu.payloads[:0] s.pump.mu.Unlock() tk.Se.GetSessionVars().BinlogClient = s.client tk.MustExec("drop sequence if exists seq") // the default start = 1, increment = 1. tk.MustExec("create sequence seq cache 3") // trigger the sequence cache allocation. tk.MustQuery("select nextval(seq)").Check(testkit.Rows("1")) sequenceTable := testGetTableByName(c, tk.Se, "test", "seq") tc, ok := sequenceTable.(*tables.TableCommon) c.Assert(ok, Equals, true) _, end, round := tc.GetSequenceCommon().GetSequenceBaseEndRound() c.Assert(end, Equals, int64(3)) c.Assert(round, Equals, int64(0)) // Check the sequence binlog. // Got matched pre DDL and commit DDL. ok = mustGetDDLBinlog(s, "select setval(`test`.`seq`, 3)", c) c.Assert(ok, IsTrue) // Invalidate the current sequence cache. tk.MustQuery("select setval(seq, 5)").Check(testkit.Rows("5")) // trigger the next sequence cache allocation. tk.MustQuery("select nextval(seq)").Check(testkit.Rows("6")) _, end, round = tc.GetSequenceCommon().GetSequenceBaseEndRound() c.Assert(end, Equals, int64(8)) c.Assert(round, Equals, int64(0)) ok = mustGetDDLBinlog(s, "select setval(`test`.`seq`, 8)", c) c.Assert(ok, IsTrue) tk.MustExec("create database test2") tk.MustExec("use test2") tk.MustExec("drop sequence if exists seq2") tk.MustExec("create sequence seq2 start 1 increment -2 cache 3 minvalue -10 maxvalue 10 cycle") // trigger the sequence cache allocation. tk.MustQuery("select nextval(seq2)").Check(testkit.Rows("1")) sequenceTable = testGetTableByName(c, tk.Se, "test2", "seq2") tc, ok = sequenceTable.(*tables.TableCommon) c.Assert(ok, Equals, true) _, end, round = tc.GetSequenceCommon().GetSequenceBaseEndRound() c.Assert(end, Equals, int64(-3)) c.Assert(round, Equals, int64(0)) ok = mustGetDDLBinlog(s, "select setval(`test2`.`seq2`, -3)", c) c.Assert(ok, IsTrue) tk.MustQuery("select setval(seq2, -100)").Check(testkit.Rows("-100")) // trigger the sequence cache allocation. tk.MustQuery("select nextval(seq2)").Check(testkit.Rows("10")) _, end, round = tc.GetSequenceCommon().GetSequenceBaseEndRound() c.Assert(end, Equals, int64(6)) c.Assert(round, Equals, int64(1)) ok = mustGetDDLBinlog(s, "select setval(`test2`.`seq2`, 6)", c) c.Assert(ok, IsTrue) // Test dml txn is independent from sequence txn. tk.MustExec("drop sequence if exists seq") tk.MustExec("create sequence seq cache 3") tk.MustExec("drop table if exists t") tk.MustExec("create table t (a int default next value for seq)") // sequence txn commit first then the dml txn. tk.MustExec("insert into t values(-1),(default),(-1),(default)") // binlog list like [... ddl prewrite(offset), ddl commit, dml prewrite, dml commit] _, _, offset := getLatestDDLBinlog(c, s.pump, "select setval(`test2`.`seq`, 3)") s.pump.mu.Lock() c.Assert(offset+3, Equals, len(s.pump.mu.payloads)-1) s.pump.mu.Unlock() } // Sometimes this test doesn't clean up fail, let the function name begin with 'Z' // so it runs last and would not disrupt other tests. 
func (s *testBinlogSuite) TestZIgnoreError(c *C) { tk := testkit.NewTestKit(c, s.store) tk.MustExec("use test") tk.Se.GetSessionVars().BinlogClient = s.client tk.MustExec("drop table if exists t") tk.MustExec("create table t (id int)") binloginfo.SetIgnoreError(true) s.pump.mu.Lock() s.pump.mu.mockFail = true s.pump.mu.Unlock() tk.MustExec("insert into t values (1)") tk.MustExec("insert into t values (1)") // Clean up. s.pump.mu.Lock() s.pump.mu.mockFail = false s.pump.mu.Unlock() binloginfo.DisableSkipBinlogFlag() binloginfo.SetIgnoreError(false) } func (s *testBinlogSuite) TestPartitionedTable(c *C) { // This test checks partitioned table write binlog with table ID, rather than partition ID. tk := testkit.NewTestKit(c, s.store) tk.MustExec("use test") tk.Se.GetSessionVars().BinlogClient = s.client tk.MustExec("drop table if exists t") tk.MustExec(`create table t (id int) partition by range (id) ( partition p0 values less than (1), partition p1 values less than (4), partition p2 values less than (7), partition p3 values less than (10))`) tids := make([]int64, 0, 10) for i := 0; i < 10; i++ { tk.MustExec("insert into t values (?)", i) prewriteVal := getLatestBinlogPrewriteValue(c, s.pump) tids = append(tids, prewriteVal.Mutations[0].TableId) } c.Assert(len(tids), Equals, 10) for i := 1; i < 10; i++ { c.Assert(tids[i], Equals, tids[0]) } } func (s *testBinlogSuite) TestDeleteSchema(c *C) { tk := testkit.NewTestKit(c, s.store) tk.MustExec("use test") tk.MustExec("CREATE TABLE `b1` (`id` int(11) NOT NULL AUTO_INCREMENT, `job_id` varchar(50) NOT NULL, `split_job_id` varchar(30) DEFAULT NULL, PRIMARY KEY (`id`), KEY `b1` (`job_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;") tk.MustExec("CREATE TABLE `b2` (`id` int(11) NOT NULL AUTO_INCREMENT, `job_id` varchar(50) NOT NULL, `batch_class` varchar(20) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `bu` (`job_id`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4") tk.MustExec("insert into b2 (job_id, batch_class) values (2, 'TEST');") tk.MustExec("insert into b1 (job_id) values (2);") // This test cover a bug that the final schema and the binlog row inconsistent. // The final schema of this SQL should be the schema of table b1, rather than the schema of join result. 
tk.MustExec("delete from b1 where job_id in (select job_id from b2 where batch_class = 'TEST') or split_job_id in (select job_id from b2 where batch_class = 'TEST');") tk.MustExec("delete b1 from b2 right join b1 on b1.job_id = b2.job_id and batch_class = 'TEST';") } func (s *testBinlogSuite) TestAddSpecialComment(c *C) { testCase := []struct { input string result string }{ { "create table t1 (id int ) shard_row_id_bits=2;", "create table t1 (id int ) /*!90000 shard_row_id_bits=2 */ ;", }, { "create table t1 (id int ) shard_row_id_bits=2 pre_split_regions=2;", "create table t1 (id int ) /*!90000 shard_row_id_bits=2 pre_split_regions=2 */ ;", }, { "create table t1 (id int ) shard_row_id_bits=2 pre_split_regions=2;", "create table t1 (id int ) /*!90000 shard_row_id_bits=2 pre_split_regions=2 */ ;", }, { "create table t1 (id int ) shard_row_id_bits=2 engine=innodb pre_split_regions=2;", "create table t1 (id int ) /*!90000 shard_row_id_bits=2 pre_split_regions=2 */ engine=innodb ;", }, { "create table t1 (id int ) pre_split_regions=2 shard_row_id_bits=2;", "create table t1 (id int ) /*!90000 shard_row_id_bits=2 pre_split_regions=2 */ ;", }, { "create table t6 (id int ) shard_row_id_bits=2 shard_row_id_bits=3 pre_split_regions=2;", "create table t6 (id int ) /*!90000 shard_row_id_bits=2 shard_row_id_bits=3 pre_split_regions=2 */ ;", }, { "create table t1 (id int primary key auto_random(2));", "create table t1 (id int primary key /*T![auto_rand] auto_random(2) */ );", }, { "create table t1 (id int primary key auto_random);", "create table t1 (id int primary key /*T![auto_rand] auto_random */ );", }, { "create table t1 (id int auto_random ( 4 ) primary key);", "create table t1 (id int /*T![auto_rand] auto_random ( 4 ) */ primary key);", }, { "create table t1 (id int auto_random ( 4 ) primary key);", "create table t1 (id int /*T![auto_rand] auto_random ( 4 ) */ primary key);", }, { "create table t1 (id int auto_random ( 3 ) primary key) auto_random_base = 100;", "create table t1 (id int /*T![auto_rand] auto_random ( 3 ) */ primary key) /*T![auto_rand_base] auto_random_base = 100 */ ;", }, { "create table t1 (id int auto_random primary key) auto_random_base = 50;", "create table t1 (id int /*T![auto_rand] auto_random */ primary key) /*T![auto_rand_base] auto_random_base = 50 */ ;", }, { "create table t1 (id int auto_increment key) auto_id_cache 100;", "create table t1 (id int auto_increment key) /*T![auto_id_cache] auto_id_cache 100 */ ;", }, { "create table t1 (id int auto_increment unique) auto_id_cache 10;", "create table t1 (id int auto_increment unique) /*T![auto_id_cache] auto_id_cache 10 */ ;", }, { "create table t1 (id int) auto_id_cache = 5;", "create table t1 (id int) /*T![auto_id_cache] auto_id_cache = 5 */ ;", }, { "create table t1 (id int) auto_id_cache=5;", "create table t1 (id int) /*T![auto_id_cache] auto_id_cache=5 */ ;", }, { "create table t1 (id int) /*T![auto_id_cache] auto_id_cache=5 */ ;", "create table t1 (id int) /*T![auto_id_cache] auto_id_cache=5 */ ;", }, } for _, ca := range testCase { re := binloginfo.AddSpecialComment(ca.input) c.Assert(re, Equals, ca.result) } } func mustGetDDLBinlog(s *testBinlogSuite, ddlQuery string, c *C) (matched bool) { for i := 0; i < 10; i++ { preDDL, commitDDL, _ := getLatestDDLBinlog(c, s.pump, ddlQuery) if preDDL != nil && commitDDL != nil { if preDDL.DdlJobId == commitDDL.DdlJobId { c.Assert(commitDDL.StartTs, Equals, preDDL.StartTs) c.Assert(commitDDL.CommitTs, Greater, commitDDL.StartTs) matched = true break } } 
time.Sleep(time.Millisecond * 30) } return } func testGetTableByName(c *C, ctx sessionctx.Context, db, table string) table.Table { dom := domain.GetDomain(ctx) // Make sure the table schema is the new schema. err := dom.Reload() c.Assert(err, IsNil) tbl, err := dom.InfoSchema().TableByName(model.NewCIStr(db), model.NewCIStr(table)) c.Assert(err, IsNil) return tbl }
[ "\"log_level\"" ]
[]
[ "log_level" ]
[]
["log_level"]
go
1
0
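mockBinlogPump in binloginfo_test.go above records every WriteBinlog payload under a mutex so assertions can poll it later. The same record-under-lock test double translates directly to Python; this sketch is illustrative, not TiDB code:

```python
import threading

class MockPump:
    """Record payloads under a lock; optionally simulate write failures."""
    def __init__(self):
        self._lock = threading.Lock()
        self.payloads = []
        self.mock_fail = False

    def write_binlog(self, payload: bytes):
        with self._lock:
            if self.mock_fail:
                raise RuntimeError("mock fail")
            self.payloads.append(payload)

    def snapshot(self):
        with self._lock:
            return list(self.payloads)  # copy, so callers can inspect without the lock
```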
dss/util/auth/auth0.py
import requests import os import logging from dss import Config from dss import dynamodb as db from dss.error import DSSForbiddenException, DSSException from .authorize import Authorize, always_allow_admins logger = logging.getLogger(__name__) class FlacMixin(Authorize): """ Mixin class for Auth0 Authorize class to use fine-level access control (FLAC) table to check if a user is allowed to access a given UUID. """ flac_lookup_table_name = f"dss-auth-lookup-{os.environ['DSS_DEPLOYMENT_STAGE']}" def _assert_authorized_flac(self, **kwargs): """ kwargs contains information from both the original API function call and from the security decorator. Use both to look up this UUID in the FLAC table. """ # uuid = kwargs['uuid'] # method = kwargs['method'] # email = self.token_email # group = self.token_group # Do FLAC lookup here self.assert_required_parameters(kwargs, ["uuid", "method"]) uuid = kwargs.get('uuid') try: flac_attributes = db.get_item(table=self.flac_lookup_table_name, hash_key=uuid) except db.DynamoDBItemNotFound as ex: msg = f'uuid: {uuid} was not found in the flac table' logger.info(msg, ex) return else: try: self.assert_auth0authz_groups_intersects(flac_attributes['groups']) except DSSForbiddenException: # Re-raise the exception with better context msg = f'User: {self.token} does not have sufficient privileges for object: {flac_attributes}' raise DSSForbiddenException(msg) else: return # TODO what about users? should the class be able to handle users and/or groups? class Auth0AuthZGroupsMixin(Authorize): """ Mixin class for Auth0 Authorize class to access groups information added to the JWT by the Auth0 AuthZ extension. These are the groups used to determine FLAC access. (Note: the Auth0 AuthZ extension adds groups, roles, and permissions, but here we just use groups.) """ @classmethod def get_auth0authz_claim(self): oidc_audience = Config.get_audience()[0] return f"{oidc_audience}auth0" @property def auth0authz_groups(self): """Property for the groups added to the JWT by the Auth0 AuthZ plugin""" # First get the portion of the token added by the Auth0 AuthZ extension auth0authz_claim = self.get_auth0authz_claim() self._assert_required_token_parameters([auth0authz_claim]) auth0authz_token = self.token[auth0authz_claim] # Second extract the groups from this portion auth0authz_groups_claim = "groups" self.assert_required_parameters(auth0authz_token, [auth0authz_groups_claim]) groups = self.token[auth0authz_claim][auth0authz_groups_claim] return groups def assert_auth0authz_groups_intersects(self, groups): """ Assert that the intersection of Auth0 AuthZ groups and user-provided groups has cardinality greater than zero (intersection has at least 1 member). """ cardinality = len(set(self.auth0authz_groups).intersection(set(groups))) if cardinality > 0: return else: raise DSSForbiddenException() class Auth0(FlacMixin, Auth0AuthZGroupsMixin): """ Implements the Auth0 security flow, which implements different authorization checks based on whether operations are create/read/update/delete operations. 
Decorator examples: @security.assert_security(method='create', groups=['dbio', 'grp']) @security.assert_security(method='read') @security.assert_security(method='update', groups=['dbio', 'grp']) @security.assert_security(method='delete') """ def __init__(self): self.session = requests.Session() self.valid_methods = {'group': self._group, 'create': self._create, 'read': self._read, 'update': self._update, 'delete': self._delete} def security_flow(self, **kwargs): """ Dispatch pattern: the assert_security decorator will specify the type of operation (CRUD), which is passed through to the kwargs of this method, and used to call the correct method. """ # TODO add some type of jwt inspection self.assert_required_parameters(kwargs, ['method']) method = kwargs['method'] # Ensure method is valid if method is None or method not in self.valid_methods.keys(): err = f'Unable to locate auth_method {method} for request, valid methods are: ' err += f'{", ".join(self.valid_methods)}' raise DSSException(500, err) # Further kwarg processing should happen from # inside the method that needs the info. # Dispatch to correct method executed_method = self.valid_methods[method] executed_method(**kwargs) @always_allow_admins def _group(self, **kwargs): """Auth checks for 'group' API actions""" # This just checks that the JWT group is in the # list of allowed groups specified in the decorator self.assert_required_parameters(kwargs, ['groups']) self._assert_authorized_group(kwargs['groups']) return @always_allow_admins def _create(self, **kwargs): """Auth checks for 'create' API actions""" # Only check that the token group is in the security decorator's list of allowed groups self.assert_required_parameters(kwargs, ['groups']) self._assert_authorized_group(kwargs['groups']) return @always_allow_admins def _read(self, **kwargs): """Auth checks for 'read' API actions""" # Data is public if there is no FLAC table entry. self._assert_authorized_flac(**kwargs) return @always_allow_admins def _update(self, **kwargs): """Auth checks for 'update' API actions""" # Update requires read and create access # Assert user has read access read_kwargs = kwargs.copy() read_kwargs['method'] = 'read' self._read(**read_kwargs) # Assert user has create access create_kwargs = kwargs.copy() create_kwargs['method'] = 'create' self.assert_required_parameters(create_kwargs, ['groups']) self._create(**create_kwargs) return @always_allow_admins def _delete(self, **kwargs): """Auth checks for 'delete' API actions""" err = f"Delete action is only allowed for admin users, user: {self.token_email} is not permitted" raise DSSForbiddenException(err)
[]
[]
[ "DSS_DEPLOYMENT_STAGE" ]
[]
["DSS_DEPLOYMENT_STAGE"]
python
1
0
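assert_auth0authz_groups_intersects in auth0.py above reduces the FLAC check to set-intersection cardinality. The core predicate, isolated as a sketch with made-up group names:

```python
def groups_intersect(token_groups, allowed_groups):
    """True when the JWT's Auth0 AuthZ groups share at least one member
    with the groups attached to the FLAC table entry."""
    return len(set(token_groups) & set(allowed_groups)) > 0

assert groups_intersect(["dbio", "grp"], ["grp"])
assert not groups_intersect(["dbio"], ["other"])
```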
humans/wsgi.py
""" WSGI config for humans project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MODULE", "humans.settings") application = get_wsgi_application()
[]
[]
[]
[]
[]
python
0
0
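wsgi.py above relies on os.environ.setdefault, which fills DJANGO_SETTINGS_MODULE only when nothing is exported, so a host-provided value always wins. A tiny illustration (the production module name is hypothetical):

```python
import os

os.environ["DJANGO_SETTINGS_MODULE"] = "humans.settings_production"   # e.g. exported by the host
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "humans.settings")
assert os.environ["DJANGO_SETTINGS_MODULE"] == "humans.settings_production"  # setdefault did not override
```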
go/QnAMaker/rest/publish-kb.go
package main // <dependencies> import ( "bytes" "fmt" "log" "net/http" "os" ) // </dependencies> // <main> func main() { /* * Configure the local environment: * Set the QNA_MAKER_SUBSCRIPTION_KEY, QNA_MAKER_ENDPOINT, and QNA_MAKER_KB_ID * environment variables on your local machine using * the appropriate method for your preferred shell (Bash, PowerShell, Command * Prompt, etc.). * * If the environment variable is created after the application is launched in a * console or with Visual Studio, the shell (or Visual Studio) needs to be closed * and reloaded to take the environment variable into account. */ if "" == os.Getenv("QNA_MAKER_ENDPOINT") { log.Fatal("Please set/export the environment variable QNA_MAKER_ENDPOINT.") } var endpoint string = os.Getenv("QNA_MAKER_ENDPOINT") if "" == os.Getenv("QNA_MAKER_SUBSCRIPTION_KEY") { log.Fatal("Please set/export the environment variable QNA_MAKER_SUBSCRIPTION_KEY.") } var subscription_key string = os.Getenv("QNA_MAKER_SUBSCRIPTION_KEY") if "" == os.Getenv("QNA_MAKER_KB_ID") { log.Fatal("Please set/export the environment variable QNA_MAKER_KB_ID.") } var kb_id string = os.Getenv("QNA_MAKER_KB_ID") var service string = "/qnamaker/v4.0" var method string = "/knowledgebases/" var uri = endpoint + service + method + kb_id var content = bytes.NewBuffer([]byte(nil)); req, _ := http.NewRequest("POST", uri, content) req.Header.Add("Ocp-Apim-Subscription-Key", subscription_key) client := &http.Client{} response, err := client.Do(req) if err != nil { panic(err) } // print 204 - success code fmt.Println(response.StatusCode) } // </main>
[ "\"QNA_MAKER_ENDPOINT\"", "\"QNA_MAKER_ENDPOINT\"", "\"QNA_MAKER_SUBSCRIPTION_KEY\"", "\"QNA_MAKER_SUBSCRIPTION_KEY\"", "\"QNA_MAKER_KB_ID\"", "\"QNA_MAKER_KB_ID\"" ]
[]
[ "QNA_MAKER_ENDPOINT", "QNA_MAKER_KB_ID", "QNA_MAKER_SUBSCRIPTION_KEY" ]
[]
["QNA_MAKER_ENDPOINT", "QNA_MAKER_KB_ID", "QNA_MAKER_SUBSCRIPTION_KEY"]
go
3
0
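publish-kb.go above repeats the same Getenv/log.Fatal guard three times. A compact Python rendering of that required-variable pattern; require_env is an illustrative helper name:

```python
import os
import sys

def require_env(name: str) -> str:
    """Exit with the same style of message the Go sample logs when a variable is unset."""
    value = os.getenv(name)
    if not value:
        sys.exit(f"Please set/export the environment variable {name}.")
    return value

endpoint = require_env("QNA_MAKER_ENDPOINT")
subscription_key = require_env("QNA_MAKER_SUBSCRIPTION_KEY")
kb_id = require_env("QNA_MAKER_KB_ID")
```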
pkg/cloudprovider/providers/matchbox/matchbox.go
/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package matchbox import ( "context" "encoding/json" "fmt" "io" "os" "strings" archoncloudprovider "kubeup.com/archon/pkg/cloudprovider" "kubeup.com/archon/pkg/cluster" "kubeup.com/archon/pkg/userdata" "kubeup.com/archon/pkg/util" "github.com/coreos/matchbox/matchbox/client" pb "github.com/coreos/matchbox/matchbox/server/serverpb" "github.com/coreos/matchbox/matchbox/storage/storagepb" "github.com/coreos/matchbox/matchbox/tlsutil" "github.com/golang/glog" "k8s.io/kubernetes/pkg/cloudprovider/providers/fake" ) var ( ErrorNotFound = fmt.Errorf("Instance is not found") MatchboxAnnotationPrefix = "matchbox." + cluster.AnnotationPrefix ) const ProviderName = "matchbox" type MatchboxInstanceOptions struct { Mac string `k8s:"mac"` Selector string `k8s:"selector"` UseProfile string `k8s:"use-profile"` UseIgnition string `k8s:"use-ignition"` ExtraBootArgs string `k8s:"extra-boot-args"` } type FakeInstance struct { name string spec cluster.InstanceSpec status *cluster.InstanceStatus } // MatchboxCloud is a test-double implementation of Interface, LoadBalancer, Instances, and Routes. It is useful for testing. type MatchboxCloud struct { fake.FakeCloud FakeInstances map[string]FakeInstance client *client.Client endpoint string } var ( _ archoncloudprovider.Interface = &MatchboxCloud{} ) func newMatchboxCloud(config io.Reader) (archoncloudprovider.Interface, error) { httpEndpoint := os.Getenv("MATCHBOX_HTTP_ENDPOINT") rpcEndpoint := os.Getenv("MATCHBOX_RPC_ENDPOINT") caFile := os.Getenv("MATCHBOX_CA_FILE") certFile := os.Getenv("MATCHBOX_CERT_FILE") keyFile := os.Getenv("MATCHBOX_KEY_FILE") if httpEndpoint == "" { httpEndpoint = "matchbox.foo:8080" } if rpcEndpoint == "" { rpcEndpoint = "127.0.0.1:8081" } if caFile == "" { caFile = "/etc/matchbox/ca.crt" } if certFile == "" { certFile = "/etc/matchbox/client.crt" } if keyFile == "" { keyFile = "/etc/matchbox/client.key" } tlsinfo := &tlsutil.TLSInfo{ CAFile: caFile, CertFile: certFile, KeyFile: keyFile, } tlscfg, err := tlsinfo.ClientConfig() if err != nil { return nil, err } cfg := &client.Config{ Endpoints: []string{rpcEndpoint}, TLS: tlscfg, } // gRPC client client, err := client.New(cfg) if err != nil { return nil, err } return &MatchboxCloud{ client: client, endpoint: httpEndpoint, }, nil } func (f *MatchboxCloud) addCall(desc string) { f.FakeCloud.Calls = append(f.FakeCloud.Calls, desc) } func (f *MatchboxCloud) Archon() (archoncloudprovider.ArchonInterface, bool) { return f, true } func (f *MatchboxCloud) PrivateIP() (archoncloudprovider.PrivateIPInterface, bool) { return nil, false } func (f *MatchboxCloud) PublicIP() (archoncloudprovider.PublicIPInterface, bool) { return nil, false } func (f *MatchboxCloud) AddNetworkAnnotation(clustername string, instance *cluster.Instance, network *cluster.Network) error { return nil } func (f *MatchboxCloud) EnsureNetwork(clusterName string, network *cluster.Network) (status *cluster.NetworkStatus, err error) { return &cluster.NetworkStatus{Phase: 
cluster.NetworkRunning}, nil } func (f *MatchboxCloud) EnsureNetworkDeleted(clusterName string, network *cluster.Network) (err error) { return nil } // GetLoadBalancer is a stub implementation of LoadBalancer.GetLoadBalancer. func (f *MatchboxCloud) GetInstance(clusterName string, instance *cluster.Instance) (*cluster.InstanceStatus, error) { if f.FakeInstances == nil { return nil, ErrorNotFound } if i, ok := f.FakeInstances[instance.Name]; !ok { return nil, ErrorNotFound } else { return i.status, f.Err } return nil, nil } // EnsureLoadBalancer is a test-spy implementation of LoadBalancer.EnsureLoadBalancer. // It adds an entry "create" into the internal method call record. func (f *MatchboxCloud) EnsureInstance(clusterName string, instance *cluster.Instance) (*cluster.InstanceStatus, error) { f.addCall("create") if f.FakeInstances == nil { f.FakeInstances = make(map[string]FakeInstance) } options := MatchboxInstanceOptions{} err := util.MapToStruct(instance.Annotations, &options, MatchboxAnnotationPrefix) if err != nil { return nil, fmt.Errorf("Can't get instance options: %s", err.Error()) } if i := instance.Dependency.ReservedInstance; i.Name != "" { if i.Spec.InstanceID != "" { instance.Status.InstanceID = i.Spec.InstanceID } for _, c := range i.Spec.Configs { if c.Name == "spec" { if options.Mac == "" { options.Mac = c.Data["mac"] } if instance.Status.PrivateIP == "" { instance.Status.PrivateIP = c.Data["private-ip"] } } } } err = f.ensureGroup(clusterName, instance, &options) if err != nil { return nil, err } err = f.ensureProfile(clusterName, instance, &options) if err != nil { return nil, err } err = f.ensureIgnition(clusterName, instance, &options) if err != nil { return nil, err } instance.Status.Phase = cluster.InstanceRunning name := instance.Name spec := instance.Spec status := instance.Status f.FakeInstances[name] = FakeInstance{name, spec, &status} return &status, f.Err } func (f *MatchboxCloud) ensureGroup(clusterName string, instance *cluster.Instance, options *MatchboxInstanceOptions) error { if options.Mac == "" { return fmt.Errorf("No mac address on instance") } groupID := instance.Name if instance.Status.InstanceID != "" { groupID = instance.Status.InstanceID } selector := map[string]string{} if options.Selector != "" { err := json.Unmarshal([]byte(options.Selector), &selector) if err != nil { return fmt.Errorf("Unmarshal selector error: %+v", err) } } profileID := groupID if options.UseProfile != "" { profileID = options.UseProfile } group := &storagepb.Group{ Id: groupID, Name: instance.Name, Profile: profileID, Selector: selector, } // we use FF:FF:FF:FF:FF:FF as a special mac address for default group // in the default group, we don't set the mac selector so all servers will got a match if options.Mac != "FF:FF:FF:FF:FF:FF" { group.Selector["mac"] = options.Mac } for _, c := range instance.Spec.Configs { if c.Name == "meta" { meta, err := json.Marshal(c.Data) if err != nil { return fmt.Errorf("Marshal metadata error: %+v", err) } group.Metadata = meta } } req := &pb.GroupPutRequest{Group: group} _, err := f.client.Groups.GroupPut(context.TODO(), req) return err } func (f *MatchboxCloud) ensureProfile(clusterName string, instance *cluster.Instance, options *MatchboxInstanceOptions) error { if options.UseProfile != "" { return nil } profileID := instance.Name if instance.Status.InstanceID != "" { profileID = instance.Status.InstanceID } ignitionID := profileID if options.UseIgnition != "" { ignitionID = options.UseIgnition } profile := &storagepb.Profile{ Id: 
profileID, Name: instance.Name, IgnitionId: ignitionID, Boot: &storagepb.NetBoot{ Kernel: fmt.Sprintf("/assets/coreos/%s/coreos_production_pxe.vmlinuz", instance.Spec.Image), Initrd: []string{fmt.Sprintf("/assets/coreos/%s/coreos_production_pxe_image.cpio.gz", instance.Spec.Image)}, Args: []string{ fmt.Sprintf("coreos.config.url=http://%s/ignition?uuid=${uuid}&mac=${mac:hexhyp}", f.endpoint), "coreos.first_boot=yes", "net.ifnames=0", "console=tty0", "console=ttyS0", }, }, } if options.ExtraBootArgs != "" { profile.Boot.Args = append(profile.Boot.Args, strings.Split(options.ExtraBootArgs, ",")...) } req := &pb.ProfilePutRequest{Profile: profile} _, err := f.client.Profiles.ProfilePut(context.TODO(), req) return err } func (f *MatchboxCloud) ensureIgnition(clusterName string, instance *cluster.Instance, options *MatchboxInstanceOptions) error { if options.UseIgnition != "" { return nil } userdata, err := userdata.Generate(instance) if err != nil { glog.Errorf("Generate userdata error: %+v", err) return err } ignitionName := instance.Name if instance.Status.InstanceID != "" { ignitionName = instance.Status.InstanceID } req := &pb.IgnitionPutRequest{Name: ignitionName, Config: userdata} _, err = f.client.Ignition.IgnitionPut(context.TODO(), req) return err } // EnsureLoadBalancerDeleted is a test-spy implementation of LoadBalancer.EnsureLoadBalancerDeleted. // It adds an entry "delete" into the internal method call record. func (f *MatchboxCloud) EnsureInstanceDeleted(clusterName string, instance *cluster.Instance) error { f.addCall("delete") if f.FakeInstances == nil { return f.Err } if _, ok := f.FakeInstances[instance.Name]; ok { delete(f.FakeInstances, instance.Name) } return f.Err } // List is a test-spy implementation of Instances.List. // It adds an entry "list" into the internal method call record. func (f *MatchboxCloud) ListInstances(clusterName string, network *cluster.Network, selector map[string]string) ([]string, []*cluster.InstanceStatus, error) { f.addCall("list") result := make([]string, 0) instances := make([]*cluster.InstanceStatus, 0) for k, v := range f.FakeInstances { result = append(result, k) instances = append(instances, v.status) } return result, instances, f.Err } func init() { archoncloudprovider.RegisterCloudProvider(ProviderName, func(config io.Reader) (archoncloudprovider.Interface, error) { return newMatchboxCloud(config) }) }
[ "\"MATCHBOX_HTTP_ENDPOINT\"", "\"MATCHBOX_RPC_ENDPOINT\"", "\"MATCHBOX_CA_FILE\"", "\"MATCHBOX_CERT_FILE\"", "\"MATCHBOX_KEY_FILE\"" ]
[]
[ "MATCHBOX_CA_FILE", "MATCHBOX_HTTP_ENDPOINT", "MATCHBOX_RPC_ENDPOINT", "MATCHBOX_KEY_FILE", "MATCHBOX_CERT_FILE" ]
[]
["MATCHBOX_CA_FILE", "MATCHBOX_HTTP_ENDPOINT", "MATCHBOX_RPC_ENDPOINT", "MATCHBOX_KEY_FILE", "MATCHBOX_CERT_FILE"]
go
5
0
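newMatchboxCloud in matchbox.go above pairs each os.Getenv with a hard-coded fallback, treating an empty string as unset. In Python that defaulting collapses to one expression per variable; the sketch below restates the sample's defaults:

```python
import os

MATCHBOX_DEFAULTS = {
    "MATCHBOX_HTTP_ENDPOINT": "matchbox.foo:8080",
    "MATCHBOX_RPC_ENDPOINT": "127.0.0.1:8081",
    "MATCHBOX_CA_FILE": "/etc/matchbox/ca.crt",
    "MATCHBOX_CERT_FILE": "/etc/matchbox/client.crt",
    "MATCHBOX_KEY_FILE": "/etc/matchbox/client.key",
}

# `or default` (rather than os.getenv's second argument) also replaces
# set-but-empty values, matching the Go code's == "" checks.
config = {name: os.getenv(name) or default for name, default in MATCHBOX_DEFAULTS.items()}
```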
server/test_api.py
import pytest from flask import json from api import app import os import cloudpickle as cp import pandas as pd import numpy as np from sklearn.pipeline import Pipeline from scipy.sparse import csr_matrix from sklearn.metrics import roc_auc_score categorize_route = "v1/categorize" valid_categories_set = {"Lembrancinhas", "Bebê", "Decoração", "Outros", "Papel e Cia", "Bijuterias e Jóias"} TEST_PRODUCTS_PATH = os.getenv("TEST_PRODUCTS_PATH") TEST_PRODUCTS_CSV_PATH = os.path.join("../", "data", "test_products.csv") with open(TEST_PRODUCTS_PATH, "r") as json_file: test_json = json.load(json_file) def categorize_request(input_data): return app.test_client().post(categorize_route, data=json.dumps(input_data), content_type="application/json") @pytest.mark.parametrize("input_data", [ None, "", {}, {"products": []}, {"products": [{"title": ""}]}, {"products": [{"concatenated_tags": ""}]}, {"products": [{"other1": "", "other2": ""}]} ]) def test_request_with_invalid_data(input_data): response = categorize_request(input_data) assert response.status_code == 400 assert response.data == b"(Bad Request)" @pytest.mark.parametrize("input_data", [ {"products": [{"title": None, "concatenated_tags": None}]}, {"products": [{"title": "", "concatenated_tags": ""}]}, {"products": [{"title": "", "concatenated_tags": "", "other": ""}]}, {"products": [{"title": "a", "concatenated_tags": "a"}, {"title": "b", "concatenated_tags": "b"}]}, test_json]) def test_request_with_valid_data(input_data): response = categorize_request(input_data) assert response.status_code == 200 assert len(response.json["categories"]) == len(input_data['products']) assert set(response.json['categories']).issubset(valid_categories_set) def load_model(): with open(os.getenv("MODEL_PATH"), "rb") as file: return cp.load(file) def load_data(): data = pd.read_csv(TEST_PRODUCTS_CSV_PATH) string_columns = data.select_dtypes("object").columns.tolist() data.loc[:, string_columns] = data.loc[:, string_columns].fillna("") return data def test_check_columns(): data = load_data() expected = ['title', 'query', 'concatenated_tags'] assert np.all(pd.Series(expected).isin(data.columns)) def test_load_pipeline_model(): model = load_model() expected = Pipeline assert expected == model.__class__ def test_column_concatenation(): data = load_data() model = load_model() expected = data["title"] + " " + data["concatenated_tags"] assert expected.equals(model["preprocessor"]["text_column_concatenation"].transform(data)) def test_preprocessor_pipeline_output_class(): data = load_data() model = load_model() expected = csr_matrix assert expected == model["preprocessor"].transform(data).__class__ def test_pipeline_predict(): data = load_data() model = load_model() labels = model.classes_ y_true = data["category"] y_proba = model.predict_proba(data) assert roc_auc_score(y_true, y_proba, multi_class="ovr") > 0.97
[]
[]
[ "MODEL_PATH", "TEST_PRODUCTS_PATH" ]
[]
["MODEL_PATH", "TEST_PRODUCTS_PATH"]
python
2
0
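Note that test_api.py above opens TEST_PRODUCTS_PATH at import time, so an unset variable aborts collection before any test runs. A defensive variant using pytest's module-level skip (illustrative, not part of the suite):

```python
import os
import pytest

TEST_PRODUCTS_PATH = os.getenv("TEST_PRODUCTS_PATH")
if TEST_PRODUCTS_PATH is None:
    # Skip the whole module instead of raising during collection
    pytest.skip("TEST_PRODUCTS_PATH is not set", allow_module_level=True)
```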
tests/test_architectures.py
import os import numpy as np import pytest import tensorflow as tf import timm import torch from tfimm import create_model, list_models from tfimm.utils.timm import load_pytorch_weights_in_tf2_model # Exclude models that cause specific test failures if "GITHUB_ACTIONS" in os.environ: # and 'Linux' in platform.system(): EXCLUDE_FILTERS = [ "cait_m*", "convnext_xlarge_*", "ig_resnext101_32x48d", "resnetv2_50x3_*", "resnetv2_101*", "resnetv2_152*", "vit_large_*", "vit_huge_*", ] else: EXCLUDE_FILTERS = ["cait_m*"] TIMM_ARCHITECTURES = list( set(list_models(exclude_filters=EXCLUDE_FILTERS)) & set(timm.list_models()) ) @pytest.mark.skip() @pytest.mark.parametrize("model_name", list_models(exclude_filters=EXCLUDE_FILTERS)) def test_mixed_precision(model_name: str): """ Test if we can run a forward pass with mixed precision. These tests are very slow on CPUs, so we skip them by default. """ tf.keras.backend.clear_session() tf.keras.mixed_precision.set_global_policy("mixed_float16") model = create_model(model_name) img = tf.ones((1, *model.cfg.input_size, model.cfg.in_channels), dtype="float16") res = model(img) assert res.dtype == "float16" @pytest.mark.parametrize("model_name", TIMM_ARCHITECTURES) @pytest.mark.timeout(60) def test_load_timm_model(model_name: str): """Test if we can load models from timm.""" # We don't need to load the pretrained weights from timm, we only need a PyTorch # model, that we then convert to tensorflow. This allows us to run these tests # in GitHub CI without data transfer issues. pt_model = timm.create_model(model_name, pretrained=False) pt_model.eval() tf_model = create_model(model_name, pretrained=False) load_pytorch_weights_in_tf2_model(tf_model, pt_model.state_dict()) rng = np.random.default_rng(2021) img = rng.random( size=(1, *tf_model.cfg.input_size, tf_model.cfg.in_channels), dtype="float32" ) tf_res = tf_model(img, training=False).numpy() pt_img = torch.Tensor(img.transpose([0, 3, 1, 2])) pt_res = pt_model.forward(pt_img).detach().numpy() if model_name.startswith("deit_") and "distilled" in model_name: # During inference timm distilled models return average of both heads, while # we return both heads tf_res = tf.reduce_mean(tf_res, axis=1) # The tests are flaky sometimes, so we use a quite high tolerance assert (np.max(np.abs(tf_res - pt_res))) / (np.max(np.abs(pt_res)) + 1e-6) < 1e-3 @pytest.mark.parametrize("model_name", list_models(module="convnext", exclude_filters=EXCLUDE_FILTERS)) @pytest.mark.timeout(60) def test_feature_extraction(model_name: str): """ Tests if we can create a model and run inference with `return_features` set to both `True` and `False. """ model = create_model(model_name, pretrained=False) inputs = model.dummy_inputs x1, features = model(inputs, return_features=True) x2 = model(inputs, return_features=False) # Check that return value doesn't change if we also want features x1, x2 = x1.numpy(), x2.numpy() assert np.max(np.abs(x1 - x2)) < 1e-6 # Check that features dict contains exactly the expected keys assert set(features.keys()) == set(model.feature_names)
[]
[]
[]
[]
[]
python
0
0
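The tolerance check in test_load_timm_model above compares the TensorFlow and PyTorch outputs by relative max-abs error. Extracted as a standalone helper for clarity; the function name is mine, not tfimm's.

import numpy as np

def max_relative_error(a, b, eps=1e-6):
    # relative max-abs discrepancy, exactly as asserted in test_load_timm_model
    return np.max(np.abs(a - b)) / (np.max(np.abs(b)) + eps)

assert max_relative_error(np.ones(3) + 1e-5, np.ones(3)) < 1e-3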
.mvn/wrapper/MavenWrapperDownloader.java
/* * Copyright 2012-2019 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.net.*; import java.io.*; import java.nio.channels.*; import java.util.Properties; public class MavenWrapperDownloader { private static final String WRAPPER_VERSION = "0.5.5"; /** * Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided. */ private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/" + WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar"; /** * Path to the maven-wrapper.properties file, which might contain a downloadUrl property to * use instead of the default one. */ private static final String MAVEN_WRAPPER_PROPERTIES_PATH = ".mvn/wrapper/maven-wrapper.properties"; /** * Path where the maven-wrapper.jar will be saved to. */ private static final String MAVEN_WRAPPER_JAR_PATH = ".mvn/wrapper/maven-wrapper.jar"; /** * Name of the property which should be used to override the default download url for the wrapper. */ private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl"; public static void main(String args[]) { System.out.println("- Downloader started"); File baseDirectory = new File(args[0]); System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath()); // If the maven-wrapper.properties exists, read it and check if it contains a custom // wrapperUrl parameter. File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH); String url = DEFAULT_DOWNLOAD_URL; if (mavenWrapperPropertyFile.exists()) { FileInputStream mavenWrapperPropertyFileInputStream = null; try { mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile); Properties mavenWrapperProperties = new Properties(); mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream); url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url); } catch (IOException e) { System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'"); } finally { try { if (mavenWrapperPropertyFileInputStream != null) { mavenWrapperPropertyFileInputStream.close(); } } catch (IOException e) { // Ignore ... 
} } } System.out.println("- Downloading from: " + url); File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH); if (!outputFile.getParentFile().exists()) { if (!outputFile.getParentFile().mkdirs()) { System.out.println( "- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'"); } } System.out.println("- Downloading to: " + outputFile.getAbsolutePath()); try { downloadFileFromURL(url, outputFile); System.out.println("Done"); System.exit(0); } catch (Throwable e) { System.out.println("- Error downloading"); e.printStackTrace(); System.exit(1); } } private static void downloadFileFromURL(String urlString, File destination) throws Exception { if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) { String username = System.getenv("MVNW_USERNAME"); char[] password = System.getenv("MVNW_PASSWORD").toCharArray(); Authenticator.setDefault(new Authenticator() { @Override protected PasswordAuthentication getPasswordAuthentication() { return new PasswordAuthentication(username, password); } }); } URL website = new URL(urlString); ReadableByteChannel rbc; rbc = Channels.newChannel(website.openStream()); FileOutputStream fos = new FileOutputStream(destination); fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE); fos.close(); rbc.close(); } }
[ "\"MVNW_USERNAME\"", "\"MVNW_PASSWORD\"", "\"MVNW_USERNAME\"", "\"MVNW_PASSWORD\"" ]
[]
[ "MVNW_USERNAME", "MVNW_PASSWORD" ]
[]
["MVNW_USERNAME", "MVNW_PASSWORD"]
java
2
0
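The downloader's URL resolution (a hard-coded default, optionally overridden by a wrapperUrl property) is the core of the class above. A Python sketch of that logic, for illustration only; the Java original remains authoritative.

import os

DEFAULT_URL = ("https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
               "0.5.5/maven-wrapper-0.5.5.jar")

def resolve_wrapper_url(base_dir):
    # mirror of MavenWrapperDownloader: read wrapperUrl from the properties file if present
    props = os.path.join(base_dir, ".mvn/wrapper/maven-wrapper.properties")
    url = DEFAULT_URL
    if os.path.isfile(props):
        with open(props, encoding="utf-8") as f:
            for line in f:
                key, _, value = line.partition("=")
                if key.strip() == "wrapperUrl" and value.strip():
                    url = value.strip()
    return url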
sparse_operation_kit/unit_test/test_scripts/tf2/test_sparse_emb_demo_model_multi_worker.py
""" Copyright (c) 2021, NVIDIA CORPORATION. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import argparse import sys, os sys.path.append(os.path.abspath(os.path.join( os.path.dirname(os.path.abspath(__file__)), r"../../../"))) import sparse_operation_kit as sok import tensorflow as tf import numpy as np import os, json import pickle import utils from test_sparse_emb_demo_model_single_worker import SOKDemo, test_tf_demo, check_saved_embedding_variables def test_sok_demo(args, init_tensors, *random_samples): port = 12345 os.environ["TF_CONFIG"] = json.dumps({ 'cluster': {"worker": [args.ips[i] + ":" + str(port + i) for i in range(args.worker_num)] }, 'task': {"type": 'worker', "index": args.task_id} }) strategy = tf.distribute.MultiWorkerMirroredStrategy() with strategy.scope(): result = sok.Init(global_batch_size=args.global_batch_size) plugin_demo = SOKDemo(combiner=args.combiner, max_vocabulary_size_per_gpu=args.max_vocabulary_size_per_gpu, slot_num=args.slot_num, max_nnz=args.max_nnz, embedding_vec_size=args.embedding_vec_size) emb_opt = utils.get_embedding_optimizer(args.optimizer)(learning_rate=0.1) dense_opt = utils.get_dense_optimizer(args.optimizer)(learning_rate=0.1) plugin_saver = sok.Saver() if (1 == args.restore_params): filepath = r"./embedding_variables" plugin_saver.restore_from_file(plugin_demo.embedding_layer.embedding_variable, filepath) else: status = plugin_saver.load_embedding_values(plugin_demo.embedding_layer.embedding_variable, init_tensors) loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=True, reduction=tf.keras.losses.Reduction.NONE) def _replica_loss(labels, logits): loss = loss_fn(labels, logits) return tf.nn.compute_average_loss(loss, global_batch_size=args.global_batch_size) @tf.function def _train_step(inputs, labels): with tf.GradientTape() as tape: logit, embedding_vector = plugin_demo(inputs, training=True) loss = _replica_loss(labels, logit) embedding_variables, other_variable = sok.split_embedding_variable_from_others(plugin_demo.trainable_variables) grads, emb_grads = tape.gradient(loss, [other_variable, embedding_variables]) if "plugin" not in args.optimizer: with sok.OptimizerScope(embedding_variables): emb_opt.apply_gradients(zip(emb_grads, embedding_variables), experimental_aggregate_gradients=False) else: emb_opt.apply_gradients(zip(emb_grads, embedding_variables), experimental_aggregate_gradients=False) dense_opt.apply_gradients(zip(grads, other_variable)) return logit, embedding_vector sok_results = list() def _dataset_fn(input_context): replica_batch_size = input_context.get_per_replica_batch_size(args.global_batch_size) dataset = utils.tf_dataset(*random_samples, batchsize=replica_batch_size, to_sparse_tensor=True, repeat=1) # because each worker has its own data source, so that no need to shard the dataset. 
return dataset dataset = strategy.distribute_datasets_from_function(_dataset_fn) for i, (sparse_tensors, replica_labels) in enumerate(dataset): print("-" * 30, "step ", str(i), "-" * 30) logit, embedding_vector = strategy.run(_train_step, args=(sparse_tensors, replica_labels)) print("[INFO]: embedding_vector\n", embedding_vector) sok_results.append(embedding_vector) # FIXME: when the forward computation is too fast, there # may exist some conficts with datareader, which cause the program hang. import time time.sleep(0.2) # seconds # save params to file. if 1 == args.save_params: filepath = r"./embedding_variables/" utils.try_make_dirs(filepath, chief=(True if args.task_id == 0 else False)) plugin_saver.dump_to_file(plugin_demo.embedding_layer.embedding_variable, filepath) return sok_results, plugin_demo.embedding_layer.embedding_variable.values[0].m_var_name def compare_sok_with_tf(args): if (args.global_batch_size % args.local_gpu_num != 0): raise ValueError("global_batch_size: %d is not divisible by local_gpu_num: %d" %(args.global_batch_size, args.local_gpu_num)) if (args.global_batch_size % args.worker_num != 0): raise ValueError("global_batch_size: %d is not divisible by worker_num: %d" %(args.global_batch_size, args.worker_num)) # each worker generate different dataset if args.generate_new_datas: worker_batch_size = args.global_batch_size // args.worker_num random_samples_local = utils.generate_random_samples(num_of_samples=worker_batch_size * args.iter_num, vocabulary_size=args.local_gpu_num * args.max_vocabulary_size_per_gpu * args.worker_num, slot_num=args.slot_num, max_nnz=args.max_nnz) utils.save_to_file(r"./random_samples_" + str(args.task_id) + r".file", *random_samples_local) else: random_samples_local = utils.restore_from_file(r"./random_samples_" + str(args.task_id) + r".file") if (0 == args.restore_params): # each worker generate same init tensors, because each worker will do the filtering by itself. 
init_tensors = utils.get_ones_tensor(max_vocab_size_per_gpu=args.max_vocabulary_size_per_gpu, embedding_vec_size=args.embedding_vec_size, num=args.local_gpu_num * args.worker_num) else: filepath = r"./embedding_variables" tf_values_filename = os.path.join(filepath, r"tf_variable.file") init_tensors = utils.restore_from_file(tf_values_filename) sok_results_local, embedding_variable_name = test_sok_demo(args, init_tensors, *random_samples_local) # save the forward embedding vector from different worker to file utils.save_to_file(r"./sok_embedding_vectors_" + str(args.task_id) + r".file", *sok_results_local) # aggregate dataset from different worker dataset_filenames = [r"./random_samples_" + str(task_id) + r".file" for task_id in range(args.worker_num)] random_samples_total = [list() for _ in range(args.iter_num)] random_labels_total = [list() for _ in range(args.iter_num)] local_batch_size = args.global_batch_size // args.worker_num for work_id in range(args.worker_num): samples, labels = utils.restore_from_file(dataset_filenames[work_id]) for i in range(args.iter_num): random_samples_total[i].extend(samples[i * local_batch_size : (i + 1) * local_batch_size]) random_labels_total[i].extend(labels[i * local_batch_size : (i + 1) * local_batch_size]) random_samples_total = np.concatenate(random_samples_total, axis=0) random_labels_total = np.concatenate(random_labels_total, axis=0) tf_results = test_tf_demo(args, init_tensors, random_samples_total, random_labels_total) # aggregate forward embedding vector from different worker sok_results_filenames = [r"./sok_embedding_vectors_" + str(task_id) + r".file" for task_id in range(args.worker_num)] sok_results_total = list() for file_name in sok_results_filenames: sok_results_local = utils.restore_from_file(file_name) sok_results_total.append(sok_results_local) if (len(sok_results_total[0]) != len(tf_results)): raise ValueError("The length of results obtained from sok: %d is not equal to that of tensorflow: %d." %(len(sok_results_total[0]), len(tf_results))) if (len(tf_results) != args.iter_num): raise ValueError("The length of embedding vectors: %d is not equal to iteration number: %d." %(len(tf_results), args.iter_num)) # for i, sok_vector in enumerate(sok_results_total): for i in range(args.iter_num): if args.local_gpu_num != 1: sok_vector = tf.concat([tf.concat(sok_results_total[task_id][i].values, axis=0) for task_id in range(args.worker_num)], axis=0) else: sok_vector = tf.concat([sok_results_total[task_id][i] for task_id in range(args.worker_num)], axis=0) tf.debugging.assert_near(tf.reshape(sok_vector, shape=[-1, tf.shape(sok_vector)[-1]]), tf_results[i], atol=1e-4, rtol=1e-4) print("\n[INFO]: With MultiWorkerMirroredStrategy, the embedding vector obtained from " +\ "sparse operation kit and tensorflow are consistent for %d iterations." 
%args.iter_num) if (1 == args.save_params): check_saved_embedding_variables(args, embedding_variable_name) def get_task_id(ips): local_ip = utils.get_local_ip() for i in range(len(ips)): if ips[i] == local_ip: return i raise ValueError("Cannot find local_ip: %s in ips list: [%s]" %(local_ip, ", ".join(ips))) if __name__ == "__main__": parser = argparse.ArgumentParser(description='test demo model with single worker.') parser.add_argument('--local_gpu_num', type=int, help='the number of GPUs used to do paralell training.', required=False, default=8) parser.add_argument('--iter_num', type=int, help='the number of testing iterations.', required=False, default=100) parser.add_argument('--max_vocabulary_size_per_gpu', type=int, required=False, default=128) parser.add_argument('--slot_num', type=int, help='the number of feature fields', required=False, default=1) parser.add_argument('--max_nnz', type=int, help='the maximum number of keys in one slot', required=False, default=1) parser.add_argument('--embedding_vec_size', type=int, help='the dimention of embedding vector', required=False, default=1) parser.add_argument('--combiner', type=str, help='the combiner used to do reduction for sparse embedding layer. ' +\ 'It is only respected in sparse embedding layer.', required=False, default='mean', choices=['mean', 'sum']) parser.add_argument('--global_batch_size', type=int, required=False, default=16) parser.add_argument('--optimizer', type=str, help="use what optimizer", required=False, default='plugin_adam', choices=['plugin_adam', 'adam', 'sgd']) parser.add_argument('--ips', type=str, nargs="+", help="the ip address of each worker.", required=False, default="0.0.0.0") parser.add_argument('--generate_new_datas', type=int, choices=[0, 1], help='whether to generate new random samples', required=False, default=1) parser.add_argument('--save_params', type=int, choices=[0, 1], help='whether to save the trained parameters.', required=False, default=0) parser.add_argument('--restore_params', type=int, choices=[0, 1], help='whether to restore from saved files. '+\ 'By default, the testing program will generate random ' +\ 'initial value to initialize trainable parameters '+\ 'rather than restore trainable parameters from file.', required=False, default=0) args = parser.parse_args() if not isinstance(args.ips, list): args.ips = [args.ips] args.worker_num = len(args.ips) if utils.all_ips_in_local(args.ips): processes = list() for task_id in range(args.worker_num): available_gpus = ",".join([str(args.local_gpu_num * task_id + i) for i in range(args.local_gpu_num)]) print("[INFO]: on task: %d, its available GPUs are: %s" %(task_id, available_gpus)) os.environ["CUDA_VISIBLE_DEVICES"] = available_gpus process = utils.TestProcess(func=compare_sok_with_tf, task_id=task_id, arguments=args) process.start() processes.append(process) for process in processes: process.join() else: args.task_id = get_task_id(args.ips) os.environ['CUDA_VISIBLE_DEVICES'] = ",".join([str(i) for i in range(args.local_gpu_num)]) compare_sok_with_tf(args)
[]
[]
[ "CUDA_VISIBLE_DEVICES", "TF_CONFIG" ]
[]
["CUDA_VISIBLE_DEVICES", "TF_CONFIG"]
python
2
0
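test_sok_demo above drives MultiWorkerMirroredStrategy purely through TF_CONFIG. A minimal sketch of the value it builds for worker 0 of a two-worker run; the IP addresses are hypothetical.

import json, os

ips, port, task_id = ["10.0.0.1", "10.0.0.2"], 12345, 0   # hypothetical cluster
os.environ["TF_CONFIG"] = json.dumps({
    "cluster": {"worker": [ips[i] + ":" + str(port + i) for i in range(len(ips))]},
    "task": {"type": "worker", "index": task_id},
})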
examples/kubeflow/main.py
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Trains and Evaluates the MNIST network using a feed dictionary.""" import argparse import os import sys import time import random import logging from six.moves import xrange # pylint: disable=redefined-builtin import tensorflow as tf from tensorflow.examples.tutorials.mnist import input_data from tensorflow.examples.tutorials.mnist import mnist import fairing INPUT_DATA_DIR = '/tmp/tensorflow/mnist/input_data/' MAX_STEPS = 2000 BATCH_SIZE = 100 LEARNING_RATE = 0.3 HIDDEN_1 = 128 HIDDEN_2 = 32 # HACK: Ideally we would want to have a unique subpath for each instance of the job, but since we can't # we are instead appending HOSTNAME to the logdir LOG_DIR = os.path.join(os.getenv('TEST_TMPDIR', '/tmp'), 'tensorflow/mnist/logs/fully_connected_feed/', os.getenv('HOSTNAME', '')) MODEL_DIR = os.path.join(LOG_DIR, 'model.ckpt') class TensorflowModel(): def train(self, **kwargs): tf.logging.set_verbosity(tf.logging.ERROR) self.data_sets = input_data.read_data_sets(INPUT_DATA_DIR) self.images_placeholder = tf.placeholder( tf.float32, shape=(BATCH_SIZE, mnist.IMAGE_PIXELS)) self.labels_placeholder = tf.placeholder(tf.int32, shape=(BATCH_SIZE)) logits = mnist.inference(self.images_placeholder, HIDDEN_1, HIDDEN_2) self.loss = mnist.loss(logits, self.labels_placeholder) self.train_op = mnist.training(self.loss, LEARNING_RATE) self.summary = tf.summary.merge_all() init = tf.global_variables_initializer() saver = tf.train.Saver() self.sess = tf.Session() self.summary_writer = tf.summary.FileWriter(LOG_DIR, self.sess.graph) self.sess.run(init) data_set = self.data_sets.train for step in xrange(MAX_STEPS): images_feed, labels_feed = data_set.next_batch(BATCH_SIZE, False) feed_dict = { self.images_placeholder: images_feed, self.labels_placeholder: labels_feed, } _, loss_value = self.sess.run([self.train_op, self.loss], feed_dict=feed_dict) if step % 100 == 0: print("At step {}, loss = {}".format(step, loss_value)) summary_str = self.sess.run(self.summary, feed_dict=feed_dict) self.summary_writer.add_summary(summary_str, step) self.summary_writer.flush() if __name__ == '__main__': fairing.config.set_builder(name='cluster') fairing.config.set_model(TensorflowModel()) fairing.config.run()
[]
[]
[ "TEST_TMPDIR", "HOSTNAME" ]
[]
["TEST_TMPDIR", "HOSTNAME"]
python
2
0
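The per-pod log path above is worth isolating: because Kubernetes sets HOSTNAME uniquely per pod, appending it to the base directory gives each job instance its own log subpath. The snippet restates the exact expression from main.py.

import os

log_dir = os.path.join(os.getenv('TEST_TMPDIR', '/tmp'),
                       'tensorflow/mnist/logs/fully_connected_feed/',
                       os.getenv('HOSTNAME', ''))  # empty suffix outside Kubernetes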
np-maintenance/edit.py
# -*- coding: utf-8 -*- import argparse import json import os import re import urllib.parse os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__)) import pywikibot import requests from bs4 import BeautifulSoup from config import config_page_name # pylint: disable=E0611,W0614 parser = argparse.ArgumentParser() parser.add_argument('-c', '--check', action='store_true', dest='check') parser.set_defaults(check=False) args = parser.parse_args() site = pywikibot.Site() site.login() config_page = pywikibot.Page(site, config_page_name) cfg = config_page.text cfg = json.loads(cfg) print(json.dumps(cfg, indent=4, ensure_ascii=False)) if not cfg['enable']: exit('disabled\n') url = 'https://zh.wikipedia.org/wiki/{}?action=render'.format(cfg['np_page']) try: req = requests.get(url) except Exception as e: print(e) exit() page_html = req.text soup = BeautifulSoup(page_html, 'html.parser') root = soup.find('div', {'class': 'mw-parser-output'}) cnt = 0 npPage = pywikibot.Page(site, cfg['np_page']) text = npPage.text for ul in root.find_all('ul', recursive=False): for li in ul.find_all('li', recursive=False): link = li.find('a') classlist = link.get('class') if link and classlist and 'mw-redirect' in classlist: title = re.sub(r'^//zh.wikipedia.org/wiki/(.+)$', r'\1', link.get('href')) title = urllib.parse.unquote_plus(title) print(title) log = list(site.logevents(page=pywikibot.Page(site, title), total=1))[0] if log.type() != 'move': print('\tnot move') continue newtitle1 = log.target_page.title() titleregex = re.sub(r'_', '[ _]', re.escape(title)) page = pywikibot.Page(site, title) newtitle2 = page.getRedirectTarget().title() if newtitle1 != newtitle2: print('\ttitle not match:', newtitle1, newtitle2) continue text = re.sub( r'\[\[{0}]],{{{{Findsources\|{0}}}}}'.format(titleregex), '[[{0}]],{{{{Findsources|{0}}}}}'.format(newtitle2), text ) cnt += 1 if cnt == 0: exit('Nothing to do\n') pywikibot.showDiff(npPage.text, text) summary = cfg['summary'].format(cnt) print(summary) if args.check and input('Save?').lower() not in ['', 'y', 'yes']: exit() npPage.text = text npPage.save(summary=summary, minor=False)
[]
[]
[ "PYWIKIBOT_DIR" ]
[]
["PYWIKIBOT_DIR"]
python
1
0
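The one environment trick in edit.py deserves a note: PYWIKIBOT_DIR must be set before pywikibot is imported, since the library locates user-config.py at import time. The pattern, restated:

import os
os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot  # import only after PYWIKIBOT_DIR is set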
tests/api/test_contacts_endpoint.py
""" Test Contacts API Endpoint | Cannlytics API Author: Keegan Skeate Contact: <[email protected]> Created: 7/19/2021 Updated: 7/19/2021 License: MIT License <https://opensource.org/licenses/MIT> """ import os import requests from dotenv import load_dotenv # Test using development server. BASE = 'http://127.0.0.1:8000/api' # Uncomment to test with production server. # BASE = 'https://console.cannlytics.com/api' # Load your API key. load_dotenv('../../.env') API_KEY = os.getenv('CANNLYTICS_API_KEY') # Pass your API key through the authorization header as a bearer token. HEADERS = { 'Authorization': 'Bearer %s' % API_KEY, 'Content-type': 'application/json', } # Identify the organization that you are working with. ORG_ID = 'test-company' # Define the endpoint. ENDPOINT = 'contacts' #------------------------------------------------------------------------------ # Create a contact. #------------------------------------------------------------------------------ data = { 'address': '', 'city': '', 'contact_id': 'TEST', 'county': '', 'email': '', 'latitude': '', 'longitude': '', 'organization': 'Cannlytics Test Contact', 'phone': '', 'state': '', 'street': '', 'website': '', 'zip_code': '' } url = f'{BASE}/{ENDPOINT}?organization_id={ORG_ID}' response = requests.post(url, json=data, headers=HEADERS) assert response.status_code == 200 print('Created:', response.json()['data']) #------------------------------------------------------------------------------ # Get contacts. #------------------------------------------------------------------------------ organization_id = 'test-company' url = f'{BASE}/{ENDPOINT}?organization_id={ORG_ID}' response = requests.get(url, headers=HEADERS) assert response.status_code == 200 data = response.json()['data'] print('Found:', len(data)) #------------------------------------------------------------------------------ # Update a contact. #------------------------------------------------------------------------------ data = { 'contact_id': 'TEST', 'city': 'Tulsa', 'state': 'OK', } url = f'{BASE}/{ENDPOINT}?organization_id={ORG_ID}' response = requests.post(url, json=data, headers=HEADERS) assert response.status_code == 200 print('Updated:', response.json()['data']) #------------------------------------------------------------------------------ # Delete a contact. #------------------------------------------------------------------------------ data = { 'contact_id': 'TEST', } url = f'{BASE}/{ENDPOINT}?organization_id={ORG_ID}' response = requests.delete(url, json=data, headers=HEADERS) assert response.status_code == 200 print('Deleted:', response.json()['data'])
[]
[]
[ "CANNLYTICS_API_KEY" ]
[]
["CANNLYTICS_API_KEY"]
python
1
0
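Every request in the script above authenticates the same way: the key is read from CANNLYTICS_API_KEY and sent as a bearer token. A minimal restatement of that pattern.

import os

HEADERS = {
    'Authorization': 'Bearer %s' % os.getenv('CANNLYTICS_API_KEY'),
    'Content-type': 'application/json',
}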
recbole/config/configurator.py
# @Time : 2020/6/28 # @Author : Zihan Lin # @Email : [email protected] # UPDATE # @Time : 2020/10/04, 2021/3/2, 2021/2/17 # @Author : Shanlei Mu, Yupeng Hou, Jiawei Guan # @Email : [email protected], [email protected], [email protected] """ recbole.config.configurator ################################ """ import re import os import sys import yaml import torch from logging import getLogger from recbole.evaluator import group_metrics, individual_metrics from recbole.utils import get_model, Enum, EvaluatorType, ModelType, InputType, \ general_arguments, training_arguments, evaluation_arguments, dataset_arguments from recbole.utils.utils import set_color class Config(object): """ Configurator module that loads the defined parameters. Configurator module will first load the default parameters from the fixed properties in RecBole and then load parameters from the external input. External input supports three kinds of forms: config file, command line and parameter dictionaries. - config file: It's a file that records the parameters to be modified or added. It should be in ``yaml`` format, e.g. a config file is 'example.yaml', the content is: learning_rate: 0.001 train_batch_size: 2048 - command line: It should be in the format '--learning_rate=0.001' - parameter dictionaries: It should be a dict, where the key is the parameter name and the value is the parameter value, e.g. config_dict = {'learning_rate': 0.001} The configuration module allows the above three kinds of external input to be used together; the priority order is as follows: command line > parameter dictionaries > config file e.g. if we set learning_rate=0.01 in the config file, learning_rate=0.02 on the command line and learning_rate=0.03 in the parameter dictionaries, the learning_rate finally equals 0.02. """ def __init__(self, model=None, dataset=None, config_file_list=None, config_dict=None): """ Args: model (str/AbstractRecommender): the model name or the model class, default is None; if it is None, config will search for the parameter 'model' in the external input as the model name or model class. dataset (str): the dataset name, default is None; if it is None, config will search for the parameter 'dataset' in the external input as the dataset name. config_file_list (list of str): the external config files; multiple config files are allowed, default is None. config_dict (dict): the external parameter dictionaries, default is None. """ self._init_parameters_category() self.yaml_loader = self._build_yaml_loader() self.file_config_dict = self._load_config_files(config_file_list) self.variable_config_dict = self._load_variable_config_dict(config_dict) self.cmd_config_dict = self._load_cmd_line() self._merge_external_config_dict() self.model, self.model_class, self.dataset = self._get_model_and_dataset(model, dataset) self._load_internal_config_dict(self.model, self.model_class, self.dataset) self.final_config_dict = self._get_final_config_dict() self._set_default_parameters() self._init_device() self._set_train_neg_sample_args() def _init_parameters_category(self): self.parameters = dict() self.parameters['General'] = general_arguments self.parameters['Training'] = training_arguments self.parameters['Evaluation'] = evaluation_arguments self.parameters['Dataset'] = dataset_arguments def _build_yaml_loader(self): loader = yaml.FullLoader loader.add_implicit_resolver( u'tag:yaml.org,2002:float', re.compile( u'''^(?: [-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)? |[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+) |\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]* |[-+]?\\.(?:inf|Inf|INF) |\\.(?:nan|NaN|NAN))$''', re.X ), list(u'-+0123456789.') ) return loader def _convert_config_dict(self, config_dict): r"""This function converts the str parameters to their original type. """ for key in config_dict: param = config_dict[key] if not isinstance(param, str): continue try: value = eval(param) if not isinstance(value, (str, int, float, list, tuple, dict, bool, Enum)): value = param except (NameError, SyntaxError, TypeError): if isinstance(param, str): if param.lower() == "true": value = True elif param.lower() == "false": value = False else: value = param else: value = param config_dict[key] = value return config_dict def _load_config_files(self, file_list): file_config_dict = dict() if file_list: for file in file_list: with open(file, 'r', encoding='utf-8') as f: file_config_dict.update(yaml.load(f.read(), Loader=self.yaml_loader)) return file_config_dict def _load_variable_config_dict(self, config_dict): # HyperTuning may set the parameters such as mlp_hidden_size in NeuMF in the format of ['[]', '[]'] # then config_dict will receive a str '[]', but indeed it's a list [] # temporarily use _convert_config_dict to solve this problem return self._convert_config_dict(config_dict) if config_dict else dict() def _load_cmd_line(self): r""" Read parameters from the command line and convert them to str. """ cmd_config_dict = dict() unrecognized_args = [] if "ipykernel_launcher" not in sys.argv[0]: for arg in sys.argv[1:]: if not arg.startswith("--") or len(arg[2:].split("=")) != 2: unrecognized_args.append(arg) continue cmd_arg_name, cmd_arg_value = arg[2:].split("=") if cmd_arg_name in cmd_config_dict and cmd_arg_value != cmd_config_dict[cmd_arg_name]: raise SyntaxError("There are duplicate command args '%s' with different values."
% arg) else: cmd_config_dict[cmd_arg_name] = cmd_arg_value if len(unrecognized_args) > 0: logger = getLogger() logger.warning('command line args [{}] will not be used in RecBole'.format(' '.join(unrecognized_args))) cmd_config_dict = self._convert_config_dict(cmd_config_dict) return cmd_config_dict def _merge_external_config_dict(self): external_config_dict = dict() external_config_dict.update(self.file_config_dict) external_config_dict.update(self.variable_config_dict) external_config_dict.update(self.cmd_config_dict) self.external_config_dict = external_config_dict def _get_model_and_dataset(self, model, dataset): if model is None: try: model = self.external_config_dict['model'] except KeyError: raise KeyError( 'model needs to be specified in at least one of these ways: ' '[model variable, config file, config dict, command line] ' ) if not isinstance(model, str): final_model_class = model final_model = model.__name__ else: final_model = model final_model_class = get_model(final_model) if dataset is None: try: final_dataset = self.external_config_dict['dataset'] except KeyError: raise KeyError( 'dataset needs to be specified in at least one of these ways: ' '[dataset variable, config file, config dict, command line] ' ) else: final_dataset = dataset return final_model, final_model_class, final_dataset def _update_internal_config_dict(self, file): with open(file, 'r', encoding='utf-8') as f: config_dict = yaml.load(f.read(), Loader=self.yaml_loader) if config_dict is not None: self.internal_config_dict.update(config_dict) return config_dict def _load_internal_config_dict(self, model, model_class, dataset): current_path = os.path.dirname(os.path.realpath(__file__)) overall_init_file = os.path.join(current_path, '../properties/overall.yaml') model_init_file = os.path.join(current_path, '../properties/model/' + model + '.yaml') sample_init_file = os.path.join(current_path, '../properties/dataset/sample.yaml') dataset_init_file = os.path.join(current_path, '../properties/dataset/' + dataset + '.yaml') quick_start_config_path = os.path.join(current_path, '../properties/quick_start_config/') context_aware_init = os.path.join(quick_start_config_path, 'context-aware.yaml') context_aware_on_ml_100k_init = os.path.join(quick_start_config_path, 'context-aware_ml-100k.yaml') DIN_init = os.path.join(quick_start_config_path, 'sequential_DIN.yaml') DIN_on_ml_100k_init = os.path.join(quick_start_config_path, 'sequential_DIN_on_ml-100k.yaml') sequential_init = os.path.join(quick_start_config_path, 'sequential.yaml') special_sequential_on_ml_100k_init = os.path.join(quick_start_config_path, 'special_sequential_on_ml-100k.yaml') sequential_embedding_model_init = os.path.join(quick_start_config_path, 'sequential_embedding_model.yaml') knowledge_base_init = os.path.join(quick_start_config_path, 'knowledge_base.yaml') self.internal_config_dict = dict() for file in [overall_init_file, model_init_file, sample_init_file, dataset_init_file]: if os.path.isfile(file): config_dict = self._update_internal_config_dict(file) if file == dataset_init_file: self.parameters['Dataset'] += [ key for key in config_dict.keys() if key not in self.parameters['Dataset'] ] self.internal_config_dict['MODEL_TYPE'] = model_class.type if self.internal_config_dict['MODEL_TYPE'] == ModelType.GENERAL: pass elif self.internal_config_dict['MODEL_TYPE'] in {ModelType.CONTEXT, ModelType.DECISIONTREE}: self._update_internal_config_dict(context_aware_init) if dataset == 'ml-100k':
self._update_internal_config_dict(context_aware_on_ml_100k_init) elif self.internal_config_dict['MODEL_TYPE'] == ModelType.SEQUENTIAL: if model in ['DIN', 'DIEN']: self._update_internal_config_dict(DIN_init) if dataset == 'ml-100k': self._update_internal_config_dict(DIN_on_ml_100k_init) elif model in ['GRU4RecKG', 'KSR']: self._update_internal_config_dict(sequential_embedding_model_init) else: self._update_internal_config_dict(sequential_init) if dataset == 'ml-100k' and model in ['GRU4RecF', 'SASRecF', 'FDSA', 'S3Rec']: self._update_internal_config_dict(special_sequential_on_ml_100k_init) elif self.internal_config_dict['MODEL_TYPE'] == ModelType.KNOWLEDGE: self._update_internal_config_dict(knowledge_base_init) def _get_final_config_dict(self): final_config_dict = dict() final_config_dict.update(self.internal_config_dict) final_config_dict.update(self.external_config_dict) return final_config_dict def _set_default_parameters(self): self.final_config_dict['dataset'] = self.dataset self.final_config_dict['model'] = self.model if self.dataset == 'ml-100k' and self.final_config_dict['data_path'] is None: current_path = os.path.dirname(os.path.realpath(__file__)) self.final_config_dict['data_path'] = os.path.join(current_path, '../dataset_example/' + self.dataset) else: self.final_config_dict['data_path'] = os.path.join(self.final_config_dict['data_path'], self.dataset) if hasattr(self.model_class, 'input_type'): self.final_config_dict['MODEL_INPUT_TYPE'] = self.model_class.input_type elif 'loss_type' in self.final_config_dict: if self.final_config_dict['loss_type'] in ['CE']: if self.final_config_dict['MODEL_TYPE'] == ModelType.SEQUENTIAL and self.final_config_dict['training_neg_sample_num'] > 0: raise ValueError("training_neg_sample_num should be 0 when the loss_type is CE") self.final_config_dict['MODEL_INPUT_TYPE'] = InputType.POINTWISE elif self.final_config_dict['loss_type'] in ['BPR']: self.final_config_dict['MODEL_INPUT_TYPE'] = InputType.PAIRWISE else: raise ValueError('Either Model has attr \'input_type\',' 'or arg \'loss_type\' should exist in config.') eval_type = None for metric in self.final_config_dict['metrics']: if metric.lower() in individual_metrics: if eval_type is not None and eval_type == EvaluatorType.RANKING: raise RuntimeError('Ranking metrics and other metrics can not be used at the same time.') else: eval_type = EvaluatorType.INDIVIDUAL if metric.lower() in group_metrics: if eval_type is not None and eval_type == EvaluatorType.INDIVIDUAL: raise RuntimeError('Ranking metrics and other metrics can not be used at the same time.') else: eval_type = EvaluatorType.RANKING self.final_config_dict['eval_type'] = eval_type smaller_metric = ['rmse', 'mae', 'logloss'] valid_metric = self.final_config_dict['valid_metric'].split('@')[0] self.final_config_dict['valid_metric_bigger'] = False if valid_metric.lower() in smaller_metric else True if 'additional_feat_suffix' in self.final_config_dict: ad_suf = self.final_config_dict['additional_feat_suffix'] if isinstance(ad_suf, str): self.final_config_dict['additional_feat_suffix'] = [ad_suf] def _init_device(self): use_gpu = self.final_config_dict['use_gpu'] if use_gpu: os.environ["CUDA_VISIBLE_DEVICES"] = str(self.final_config_dict['gpu_id']) self.final_config_dict['device'] = torch.device("cuda" if torch.cuda.is_available() and use_gpu else "cpu") def _set_train_neg_sample_args(self): if self.final_config_dict['training_neg_sample_num']: self.final_config_dict['train_neg_sample_args'] = { 'strategy': 'by', 'by': 
self.final_config_dict['training_neg_sample_num'], 'distribution': self.final_config_dict['training_neg_sample_distribution'] or 'uniform' } else: self.final_config_dict['train_neg_sample_args'] = {'strategy': 'none'} def __setitem__(self, key, value): if not isinstance(key, str): raise TypeError("index must be a str.") self.final_config_dict[key] = value def __getitem__(self, item): if item in self.final_config_dict: return self.final_config_dict[item] else: return None def __contains__(self, key): if not isinstance(key, str): raise TypeError("index must be a str.") return key in self.final_config_dict def __str__(self): args_info = '\n' for category in self.parameters: args_info += set_color(category + ' Hyper Parameters:\n', 'pink') args_info += '\n'.join([(set_color("{}", 'cyan') + " =" + set_color(" {}", 'yellow')).format(arg, value) for arg, value in self.final_config_dict.items() if arg in self.parameters[category]]) args_info += '\n\n' args_info += set_color('Other Hyper Parameters: \n', 'pink') args_info += '\n'.join([ (set_color("{}", 'cyan') + " = " + set_color("{}", 'yellow')).format(arg, value) for arg, value in self.final_config_dict.items() if arg not in { _ for args in self.parameters.values() for _ in args }.union({'model', 'dataset', 'config_files'}) ]) args_info += '\n\n' return args_info def __repr__(self): return self.__str__()
[]
[]
[ "CUDA_VISIBLE_DEVICES" ]
[]
["CUDA_VISIBLE_DEVICES"]
python
1
0
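The priority rule documented in Config's docstring (command line > parameter dict > config file) is easiest to see in use. A hedged usage sketch; the model and dataset names are illustrative, and example.yaml is assumed to set learning_rate: 0.001.

from recbole.config import Config

config = Config(model='BPR', dataset='ml-100k',
                config_file_list=['example.yaml'],    # assumed: learning_rate: 0.001
                config_dict={'learning_rate': 0.03})  # overrides the file
print(config['learning_rate'])  # 0.03, unless --learning_rate=... was given on the command line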
tbert/transformer.py
# The MIT License # Copyright 2019 Innodata Labs and Mike Kroutikov # # PyTorch port of # https://github.com/google-research/bert/modeling.py # # Original code copyright follows: # # Copyright 2018 The Google AI Language Team Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import torch from tbert.gelu import gelu from tbert.attention import Attention, init_linear class TransformerEncoder(torch.nn.Module): def __init__(self, hidden_size=768, num_heads=12, intermediate_size=3072, dropout=0.1, initializer_range=0.02): ''' hidden_size - hidden size, must be a multiple of num_heads num_heads - number of attention heads. intermediate_size - size of the intermediate dense layer dropout - dropout probability (0. means "no dropout") initializer_range - stddev for random weight matrix initialization ''' torch.nn.Module.__init__(self) if hidden_size % num_heads: raise ValueError( 'hidden size must be a multiple of the number of attention heads' ) self.attention = Attention( hidden_size, hidden_size, num_heads, hidden_size // num_heads, dropout=dropout, initializer_range=initializer_range ) self.dense = torch.nn.Linear(hidden_size, hidden_size) self.dropout = torch.nn.Dropout(dropout) self.dense_layer_norm = torch.nn.LayerNorm(hidden_size, eps=1.e-12) self.intermediate = torch.nn.Linear(hidden_size, intermediate_size) self.output = torch.nn.Linear(intermediate_size, hidden_size) self.output_layer_norm = torch.nn.LayerNorm(hidden_size, eps=1.e-12) init_linear(self.dense, initializer_range) init_linear(self.intermediate, initializer_range) init_linear(self.output, initializer_range) def forward(self, inp, att_mask=None, batch_size=1): ''' B - batch size S - sequence length H - hidden size inp - a float matrix with embedded input sequences, shape [B*S, H] att_mask - an int tensor of shape [B, 1, S, S] - the self-attention mask batch_size - batch size Returns: a matrix of the same dims as inp (so that encoders are stackable) ''' # --> [B*S, H] x = self.attention(inp, inp, inp, att_mask, batch_size=batch_size) # --> [B*S, H] x = self.dense(x) x = self.dropout(x) x = self.dense_layer_norm(inp + x) x2 = self.output(gelu(self.intermediate(x))) x = self.output_layer_norm(x + x2) return x class TransformerDecoder(torch.nn.Module): def __init__(self, hidden_size=768, num_heads=12, intermediate_size=3072, dropout=0.1, initializer_range=0.02): ''' hidden_size - hidden size, must be a multiple of num_heads num_heads - number of attention heads. intermediate_size - size of the intermediate dense layer dropout - dropout probability (0.
means "no dropout") ''' torch.nn.Module.__init__(self) if hidden_size % num_heads: raise ValueError( 'hidden size must be a multiple of the number of attention heads' ) self.attention = Attention( hidden_size, hidden_size, num_heads, hidden_size // num_heads, dropout=dropout, initializer_range=initializer_range ) self.encoder_attention = Attention( hidden_size, hidden_size, num_heads, hidden_size // num_heads, dropout=dropout, initializer_range=initializer_range ) self.dense = torch.nn.Linear(hidden_size, hidden_size) self.dropout = torch.nn.Dropout(dropout) self.dense_layer_norm = torch.nn.LayerNorm(hidden_size, eps=1.e-12) self.intermediate = torch.nn.Linear(hidden_size, intermediate_size) self.output = torch.nn.Linear(intermediate_size, hidden_size) self.output_layer_norm = torch.nn.LayerNorm(hidden_size, eps=1.e-12) init_linear(self.dense, initializer_range) init_linear(self.intermediate, initializer_range) init_linear(self.output, initializer_range) def forward(self, inp, enc_inp, att_mask=None, enc_att_mask=None, batch_size=1): ''' B - batch size S - sequence length E - encoder sequence length H - hidden size inp - a float matrix with embedded input sequences, shape [B*S, H] enc_inp - a float matrix with embedded activations from encoder layer, shape [B*E, H] att_mask - an int tensor of shape [B, 1, S, S] - the self-attention mask enc_att_mask - an int tensor of shape [B, 1, E, S] - the attention mask from encoder data batch_size - batch size Returns: a matrix of the same dims as inp (so that decoders are stackable) ''' # --> [B*S, H] x = self.attention(inp, inp, inp, att_mask, batch_size=batch_size) # apply attention on encoder x = self.encoder_attention(enc_inp, x, x, enc_att_mask, batch_size=batch_size) # --> [B*S, H] x = self.dense(x) x = self.dropout(x) x = self.dense_layer_norm(inp + x) x2 = self.output(gelu(self.intermediate(x))) x = self.output_layer_norm(x + x2) return x
[]
[]
[]
[]
[]
python
null
null
null
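A minimal shape check for the encoder above, following its docstring: the input is [B*S, H] and the output keeps the same dims, so encoders stack. It assumes the Attention block accepts a None mask, as the default argument suggests; the hyperparameters are the __init__ defaults.

import torch
from tbert.transformer import TransformerEncoder

B, S, H = 2, 16, 768
enc = TransformerEncoder(hidden_size=H, num_heads=12)
x = torch.randn(B * S, H)                # [B*S, H], already embedded
y = enc(x, att_mask=None, batch_size=B)
assert y.shape == (B * S, H)             # same dims, so encoders are stackable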