Columns (all string-valued, shown with their observed length ranges):
  file_name: 3 – 137 characters
  prefix:    0 – 918k characters
  suffix:    0 – 962k characters
  middle:    0 – 812k characters
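The rows below read as fill-in-the-middle (FIM) samples: each entry lists a file_name, then the prefix and suffix of a source file, and finally the middle span that was cut out between them. For example, in the first row the prefix ends with "enum ", the middle is "NonEmptyEnum5", and the suffix begins with "{ //~ `NonEmptyEnum5` defined here". Assuming that convention holds for every row (it matches all the rows visible here), a minimal Python sketch for reassembling the original file from one row, using field names taken from the header above, would be:

row = {
    "file_name": "match-empty.rs",
    "prefix": "... enum ",        # text before the removed span (abridged here for illustration)
    "suffix": " { //~ `NonEmptyEnum5` defined here ...",  # text after the removed span (abridged)
    "middle": "NonEmptyEnum5",    # the span a FIM model is asked to fill in
}

def reassemble(row):
    # The original file is the prefix, then the middle, then the suffix, in that order.
    return row["prefix"] + row["middle"] + row["suffix"]

print(reassemble(row))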
match-empty.rs
#![feature(never_type)] #![deny(unreachable_patterns)] enum Foo {} struct NonEmptyStruct(bool); //~ `NonEmptyStruct` defined here union NonEmptyUnion1 { //~ `NonEmptyUnion1` defined here foo: (), } union NonEmptyUnion2 { //~ `NonEmptyUnion2` defined here foo: (), bar: (), } enum NonEmptyEnum1 { //~ `NonEmptyEnum1` defined here Foo(bool), //~^ not covered //~| not covered } enum NonEmptyEnum2 { //~ `NonEmptyEnum2` defined here Foo(bool), //~^ not covered //~| not covered Bar, //~^ not covered //~| not covered } enum
{ //~ `NonEmptyEnum5` defined here V1, V2, V3, V4, V5, } macro_rules! match_empty { ($e:expr) => { match $e {} }; } macro_rules! match_false { ($e:expr) => { match $e { _ if false => {} } }; } fn foo(x: Foo) { match_empty!(x); // ok match_false!(x); // Not detected as unreachable nor exhaustive. //~^ ERROR non-exhaustive patterns: `_` not covered match x { _ => {}, // Not detected as unreachable, see #55123. } } fn main() { // `exhaustive_patterns` is not on, so uninhabited branches are not detected as unreachable. match None::<!> { None => {} Some(_) => {} } match None::<Foo> { None => {} Some(_) => {} } match_empty!(0u8); //~^ ERROR type `u8` is non-empty match_empty!(NonEmptyStruct(true)); //~^ ERROR type `NonEmptyStruct` is non-empty match_empty!((NonEmptyUnion1 { foo: () })); //~^ ERROR type `NonEmptyUnion1` is non-empty match_empty!((NonEmptyUnion2 { foo: () })); //~^ ERROR type `NonEmptyUnion2` is non-empty match_empty!(NonEmptyEnum1::Foo(true)); //~^ ERROR `Foo(_)` not covered match_empty!(NonEmptyEnum2::Foo(true)); //~^ ERROR `Foo(_)` and `Bar` not covered match_empty!(NonEmptyEnum5::V1); //~^ ERROR `V1`, `V2`, `V3` and 2 more not covered match_false!(0u8); //~^ ERROR `_` not covered match_false!(NonEmptyStruct(true)); //~^ ERROR `NonEmptyStruct(_)` not covered match_false!((NonEmptyUnion1 { foo: () })); //~^ ERROR `NonEmptyUnion1 { .. }` not covered match_false!((NonEmptyUnion2 { foo: () })); //~^ ERROR `NonEmptyUnion2 { .. }` not covered match_false!(NonEmptyEnum1::Foo(true)); //~^ ERROR `Foo(_)` not covered match_false!(NonEmptyEnum2::Foo(true)); //~^ ERROR `Foo(_)` and `Bar` not covered match_false!(NonEmptyEnum5::V1); //~^ ERROR `V1`, `V2`, `V3` and 2 more not covered }
NonEmptyEnum5
receipt_listener.go
package sequence import ( "context" "errors" "fmt" "math/big" "sync"
"github.com/0xsequence/ethkit/ethmonitor" "github.com/0xsequence/ethkit/ethrpc" "github.com/0xsequence/ethkit/go-ethereum" "github.com/0xsequence/ethkit/go-ethereum/common" "github.com/0xsequence/ethkit/go-ethereum/core/types" "github.com/0xsequence/go-sequence/lib/logadapter" "github.com/goware/breaker" "github.com/rs/zerolog" ) type ReceiptListener struct { log zerolog.Logger provider *ethrpc.Provider monitor *ethmonitor.Monitor pastReceipts []BlockOfReceipts subscribers []*subscriber mu sync.Mutex } type ReceiptResult struct { MetaTxnID MetaTxnID Status MetaTxnStatus TxnReceipt *types.Receipt } type BlockOfReceipts []*ReceiptResult type subscriber struct { ch chan *ReceiptResult done chan struct{} unsubscribe func() } func NewReceiptListener(log zerolog.Logger, provider *ethrpc.Provider, monitor *ethmonitor.Monitor) (*ReceiptListener, error) { return &ReceiptListener{ log: log.With().Str("ps", "ReceiptListener").Logger(), provider: provider, monitor: monitor, pastReceipts: make([]BlockOfReceipts, 0), subscribers: make([]*subscriber, 0), }, nil } func (l *ReceiptListener) Run(ctx context.Context) error { sub := l.monitor.Subscribe() defer sub.Unsubscribe() br := breaker.New(logadapter.Wrap(l.log), 1*time.Second, 2, 10) for { select { case <-ctx.Done(): l.log.Debug().Msgf("parent signaled to cancel - receipt listener is quitting") return nil case <-sub.Done(): l.log.Info().Msgf("receipt listener is stopped because monitor signaled its stopping") return nil case blocks := <-sub.Blocks(): block := blocks.LatestBlock().Block if block == nil { l.log.Warn().Msgf("monitor return latestblock of nil, unexpected but skipping..") continue } err := br.Do(ctx, func() error { return l.handleBlock(ctx, block) }) if err != nil { if errors.Is(err, breaker.ErrHitMaxRetries) { l.log.Err(err).Msgf("failed to handle block %d after many retries", block.NumberU64()) continue } else { l.log.Err(err).Msgf("failed to handle block %d", block.NumberU64()) continue } } } } } func (l *ReceiptListener) handleBlock(ctx context.Context, block *types.Block) error { blockOfReceipts := BlockOfReceipts{} nonceChangedTopics := [][]common.Hash{{NonceChangeEventSig}} query := ethereum.FilterQuery{ FromBlock: block.Number(), ToBlock: new(big.Int).Add(block.Number(), common.Big1), Topics: nonceChangedTopics, } // Find all nonce change events logs, err := l.provider.FilterLogs(ctx, query) if err != nil { return err } l.log.Debug(). Uint64("block", block.NumberU64()). Int("logs", len(logs)). Msgf("Found logs") for _, log := range logs { // We need to find the metaTxnIds tx, err := l.provider.TransactionReceipt(ctx, log.TxHash) if err != nil { l.log.Warn(). Uint64("block", block.NumberU64()). Str("tx", log.TxHash.Hex()). Err(err). Msgf("Error retrieving tx receipt") return err } // We could see multiple metaTxns on the same transaction for _, txLog := range tx.Logs { var status MetaTxnStatus var metaTxnID MetaTxnID // Success transactions have no topics and the metaTxId is the data // we can't really know if this is a metaTxn or not, but we assume it is // if it isn't is just going to get ignored if len(txLog.Topics) == 0 && len(txLog.Data) == 32 { status = MetaTxnExecuted metaTxnID = MetaTxnID(common.Bytes2Hex(txLog.Data)) l.log.Debug(). Str("tx", tx.TxHash.Hex()). Str("meta-tx", string(metaTxnID)). 
Msgf("Found succeed meta-tx") // Failed transactions have the TxFailed topic and the data begins with the metaTxInd } else if len(txLog.Topics) == 1 && txLog.Topics[0] == TxFailedEventSig && len(txLog.Data) >= 32 { status = MetaTxnExecuted metaTxnID = MetaTxnID(common.Bytes2Hex(txLog.Data[:32])) l.log.Debug(). Str("tx", tx.TxHash.Hex()). Str("meta-tx", string(metaTxnID)). Msgf("Found failed meta-tx") } else { continue // unknown, skip } result := &ReceiptResult{ MetaTxnID: metaTxnID, Status: status, TxnReceipt: tx, } // Add found result to block of receipts blockOfReceipts = append(blockOfReceipts, result) } } // Nothing to record, skipping. if len(blockOfReceipts) == 0 { return nil } // Publish to subscribers l.mu.Lock() defer l.mu.Unlock() for _, result := range blockOfReceipts { for _, sub := range l.subscribers { select { case <-sub.done: case sub.ch <- result: case <-time.After(2 * time.Second): l.log.Warn().Msgf("channel publisher is blocked by a slow subscriber") } } } l.log.Debug(). Int("past-block-entries", len(l.pastReceipts)). Int("new-entries", len(blockOfReceipts)). Msgf("Push into past receipts") if len(l.pastReceipts) < 1024 { // Append at the end of slice l.pastReceipts = append(l.pastReceipts, blockOfReceipts) } else { // Append value but also pop the queue l.pastReceipts = append(l.pastReceipts[1:], blockOfReceipts) } return nil } func (l *ReceiptListener) subscribe() *subscriber { l.mu.Lock() defer l.mu.Unlock() subscriber := &subscriber{ ch: make(chan *ReceiptResult, 1), done: make(chan struct{}), } subscriber.unsubscribe = func() { close(subscriber.done) l.mu.Lock() defer l.mu.Unlock() close(subscriber.ch) for i, sub := range l.subscribers { if sub == subscriber { l.subscribers = append(l.subscribers[:i], l.subscribers[i+1:]...) return } } } l.subscribers = append(l.subscribers, subscriber) return subscriber } func (l *ReceiptListener) WaitForMetaTxn(ctx context.Context, metaTxnID MetaTxnID, optTimeout ...time.Duration) (MetaTxnStatus, *types.Receipt, error) { // Use optional timeout if passed, otherwise use deadline on the provided ctx, or finally, // set a default timeout of 120 seconds. var cancel context.CancelFunc if len(optTimeout) > 0 { ctx, cancel = context.WithTimeout(ctx, optTimeout[0]) defer cancel() } else { if _, ok := ctx.Deadline(); !ok { ctx, cancel = context.WithTimeout(ctx, 120*time.Second) defer cancel() } } // Listen for new receipts sub := l.subscribe() defer sub.unsubscribe() // See if metaTxn has been seen in past blocks totalInspected := 0 for _, bol := range l.pastReceipts { for _, receipt := range bol { totalInspected++ if receipt.MetaTxnID == metaTxnID { l.log.Debug(). Int("inspected", totalInspected). Str("meta-tx", string(metaTxnID)). Msgf("Found receipt among past receipts") return receipt.Status, receipt.TxnReceipt, nil } } } l.log.Debug(). Int("inspected", totalInspected). Str("meta-tx", string(metaTxnID)). Msgf("Receipt not found among past receipts. 
Now listening..") // Wait for receipt or context deadline var receipt *ReceiptResult var err error var wg sync.WaitGroup wg.Add(1) go func(ctx context.Context) { defer wg.Done() for { select { case <-ctx.Done(): err := ctx.Err() if errors.Is(err, context.DeadlineExceeded) { err = fmt.Errorf("waiting for meta transaction timeout for %v: %w", metaTxnID, err) return } else if err != nil { err = fmt.Errorf("failed waiting for meta transaction for %v: %w", metaTxnID, err) return } else { return } case <-sub.done: return case receipt = <-sub.ch: if receipt.MetaTxnID == metaTxnID { return } } } }(ctx) wg.Wait() if err != nil { return 0, nil, err } if receipt != nil { return receipt.Status, receipt.TxnReceipt, nil } return 0, nil, nil }
"time"
filters_test.py
#!/usr/bin/env python """Tests for grr.server.checks.filters.""" import collections from grr.lib import flags from grr.lib.rdfvalues import anomaly from grr.lib.rdfvalues import client as rdf_client from grr.lib.rdfvalues import paths as rdf_paths from grr.lib.rdfvalues import protodict as rdf_protodict from grr.server.checks import checks from grr.server.checks import filters from grr.test_lib import test_lib # Just a named tuple that can be used to test objectfilter expressions. Sample = collections.namedtuple("Sample", ["x", "y"]) class BaseFilterTests(test_lib.GRRBaseTest): """Test base filter methods and operations.""" def testEnforceList(self): filt = filters.Filter() self.assertRaises(filters.ProcessingError, filt.Parse, "not_a_list", False) def testValidate(self): filt = filters.Filter() self.assertRaises(NotImplementedError, filt.Validate, "anything") def testParse(self): filt = filters.Filter() self.assertRaises(NotImplementedError, filt.Parse, [], "do nothing") class AttrFilterTests(test_lib.GRRBaseTest): """Test attribute filter methods and operations.""" def testValidate(self): filt = filters.AttrFilter() self.assertRaises(filters.DefinitionError, filt.Validate, " ") self.assertFalse(filt.Validate("cfg1")) self.assertFalse(filt.Validate("cfg1 cfg1.test1")) def testParse(self): filt = filters.AttrFilter() hit1 = rdf_protodict.AttributedDict(k1="hit1", k2="found1", k3=[3, 4]) hit2 = rdf_protodict.AttributedDict(k1="hit2", k2="found2") meta = rdf_protodict.AttributedDict(one=hit1, two=hit2) objs = [hit1, hit2, meta] results = filt.Parse(objs, "k1 k2 one.k3") self.assertEqual(5, len(results)) r1, r2, r3, r4, r5 = results self.assertEqual("k1", r1.key) self.assertEqual("hit1", r1.value) self.assertEqual("k1", r2.key) self.assertEqual("hit2", r2.value) self.assertEqual("k2", r3.key) self.assertEqual("found1", r3.value) self.assertEqual("k2", r4.key) self.assertEqual("found2", r4.value) self.assertEqual("one.k3", r5.key) self.assertEqual([3, 4], r5.value) class ItemFilterTests(test_lib.GRRBaseTest): """Test item filter methods and operations.""" def testParse(self): filt = filters.ItemFilter() one = rdf_protodict.AttributedDict(test1="1", test2=[2, 3]) foo = rdf_protodict.AttributedDict(test1="foo", test2=["bar", "baz"]) fs = rdf_client.Filesystem(device="/dev/sda1", mount_point="/root") objs = [one, foo, fs] results = filt.Parse(objs, "test1 is '1'") self.assertEqual(1, len(results)) self.assertEqual("test1", results[0].key) self.assertEqual("1", results[0].value) results = filt.Parse(objs, "test1 is '2'") self.assertFalse(results) results = filt.Parse(objs, "test2 contains 3") self.assertEqual(1, len(results)) self.assertEqual("test2", results[0].key) self.assertEqual([2, 3], results[0].value) results = filt.Parse(objs, "test1 is '1' or test1 contains 'foo'") self.assertEqual(2, len(results)) self.assertEqual("test1", results[0].key) self.assertEqual("1", results[0].value) self.assertEqual("test1", results[1].key) self.assertEqual("foo", results[1].value) results = filt.Parse(objs, "mount_point is '/root'") self.assertEqual(1, len(results)) self.assertEqual("mount_point", results[0].key) self.assertEqual("/root", results[0].value) class ForEachTests(test_lib.GRRBaseTest): """Test ForEach filter methods and operations.""" def testValidate(self): filt = filters.ForEach() self.assertRaises(filters.DefinitionError, filt.Validate, " ") self.assertRaises(filters.DefinitionError, filt.Validate, "attr1 attr2") self.assertFalse(filt.Validate("attr1")) def testParse(self): filt = 
filters.ForEach() hit1 = rdf_protodict.AttributedDict(k1="v1", k2="v2", k3="v3") hit2 = rdf_protodict.AttributedDict(k1="v4", k2="v5", k3="v6") meta = rdf_protodict.AttributedDict( foo=["foo", "bar"], target=[hit1, hit2], null=[]) objs = [meta] results = filt.Parse(objs, "target") self.assertEqual(2, len(results)) self.assertItemsEqual([hit1, hit2], [r.item for r in results]) results = filt.Parse(objs, "foo") self.assertEqual(2, len(results)) self.assertItemsEqual(["foo", "bar"], [r.item for r in results]) results = filt.Parse(objs, "null") self.assertEqual(0, len(results)) class ObjectFilterTests(test_lib.GRRBaseTest): """Test object filter methods and operations.""" def testValidate(self): filt = filters.ObjectFilter() self.assertRaises(filters.DefinitionError, filt.Validate, "bad term") self.assertFalse(filt.Validate("test is 'ok'")) def testParse(self): filt = filters.ObjectFilter() hit1 = rdf_protodict.AttributedDict(test="hit1") hit2 = rdf_protodict.AttributedDict(test="hit2") miss = rdf_protodict.AttributedDict(test="miss") objs = [hit1, hit2, miss] results = filt.Parse(objs, "test is 'hit1'") self.assertItemsEqual([hit1], results) results = filt.Parse(objs, "test is 'hit2'") self.assertItemsEqual([hit2], results) results = filt.Parse(objs, "test inset 'hit1,hit2'") self.assertItemsEqual([hit1, hit2], results) class RDFFilterTests(test_lib.GRRBaseTest): """Test rdf filter methods and operations.""" def testValidate(self): filt = filters.RDFFilter() self.assertFalse(filt.Validate("KnowledgeBase,AttributedDict")) self.assertRaises(filters.DefinitionError, filt.Validate, "KnowledgeBase,Nonexistent") def testParse(self): filt = filters.RDFFilter() cfg = rdf_protodict.AttributedDict() anom = anomaly.Anomaly() objs = [cfg, anom] results = filt.Parse(objs, "KnowledgeBase") self.assertFalse(results) results = filt.Parse(objs, "AttributedDict,KnowledgeBase") self.assertItemsEqual([cfg], results) results = filt.Parse(objs, "Anomaly,AttributedDict,KnowledgeBase") self.assertItemsEqual(objs, results) class StatFilterTests(test_lib.GRRBaseTest): """Test stat filter methods and operations.""" bad_null = ["", " :"] bad_file = ["file_re:[[["] bad_gids = ["gid: ", "gid 0", "gid:0", "gid:=", "gid:gid:"] bad_mode = ["mode 755", "mode:755", "mode:0999", "mode:0777,0775"] bad_mask = ["mask 755", "mask:755", "mask:0999", "mask:0777,0775"] bad_path = ["path_re:[[["] bad_type = [ "file_type: ", "file_type foo", "file_type:foo", "file_type:directory,regular" ] bad_uids = ["uid: ", "uid 0", "uid:0", "uid:=", "uid:gid:"] badness = [ bad_null, bad_file, bad_gids, bad_mask, bad_mode, bad_path, bad_type, bad_uids ] ok_file = ["file_re:/etc/passwd"] ok_gids = ["gid:=0", "gid:=1,>1,<1,>=1,<=1,!1"] ok_mode = ["mode:0002"] ok_mask = ["mode:1002"] ok_path = ["path_re:/home/*"] ok_type = ["file_type:REGULAR", "file_type:directory"] ok_uids = ["uid:=0", "uid:=1,>1,<1,>=1,<=1,!1"] just_fine = [ok_file, ok_gids, ok_mask, ok_mode, ok_path, ok_type, ok_uids] def _GenStat(self, path="/etc/passwd", st_mode=33184, st_ino=1063090, st_dev=64512L, st_nlink=1, st_uid=1001, st_gid=5000, st_size=1024, st_atime=1336469177, st_mtime=1336129892, st_ctime=1336129892): """Generate a StatEntry RDF value.""" pathspec = rdf_paths.PathSpec( path=path, pathtype=rdf_paths.PathSpec.PathType.OS) return rdf_client.StatEntry( pathspec=pathspec, st_mode=st_mode, st_ino=st_ino, st_dev=st_dev, st_nlink=st_nlink, st_uid=st_uid, st_gid=st_gid, st_size=st_size, st_atime=st_atime, st_mtime=st_mtime, st_ctime=st_ctime) def testValidate(self): filt = 
filters.StatFilter() for params in self.badness: for bad in params: self.assertRaises(filters.DefinitionError, filt.Validate, bad) for params in self.just_fine: for ok in params: self.assertTrue(filt.Validate(ok), "Rejected valid expression: %s" % ok) def testFileTypeParse(self): """FileType filters restrict results to specified file types.""" all_types = { "BLOCK": self._GenStat(st_mode=24992), # 0060640 "Character": self._GenStat(st_mode=8608), # 0020640 "directory": self._GenStat(st_mode=16873), # 0040751 "fiFO": self._GenStat(st_mode=4534), # 0010666 "REGULAR": self._GenStat(st_mode=33204), # 0100664 "socket": self._GenStat(st_mode=49568), # 0140640 "SymLink": self._GenStat(st_mode=41471) } # 0120777 filt = filters.StatFilter() for file_type, expected in all_types.iteritems(): filt._Flush() results = filt.Parse(all_types.values(), "file_type:%s" % file_type) self.assertEqual(1, len(results), "Expected exactly 1 %s" % file_type) self.assertEqual(expected, results[0], "Expected stat %s, got %s" % (expected, results[0])) def
(self): """File regexes operate successfully.""" filt = filters.StatFilter() obj1 = self._GenStat(path="/etc/passwd") obj2 = self._GenStat(path="/etc/alternatives/ssh-askpass") obj3 = self._GenStat(path="/etc/alternatives/ssh-askpass.1.gz") objs = [obj1, obj2, obj3] results = filt.Parse(objs, "file_re:pass") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "file_re:pass$") self.assertItemsEqual([obj2], results) results = filt.Parse(objs, "file_re:^pass") self.assertItemsEqual([obj1], results) def testPathREParse(self): """Path regexes operate successfully.""" filt = filters.StatFilter() obj1 = self._GenStat(path="/etc/passwd") obj2 = self._GenStat(path="/etc/alternatives/ssh-askpass") obj3 = self._GenStat(path="/etc/alternatives/ssh-askpass.1.gz") objs = [obj1, obj2, obj3] results = filt.Parse(objs, "path_re:/etc/*") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "path_re:alternatives") self.assertItemsEqual([obj2, obj3], results) results = filt.Parse(objs, "path_re:alternatives file_re:pass$") self.assertItemsEqual([obj2], results) def testGIDParse(self): """GID comparisons operate successfully.""" filt = filters.StatFilter() obj1 = self._GenStat(st_gid=0) obj2 = self._GenStat(st_gid=500) obj3 = self._GenStat(st_gid=5000) objs = [obj1, obj2, obj3] results = filt.Parse(objs, "gid:=0") self.assertItemsEqual([obj1], results) results = filt.Parse(objs, "gid:>=0") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "gid:>0") self.assertItemsEqual([obj2, obj3], results) results = filt.Parse(objs, "gid:>0,<=5000") self.assertItemsEqual([obj2, obj3], results) results = filt.Parse(objs, "gid:>0,<5000") self.assertItemsEqual([obj2], results) results = filt.Parse(objs, "gid:!5000") self.assertItemsEqual([obj1, obj2], results) def testUIDParse(self): """UID comparisons operate successfully.""" filt = filters.StatFilter() obj1 = self._GenStat(st_uid=1001) obj2 = self._GenStat(st_uid=5000) objs = [obj1, obj2] results = filt.Parse(objs, "uid:=0") self.assertFalse(results) results = filt.Parse(objs, "uid:=1001") self.assertItemsEqual([obj1], results) results = filt.Parse(objs, "uid:>=0") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "uid:>0") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "uid:>0,<=5000") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "uid:>0,<5000") self.assertItemsEqual([obj1], results) results = filt.Parse(objs, "uid:!5000") self.assertItemsEqual([obj1], results) def testPermissionsParse(self): """Permissions comparisons operate successfully.""" filt = filters.StatFilter() obj1 = self._GenStat(st_mode=0100740) obj2 = self._GenStat(st_mode=0100755) objs = [obj1, obj2] results = filt.Parse(objs, "mode:0644") self.assertFalse(results) results = filt.Parse(objs, "mode:0740") self.assertItemsEqual([obj1], results) results = filt.Parse(objs, "mode:0640 mask:0640") self.assertItemsEqual(objs, results) results = filt.Parse(objs, "mode:0014 mask:0014") self.assertItemsEqual([obj2], results) def testParseFileObjs(self): """Multiple file types are parsed successfully.""" filt = filters.StatFilter() ok = self._GenStat(path="/etc/shadow", st_uid=0, st_gid=0, st_mode=0100640) link = self._GenStat( path="/etc/shadow", st_uid=0, st_gid=0, st_mode=0120640) user = self._GenStat( path="/etc/shadow", st_uid=1000, st_gid=1000, st_mode=0100640) writable = self._GenStat( path="/etc/shadow", st_uid=0, st_gid=0, st_mode=0100666) cfg = {"path": "/etc/shadow", "st_uid": 0, "st_gid": 0, "st_mode": 0100640} invalid = 
rdf_protodict.AttributedDict(**cfg) objs = [ok, link, user, writable, invalid] results = filt.Parse(objs, "uid:>=0 gid:>=0") self.assertItemsEqual([ok, link, user, writable], results) results = filt.Parse(objs, "uid:=0 mode:0440 mask:0440") self.assertItemsEqual([ok, link, writable], results) results = filt.Parse(objs, "uid:=0 mode:0440 mask:0444") self.assertItemsEqual([ok, link], results) results = list( filt.Parse(objs, "uid:=0 mode:0440 mask:0444 file_type:regular")) self.assertItemsEqual([ok], results) class FilterRegistryTests(test_lib.GRRBaseTest): """Test filter methods and operations.""" def testFilterRegistry(self): filters.Filter.filters = {} filt = filters.Filter.GetFilter("Filter") # It should be the right type of filter. # And should be in the registry already. self.assertIsInstance(filt, filters.Filter) # The registry must never give the same object to multiple callers. self.assertNotEqual(filt, filters.Filter.GetFilter("Filter")) filt = filters.Filter.GetFilter("ObjectFilter") self.assertIsInstance(filt, filters.ObjectFilter) self.assertNotEqual(filt, filters.Filter.GetFilter("ObjectFilter")) filt = filters.Filter.GetFilter("RDFFilter") self.assertIsInstance(filt, filters.RDFFilter) self.assertNotEqual(filt, filters.Filter.GetFilter("RDFFilter")) filters.Filter.filters = {} self.assertRaises(filters.DefinitionError, filters.Filter.GetFilter, "???") class HandlerTests(test_lib.GRRBaseTest): """Test handler operations.""" def setUp(self): super(HandlerTests, self).setUp() fx0 = checks.Filter({"type": "ObjectFilter", "expression": "x == 0"}) fy0 = checks.Filter({"type": "ObjectFilter", "expression": "y == 0"}) bad = checks.Filter({"type": "ObjectFilter", "expression": "y =="}) self.ok = [fx0, fy0] self.bad = [fx0, fy0, bad] self.all = [Sample(0, 0), Sample(0, 1), Sample(1, 0), Sample(1, 1)] def GetFilters(self, filt_defs): """Initialize one or more filters as if they were contained in a probe.""" # The artifact isn't actually used for anything, it's just required to # initialize handlers. probe = checks.Probe(artifact="Data", filters=filt_defs) return probe.filters def testValidateFilters(self): self.assertEquals(2, len(self.GetFilters(self.ok))) self.assertRaises(filters.DefinitionError, self.GetFilters, self.bad) def testBaseHandler(self): # Handler needs an artifact. self.assertRaises(filters.DefinitionError, filters.BaseHandler) h = filters.BaseHandler("STUB") self.assertRaises(NotImplementedError, h.Parse, "STUB") def testNoOpHandler(self): h = filters.GetHandler("PASSTHROUGH") handler = h("Data", filters=self.GetFilters(self.ok)) self.assertItemsEqual(self.all, handler.Parse(self.all)) def testParallelHandler(self): h = filters.GetHandler("PARALLEL") # Without filters. handler = h("Data", filters=[]) self.assertItemsEqual(self.all, handler.Parse(self.all)) # With filters. handler = h("Data", filters=self.GetFilters(self.ok)) expected = [Sample(0, 0), Sample(0, 1), Sample(1, 0)] self.assertItemsEqual(expected, handler.Parse(self.all)) def testSerialHandler(self): h = filters.GetHandler("SERIAL") # Without filters. handler = h("Data", filters=[]) self.assertItemsEqual(self.all, handler.Parse(self.all)) # With filters. handler = h("Data", filters=self.GetFilters(self.ok)) expected = [Sample(0, 0)] self.assertItemsEqual(expected, handler.Parse(self.all)) def main(argv): test_lib.main(argv) if __name__ == "__main__": flags.StartMain(main)
testFileREParse
axios-utils.js
'use strict'
var config = require('../config.json'); _axios.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf8'; _axios.defaults.withCredentials = true; const tough = require('tough-cookie'); const axiosCookieJarSupport = require('axios-cookiejar-support').default; axiosCookieJarSupport(_axios); const cookieJar = new tough.CookieJar(); exports.get = function(url, params, timeout, successfn, errorfn) { _axios.get(url, { baseURL: config.cduestc.url, method: 'GET', params: params, jar: cookieJar, responseType: "arraybuffer", timeout: timeout }) .then(successfn) .catch(errorfn); } exports.post = function(url, params, data, timeout, successfn, errorfn) { _axios.get(url, { baseURL: config.cduestc.url, method: 'POST', params: params, jar: cookieJar, data: data, responseType: "arraybuffer", timeout: timeout }) .then(successfn) .catch(errorfn); }
var _axios = require('axios');
protocol.go
package client type message struct { name string signature string types []*typ } type typ struct { name string version int methods []message events []message } var displayMeta = &typ{ name: "wl_display", version: 1, methods: []message{ {"sync", "n", []*typ{callbackMeta}}, {"get_registry", "n", []*typ{registryMeta}}, }, events: []message{ {"error", "ous", nil}, {"delete_id", "u", nil}}, } var registryMeta = &typ{ name: "wl_registry", version: 1, methods: []message{ {"bind", "usun", nil}}, events: []message{ {"global", "usu", nil}, {"global_remove", "u", nil}}, } var callbackMeta = &typ{ name: "wl_callback", version: 1, methods: nil, events: []message{ {"done", "u", nil}}, } var shmPoolMeta = &typ{ name: "wl_shm_pool", version: 1, methods: []message{ {"create_buffer", "niiiiu", []*typ{bufferMeta, nil, nil, nil, nil, nil}}, {"destroy", "", nil}, {"resize", "i", nil}}, events: nil, } var shmMeta = &typ{ name: "wl_shm", version: 1, methods: []message{ {"create_pool", "nhi", []*typ{shmPoolMeta, nil, nil}}}, events: []message{
{"format", "u", nil}}, } var bufferMeta = &typ{ name: "wl_buffer", version: 1, methods: []message{ {"destroy", "", nil}}, events: []message{ {"release", "", nil}}, } var outputMeta = &typ{ name: "wl_output", version: 2, methods: nil, events: []message{ {"geometry", "iiiiissi", nil}, {"mode", "uiii", nil}, {"done", "2", nil}, {"scale", "2i", nil}}, } var dataOfferMeta = &typ{ name: "wl_data_offer", version: 1, methods: []message{ {"accept", "u?s", nil}, {"receive", "sh", nil}, {"destroy", "", nil}}, events: []message{ {"offer", "s", nil}}, } var dataSourceMeta = &typ{ name: "wl_data_source", version: 1, methods: []message{ {"offer", "s", nil}, {"destroy", "", nil}}, events: []message{ {"target", "?s", nil}, {"send", "sh", nil}, {"cancelled", "", nil}}, } var dataDeviceMeta = &typ{ name: "wl_data_device", version: 1, methods: []message{ {"start_drag", "?oo?ou", []*typ{dataSourceMeta, surfaceMeta, surfaceMeta, nil}}, {"set_selection", "?ou", []*typ{dataSourceMeta, nil}}}, events: []message{ {"data_offer", "n", []*typ{dataOfferMeta}}, {"enter", "uoff?o", []*typ{nil, surfaceMeta, nil, nil, dataOfferMeta}}, {"leave", "", nil}, {"motion", "uff", nil}, {"drop", "", nil}, {"selection", "?o", []*typ{dataOfferMeta}}, }, } var dataDeviceManagerMeta = &typ{ name: "wl_data_device_manager", version: 1, methods: []message{ {"create_data_source", "n", []*typ{dataOfferMeta}}, {"get_data_device", "no", []*typ{dataDeviceMeta, seatMeta}}}, events: nil, } var shellMeta = &typ{ name: "wl_shell", version: 1, methods: []message{ {"get_shell_surface", "no", []*typ{shellSurfaceMeta, surfaceMeta}}}, events: nil, } var shellSurfaceMeta = &typ{ name: "wl_shell_surface", version: 1, methods: []message{ {"pong", "u", nil}, {"move", "ou", []*typ{seatMeta, nil}}, {"resize", "ouu", []*typ{seatMeta, nil, nil}}, {"set_toplevel", "", nil}, {"set_transient", "oiiu", []*typ{surfaceMeta, nil, nil, nil}}, {"set_fullscreen", "uu?o", []*typ{nil, nil, outputMeta}}, {"set_popup", "ouoiiu", []*typ{seatMeta, nil, surfaceMeta, nil, nil, nil}}, {"set_maximized", "?o", []*typ{outputMeta}}, {"set_title", "s", nil}, {"set_class", "s", nil}, }, events: []message{ {"ping", "u", nil}, {"configure", "uii", nil}, {"popup_done", "", nil}}, } var compositorMeta = &typ{ name: "wl_compositor", version: 3, methods: []message{ {"create_surface", "n", []*typ{surfaceMeta}}, {"create_region", "n", []*typ{regionMeta}}}, events: nil, } var regionMeta = &typ{ name: "wl_region", version: 1, methods: []message{ {"destroy", "", nil}, {"add", "iiii", nil}, {"subtract", "iiii", nil}}, events: nil, } var surfaceMeta = &typ{ name: "wl_surface", version: 3, methods: []message{ {"destroy", "", nil}, {"attach", "?oii", []*typ{bufferMeta, nil, nil}}, {"damage", "iiii", nil}, {"frame", "n", []*typ{callbackMeta}}, {"set_opaque_region", "?o", []*typ{regionMeta}}, {"set_input_region", "?o", []*typ{regionMeta}}, {"commit", "", nil}, {"set_buffer_transform", "2i", nil}, {"set_buffer_scale", "3i", nil}}, events: []message{ {"enter", "o", []*typ{outputMeta}}, {"leave", "o", []*typ{outputMeta}}}, } var seatMeta = &typ{ name: "wl_seat", version: 3, methods: []message{ {"get_pointer", "n", []*typ{pointerMeta}}, {"get_keyboard", "n", []*typ{keyboardMeta}}, {"get_touch", "n", []*typ{touchMeta}}}, events: []message{ {"capabilities", "u", nil}, {"name", "2s", nil}}, } var pointerMeta = &typ{ name: "wl_pointer", version: 3, methods: []message{ {"set_cursor", "u?oii", []*typ{nil, surfaceMeta, nil, nil}}, {"release", "3", nil}}, events: []message{ {"enter", "3uoff", []*typ{nil, 
surfaceMeta, nil, nil}}, {"leave", "3uo", []*typ{nil, surfaceMeta}}, {"motion", "3uff", nil}, {"button", "3uuuu", nil}, {"axis", "3uuf", nil}}, } var keyboardMeta = &typ{ name: "wl_keyboard", version: 3, methods: []message{ {"release", "3", nil}}, events: []message{ {"keymap", "3uhu", nil}, {"enter", "3uoa", []*typ{nil, surfaceMeta, nil}}, {"leave", "3uo", []*typ{nil, surfaceMeta}}, {"key", "3uuuu", nil}, {"modifiers", "3uuuuu", nil}}, } var touchMeta = &typ{ name: "wl_touch", version: 3, methods: []message{ {"release", "3", nil}}, events: []message{ {"down", "3uuoiff", []*typ{nil, nil, surfaceMeta, nil, nil, nil}}, {"up", "3uui", nil}, {"motion", "3uiff", nil}, {"frame", "3", nil}, {"cancel", "3", nil}}, } var subcompositorMeta = &typ{ name: "wl_subcompositor", version: 1, methods: []message{ {"destroy", "", nil}, {"get_subsurface", "noo", []*typ{subsurfaceMeta, surfaceMeta, surfaceMeta}}}, events: nil, } var subsurfaceMeta = &typ{ name: "wl_subsurface", version: 1, methods: []message{ {"destroy", "", nil}, {"set_position", "ii", nil}, {"place_above", "o", []*typ{surfaceMeta}}, {"place_below", "o", []*typ{surfaceMeta}}, {"set_sync", "", nil}, {"set_desync", "", nil}}, events: nil, }
f32.rs
// NB: transitionary, de-mode-ing. #[forbid(deprecated_mode)]; #[forbid(deprecated_pattern)]; //! Operations and constants for `f32` // PORT use cmath::c_float::*; use cmath::c_float_targ_consts::*; export add, sub, mul, div, rem, lt, le, eq, ne, ge, gt; export is_positive, is_negative, is_nonpositive, is_nonnegative; export is_zero, is_infinite, is_finite; export NaN, is_NaN, infinity, neg_infinity; export consts; export logarithm; export acos, asin, atan, atan2, cbrt, ceil, copysign, cos, cosh, floor; export erf, erfc, exp, expm1, exp2, abs, abs_sub; export mul_add, fmax, fmin, nextafter, frexp, hypot, ldexp; export lgamma, ln, log_radix, ln1p, log10, log2, ilog_radix; export modf, pow, round, sin, sinh, sqrt, tan, tanh, tgamma, trunc; export signbit; export num; // These are not defined inside consts:: for consistency with // the integer types const NaN: f32 = 0.0_f32/0.0_f32; const infinity: f32 = 1.0_f32/0.0_f32; const neg_infinity: f32 = -1.0_f32/0.0_f32; pure fn is_NaN(f: f32) -> bool { f != f } pure fn add(x: f32, y: f32) -> f32 { return x + y; } pure fn sub(x: f32, y: f32) -> f32 { return x - y; } pure fn mul(x: f32, y: f32) -> f32 { return x * y; } pure fn div(x: f32, y: f32) -> f32 { return x / y; } pure fn rem(x: f32, y: f32) -> f32 { return x % y; } pure fn lt(x: f32, y: f32) -> bool { return x < y; } pure fn le(x: f32, y: f32) -> bool { return x <= y; } pure fn eq(x: f32, y: f32) -> bool { return x == y; } pure fn ne(x: f32, y: f32) -> bool { return x != y; } pure fn ge(x: f32, y: f32) -> bool { return x >= y; } pure fn gt(x: f32, y: f32) -> bool { return x > y; } // FIXME (#1999): replace the predicates below with llvm intrinsics or // calls to the libmath macros in the rust runtime for performance. /// Returns true if `x` is a positive number, including +0.0f320 and +Infinity pure fn is_positive(x: f32) -> bool { return x > 0.0f32 || (1.0f32/x) == infinity; } /// Returns true if `x` is a negative number, including -0.0f320 and -Infinity pure fn is_negative(x: f32) -> bool { return x < 0.0f32 || (1.0f32/x) == neg_infinity; } /** * Returns true if `x` is a negative number, including -0.0f320 and -Infinity * * This is the same as `f32::is_negative`. */ pure fn is_nonpositive(x: f32) -> bool { return x < 0.0f32 || (1.0f32/x) == neg_infinity; } /** * Returns true if `x` is a positive number, including +0.0f320 and +Infinity * * This is the same as `f32::is_positive`.) */ pure fn is_nonnegative(x: f32) -> bool { return x > 0.0f32 || (1.0f32/x) == infinity; } /// Returns true if `x` is a zero number (positive or negative zero) pure fn is_zero(x: f32) -> bool { return x == 0.0f32 || x == -0.0f32; } /// Returns true if `x`is an infinite number pure fn is_infinite(x: f32) -> bool { return x == infinity || x == neg_infinity; } /// Returns true if `x`is a finite number pure fn is_finite(x: f32) -> bool { return !(is_NaN(x) || is_infinite(x)); } // FIXME (#1999): add is_normal, is_subnormal, and fpclassify. /* Module: consts */ mod consts { // FIXME (requires Issue #1433 to fix): replace with mathematical // constants from cmath. 
/// Archimedes' constant const pi: f32 = 3.14159265358979323846264338327950288_f32; /// pi/2.0 const frac_pi_2: f32 = 1.57079632679489661923132169163975144_f32; /// pi/4.0 const frac_pi_4: f32 = 0.785398163397448309615660845819875721_f32; /// 1.0/pi const frac_1_pi: f32 = 0.318309886183790671537767526745028724_f32; /// 2.0/pi const frac_2_pi: f32 = 0.636619772367581343075535053490057448_f32; /// 2.0/sqrt(pi) const frac_2_sqrtpi: f32 = 1.12837916709551257389615890312154517_f32; /// sqrt(2.0) const sqrt2: f32 = 1.41421356237309504880168872420969808_f32; /// 1.0/sqrt(2.0) const frac_1_sqrt2: f32 = 0.707106781186547524400844362104849039_f32; /// Euler's number const e: f32 = 2.71828182845904523536028747135266250_f32; /// log2(e) const log2_e: f32 = 1.44269504088896340735992468100189214_f32; /// log10(e) const log10_e: f32 = 0.434294481903251827651128918916605082_f32; /// ln(2.0) const ln_2: f32 = 0.693147180559945309417232121458176568_f32; /// ln(10.0) const ln_10: f32 = 2.30258509299404568401799145468436421_f32; } pure fn signbit(x: f32) -> int { if is_negative(x) { return 1; } else { return 0; } } pure fn logarithm(n: f32, b: f32) -> f32 { return log2(n) / log2(b); } impl f32: num::Num { pure fn add(&&other: f32) -> f32 { return self + other; } pure fn sub(&&other: f32) -> f32 { return self - other; } pure fn mul(&&other: f32) -> f32 { return self * other; } pure fn div(&&other: f32) -> f32 { return self / other; } pure fn modulo(&&other: f32) -> f32 { return self % other; } pure fn neg() -> f32 { return -self; } pure fn
() -> int { return self as int; } static pure fn from_int(n: int) -> f32 { return n as f32; } } // // Local Variables: // mode: rust // fill-column: 78; // indent-tabs-mode: nil // c-basic-offset: 4 // buffer-file-coding-system: utf-8-unix // End: //
to_int
rename_IQ.py
# KLANR ALI @IQTHON """Rename Telegram Files Syntax: .rnupload file.name""" import asyncio import time from datetime import datetime from hachoir.metadata import extractMetadata from hachoir.parser import createParser from base64 import b64decode import io import math import os from pySmartDL import SmartDL from telethon.tl.types import DocumentAttributeVideo from uniborg.util import progress, humanbytes, time_formatter, admin_cmd thumb_image_path = Config.TMP_DOWNLOAD_DIRECTORY + "/thumb_image.jpg" @borg.on(admin_cmd(pattern="rnupload (.*)")) async def _(event):
if event.fwd_from: return thumb = None if os.path.exists(thumb_image_path): thumb = thumb_image_path await event.edit("`Rename and upload in progress, please wait!`") input_str = event.pattern_match.group(1) if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY): os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY) if event.reply_to_msg_id: start = datetime.now() end = datetime.now() file_name = input_str reply_message = await event.get_reply_message() to_download_directory = Config.TMP_DOWNLOAD_DIRECTORY downloaded_file_name = os.path.join(to_download_directory, file_name) downloaded_file_name = await borg.download_media( reply_message, downloaded_file_name, ) ms_one = (end - start).seconds if os.path.exists(downloaded_file_name): c_time = time.time() await borg.send_file( event.chat_id, downloaded_file_name, force_document=True, supports_streaming=False, allow_cache=False, reply_to=event.message.id, thumb=thumb, ) end_two = datetime.now() os.remove(downloaded_file_name) ms_two = (end_two - end).seconds await event.edit("Downloaded in {} seconds. Uploaded in {} seconds.".format(ms_one, ms_two)) else: await event.edit("File Not Found {}".format(input_str)) else: await event.edit("Syntax // .rnupload file.name as reply to a Telegram media")
start.rs
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { crate::model::{ actions::{Action, ActionKey}, component::{ ComponentInstance, ExecutionState, InstanceState, Package, Runtime, StartReason, }, error::ModelError, hooks::{Event, EventError, EventErrorPayload, EventPayload, RuntimeInfo}, namespace::IncomingNamespace, }, ::routing::{component_instance::ComponentInstanceInterface, policy::GlobalPolicyChecker}, async_trait::async_trait, cm_runner::Runner, config_encoder::ConfigFields, fidl::{ endpoints::{self, Proxy, ServerEnd}, Vmo, }, fidl_fuchsia_component_decl as fdecl, fidl_fuchsia_component_runner as fcrunner, fidl_fuchsia_io as fio, fidl_fuchsia_mem as fmem, fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx, log::*, moniker::AbsoluteMoniker, std::sync::Arc, }; /// Starts a component instance. pub struct StartAction { start_reason: StartReason, } impl StartAction { pub fn new(start_reason: StartReason) -> Self { Self { start_reason } } } #[async_trait] impl Action for StartAction { type Output = Result<fsys::StartResult, ModelError>; async fn handle(&self, component: &Arc<ComponentInstance>) -> Self::Output { do_start(component, &self.start_reason).await } fn key(&self) -> ActionKey { ActionKey::Start } } struct StartContext { component_decl: cm_rust::ComponentDecl, resolved_url: String, runner: Arc<dyn Runner>, start_info: fcrunner::ComponentStartInfo, controller_server_end: ServerEnd<fcrunner::ComponentControllerMarker>, } async fn do_start( component: &Arc<ComponentInstance>, start_reason: &StartReason, ) -> Result<fsys::StartResult, ModelError> { // Pre-flight check: if the component is already started, or was shut down, return now. Note // that `start` also performs this check before scheduling the action here. We do it again // while the action is registered to avoid the risk of dispatching the Started event twice. { let state = component.lock_state().await; let execution = component.lock_execution().await; if let Some(res) = should_return_early(&state, &execution, &component.abs_moniker) { return res; } } let result = async move { // Resolve the component. let component_info = component.resolve().await?; // Find the runner to use. let runner = component.resolve_runner().await.map_err(|e| { warn!("Failed to resolve runner for `{}`: {}", component.abs_moniker, e); e })?; // Generate the Runtime which will be set in the Execution. 
let checker = component.try_get_policy_checker()?; let (pending_runtime, start_info, controller_server_end) = make_execution_runtime( &component, &checker, component_info.resolved_url.clone(), component_info.package, &component_info.decl, component_info.config, ) .await?; Ok(( StartContext { component_decl: component_info.decl, resolved_url: component_info.resolved_url.clone(), runner, start_info, controller_server_end, }, pending_runtime, )) } .await; let (start_context, pending_runtime) = match result { Ok((start_context, mut pending_runtime)) => { let event = Event::new_with_timestamp( component, Ok(EventPayload::Started { component: component.into(), runtime: RuntimeInfo::from_runtime( &mut pending_runtime, start_context.resolved_url.clone(), ), component_decl: start_context.component_decl.clone(), start_reason: start_reason.clone(), }), pending_runtime.timestamp, ); component.hooks.dispatch(&event).await?; (start_context, pending_runtime) } Err(e) => { let event = Event::new(component, Err(EventError::new(&e, EventErrorPayload::Started))); component.hooks.dispatch(&event).await?; return Err(e); } }; let res = configure_component_runtime(&component, pending_runtime).await; match res { Ok(fsys::StartResult::AlreadyStarted) => {} Ok(fsys::StartResult::Started) => { // It's possible that the component is stopped before getting here. If so, that's fine: the // runner will start the component, but its stop or kill signal will be immediately set on the // component controller. start_context .runner .start(start_context.start_info, start_context.controller_server_end) .await; } Err(ref _e) => { // Since we dispatched a start event, dispatch a stop event // TODO(fxbug.dev/87507): It is possible this issues Stop after // Destroyed is issued. component .hooks .dispatch(&Event::new( component, Ok(EventPayload::Stopped { status: zx::Status::OK }), )) .await?; } }; res } /// Set the Runtime in the Execution and start the exit water. From component manager's /// perspective, this indicates that the component has started. If this returns an error, the /// component was shut down and the Runtime is not set, otherwise the function returns the /// start context with the runtime set. This function acquires the state and execution locks on /// `Component`. async fn configure_component_runtime( component: &Arc<ComponentInstance>, mut pending_runtime: Runtime, ) -> Result<fsys::StartResult, ModelError> { let state = component.lock_state().await; let mut execution = component.lock_execution().await; if let Some(r) = should_return_early(&state, &execution, &component.abs_moniker) { return r; } pending_runtime.watch_for_exit(component.as_weak()); execution.runtime = Some(pending_runtime); Ok(fsys::StartResult::Started) } /// Returns `Some(Result)` if `start` should return early due to any of the following: /// - The component instance is destroyed. /// - The component instance is shut down. /// - The component instance is already started. 
pub fn should_return_early( component: &InstanceState, execution: &ExecutionState, abs_moniker: &AbsoluteMoniker, ) -> Option<Result<fsys::StartResult, ModelError>> { match component { InstanceState::New | InstanceState::Discovered | InstanceState::Resolved(_) => {} InstanceState::Purged => { return Some(Err(ModelError::instance_not_found(abs_moniker.clone()))); } } if execution.is_shut_down() { Some(Err(ModelError::instance_shut_down(abs_moniker.clone()))) } else if execution.runtime.is_some() { Some(Ok(fsys::StartResult::AlreadyStarted)) } else { None } } /// Returns a configured Runtime for a component and the start info (without actually starting /// the component). async fn make_execution_runtime( component: &Arc<ComponentInstance>, checker: &GlobalPolicyChecker, url: String, package: Option<Package>, decl: &cm_rust::ComponentDecl, config: Option<ConfigFields>, ) -> Result< (Runtime, fcrunner::ComponentStartInfo, ServerEnd<fcrunner::ComponentControllerMarker>), ModelError, > { match component.on_terminate { fdecl::OnTerminate::Reboot => { checker.reboot_on_terminate_allowed(&component.abs_moniker)?; } fdecl::OnTerminate::None => {} } // Create incoming/outgoing directories, and populate them. let (outgoing_dir_client, outgoing_dir_server) = zx::Channel::create().map_err(|e| ModelError::namespace_creation_failed(e))?; let (runtime_dir_client, runtime_dir_server) = zx::Channel::create().map_err(|e| ModelError::namespace_creation_failed(e))?; let mut namespace = IncomingNamespace::new(package)?; let ns = namespace.populate(component.as_weak(), decl).await?; let (controller_client, controller_server) = endpoints::create_endpoints::<fcrunner::ComponentControllerMarker>() .expect("could not create component controller endpoints"); let controller = controller_client.into_proxy().expect("failed to create ComponentControllerProxy"); // Set up channels into/out of the new component. These are absent from non-executable // components. 
let outgoing_dir_client = decl.get_runner().map(|_| { fio::DirectoryProxy::from_channel( fasync::Channel::from_channel(outgoing_dir_client).unwrap(), ) }); let runtime_dir_client = decl.get_runner().map(|_| { fio::DirectoryProxy::from_channel( fasync::Channel::from_channel(runtime_dir_client).unwrap(), ) }); let encoded_config = if let Some(config) = config { let encoded = config.encode_as_fidl_struct(); let encoded_size = encoded.len() as u64; let vmo = Vmo::create(encoded_size).map_err(ModelError::VmoCreateFailed)?; vmo.write(&encoded, 0).map_err(ModelError::VmoWriteFailed)?; Some(fmem::Data::Buffer(fmem::Buffer { vmo, size: encoded_size })) } else { None }; let runtime = Runtime::start_from( Some(namespace), outgoing_dir_client, runtime_dir_client, Some(controller), )?; let numbered_handles = component.numbered_handles.lock().await.take(); let start_info = fcrunner::ComponentStartInfo { resolved_url: Some(url), program: decl.program.as_ref().map(|p| p.info.clone()), ns: Some(ns), outgoing_dir: Some(ServerEnd::new(outgoing_dir_server)), runtime_dir: Some(ServerEnd::new(runtime_dir_server)), numbered_handles, encoded_config, ..fcrunner::ComponentStartInfo::EMPTY }; Ok((runtime, start_info, controller_server)) } #[cfg(test)] mod tests { use { crate::model::{ actions::{ start::should_return_early, ActionSet, ShutdownAction, StartAction, StopAction, }, component::{ ComponentInstance, ExecutionState, InstanceState, ResolvedInstanceState, Runtime, StartReason, }, error::ModelError, hooks::{Event, EventType, Hook, HooksRegistration}, testing::{ test_helpers::{self, ActionsTest}, test_hook::Lifecycle, }, }, assert_matches::assert_matches, async_trait::async_trait, cm_rust::ComponentDecl, cm_rust_testing::{ChildDeclBuilder, ComponentDeclBuilder}, fidl_fuchsia_sys2 as fsys, fuchsia, fuchsia_zircon as zx, moniker::AbsoluteMoniker, routing::error::ComponentInstanceError, std::sync::{Arc, Weak}, }; // Child name for test child components instantiated during tests. const TEST_CHILD_NAME: &str = "child"; struct StartHook { component: Arc<ComponentInstance>, } #[async_trait] impl Hook for StartHook { async fn on(self: Arc<Self>, _event: &Event) -> Result<(), ModelError> { ActionSet::register(self.component.clone(), ShutdownAction::new()) .await .expect("shutdown failed"); Ok(()) } } #[fuchsia::test] /// Validate that if a start action is issued and the component stops /// the action completes we see a Stop event emitted. 
async fn start_issues_stop() { let (test_topology, child) = build_tree_with_single_child(TEST_CHILD_NAME).await; let start_hook = Arc::new(StartHook { component: child.clone() }); child .hooks .install(vec![HooksRegistration::new( "my_start_hook", vec![EventType::Started], Arc::downgrade(&start_hook) as Weak<dyn Hook>, )]) .await; match ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)).await { Err(ModelError::InstanceShutDown { moniker: m }) => { assert_eq!(AbsoluteMoniker::from(vec![TEST_CHILD_NAME]), m); } e => panic!("Unexpected result from component start: {:?}", e), } let events: Vec<_> = test_topology .test_hook .lifecycle() .into_iter() .filter(|event| match event { Lifecycle::Start(_) | Lifecycle::Stop(_) => true, _ => false, }) .collect(); assert_eq!( events, vec![ Lifecycle::Start(vec![format!("{}:0", TEST_CHILD_NAME).as_str()].into()), Lifecycle::Stop(vec![format!("{}:0", TEST_CHILD_NAME).as_str()].into()) ] ); } #[fuchsia::test] async fn restart_set_execution_runtime() { let (_test_harness, child) = build_tree_with_single_child(TEST_CHILD_NAME).await; { let timestamp = zx::Time::get_monotonic(); ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"); let execution = child.lock_execution().await; let runtime = execution.runtime.as_ref().expect("child runtime is unexpectedly empty"); assert!(runtime.timestamp > timestamp); } { ActionSet::register(child.clone(), StopAction::new(false, false)) .await .expect("failed to stop child"); let execution = child.lock_execution().await; assert!(execution.runtime.is_none()); } { let timestamp = zx::Time::get_monotonic(); ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"); let execution = child.lock_execution().await; let runtime = execution.runtime.as_ref().expect("child runtime is unexpectedly empty"); assert!(runtime.timestamp > timestamp); } } #[fuchsia::test] async fn restart_does_not_refresh_resolved_state() { let (mut test_harness, child) = build_tree_with_single_child(TEST_CHILD_NAME).await; { let timestamp = zx::Time::get_monotonic(); ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"); let execution = child.lock_execution().await; let runtime = execution.runtime.as_ref().expect("child runtime is unexpectedly empty"); assert!(runtime.timestamp > timestamp); } { let () = ActionSet::register(child.clone(), StopAction::new(false, false)) .await .expect("failed to stop child"); let execution = child.lock_execution().await; assert!(execution.runtime.is_none()); } let resolver = test_harness.resolver.as_mut(); let original_decl = resolver.get_component_decl(TEST_CHILD_NAME).expect("child decl not stored"); let mut modified_decl = original_decl.clone(); modified_decl.children.push(ChildDeclBuilder::new().name("foo").build()); resolver.add_component(TEST_CHILD_NAME, modified_decl.clone()); ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"); let resolved_decl = get_resolved_decl(&child).await; assert_ne!(resolved_decl, modified_decl); assert_eq!(resolved_decl, original_decl); } async fn build_tree_with_single_child( child_name: &'static str, ) -> (ActionsTest, Arc<ComponentInstance>) { let root_name = "root"; let components = vec![ (root_name, ComponentDeclBuilder::new().add_lazy_child(child_name).build()), (child_name, test_helpers::component_decl_with_test_runner()), ]; let 
test_topology = ActionsTest::new(components[0].0, components, None).await; let child = test_topology.look_up(vec![child_name].into()).await; (test_topology, child) } async fn
(component: &Arc<ComponentInstance>) -> ComponentDecl { let state = component.lock_state().await; let resolved_state = match &*state { InstanceState::Resolved(resolve_state) => resolve_state, _ => panic!("expected component to be resolved"), }; resolved_state.decl().clone() } #[fuchsia::test] async fn check_should_return_early() { let m = AbsoluteMoniker::from(vec!["foo"]); let es = ExecutionState::new(); // Checks based on InstanceState: assert!(should_return_early(&InstanceState::New, &es, &m).is_none()); assert!(should_return_early(&InstanceState::Discovered, &es, &m).is_none()); assert_matches!( should_return_early(&InstanceState::Purged, &es, &m), Some(Err(ModelError::ComponentInstanceError { err: ComponentInstanceError::InstanceNotFound { moniker: _ } })) ); let (_, child) = build_tree_with_single_child(TEST_CHILD_NAME).await; let decl = ComponentDeclBuilder::new().add_lazy_child("bar").build(); let ris = ResolvedInstanceState::new(&child, decl).await.unwrap(); assert!(should_return_early(&InstanceState::Resolved(ris), &es, &m).is_none()); // Check for already_started: { let mut es = ExecutionState::new(); es.runtime = Some(Runtime::start_from(None, None, None, None).unwrap()); assert!(!es.is_shut_down()); assert_matches!( should_return_early(&InstanceState::New, &es, &m), Some(Ok(fsys::StartResult::AlreadyStarted)) ); } // Check for shut_down: let _ = child.stop_instance(true, false).await; let execution = child.lock_execution().await; assert!(execution.is_shut_down()); assert_matches!( should_return_early(&InstanceState::New, &execution, &m), Some(Err(ModelError::InstanceShutDown { moniker: _ })) ); } #[fuchsia::test] async fn check_already_started() { let (_test_harness, child) = build_tree_with_single_child(TEST_CHILD_NAME).await; assert_eq!( ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"), fsys::StartResult::Started ); assert_eq!( ActionSet::register(child.clone(), StartAction::new(StartReason::Debug)) .await .expect("failed to start child"), fsys::StartResult::AlreadyStarted ); } }
get_resolved_decl
ppu.ts
// PPU: Picutre Processing Unit // PPU scrolling // https://wiki.nesdev.com/w/index.php/PPU_scrolling import {Const} from '../const' import {Address, Byte} from '../types' import {kPaletColors, kStaggered, kFlipXBits} from './const' import {HEventType, HEvents, HStatusMgr} from './hevent' import {MirrorMode, PpuReg, PpuCtrlBit, PpuMaskBit, PpuStatusBit, OamElem, OamAttrBit} from './types' import {Util} from '../../util/util' import {VBlank} from '../const' const REGISTER_COUNT = 8 const VRAM_SIZE = 0x4000 const OAM_SIZE = 0x0100 const MAX_SPRITE_ON_SCANLINE = 8 // OAMDATA ($2004) const MAX_SPRITE = 64 // Palette const PALET_ADR = 0x3f00 const PALET_END_ADR = 0x3fff const kMirrorModeBitTable = Uint8Array.from([0x50, 0x44, 0x00, 0x55, 0x05]) const SPRITE_MASK = 0x10 const kSpritePriorityMask = Uint8Array.from([SPRITE_MASK, 0xff]) export function getNameTable(baseNameTable: Address, bx: number, by: number, mirrorModeBit: Byte): Address { const page = (((bx >> 5) & 1) + (((by / 30) & 1) << 1)) ^ baseNameTable // 0~3 const m = (mirrorModeBit << (10 - (page << 1))) & 0x0c00 return (0x2000 + m) | 0 } export function getBgPatternTableAddress(ppuCtrl: Byte): Address { return (ppuCtrl & PpuCtrlBit.BG_PATTERN_TABLE_ADDRESS) << 8 } export function getBgPat(chrData: Readonly<Uint8Array>, chridx: number, py: number, chrBankOffset: number[]): number { const idx = chridx + py const bank = (idx >> 10) & 7 const p = chrBankOffset[bank] + (idx & 0x03ff) return kStaggered[chrData[p]] | (kStaggered[chrData[p + 8]] << 1) } function getPpuAddr(adr: Address, mirrorModeBit: Byte): Address { adr &= 0x3fff if (0x3000 <= adr && adr < 0x3f00) adr -= 0x1000 // Map 0x3000~3eff to 0x2000~ if (0x2000 <= adr && adr < 0x3000) { const page = (adr >> 10) & 3 const m = (mirrorModeBit << (10 - (page << 1))) & 0x0c00 return (adr & 0xf3ff) | m } if (PALET_ADR <= adr && adr <= PALET_END_ADR) { adr &= 0xff1f // Repeat 0x3f00~0x3f1f --> 0x3fff // "Addresses $3F10/$3F14/$3F18/$3F1C are mirrors of $3F00/$3F04/$3F08/$3F0C." // http://wiki.nesdev.com/w/index.php/PPU_palettes#Memory_Map if ((adr & 0xfff3) === 0x3f10) adr &= 0xffef } return adr } function incPpuAddr(ppuAddr: Address, ppuCtrl: Byte): Address { const add = ((ppuCtrl & PpuCtrlBit.INCREMENT_MODE) !== 0) ? 32 : 1 return (ppuAddr + add) & (VRAM_SIZE - 1) } function
(t: number, dy: number): number { const pageY = ((t >> 11) & 1) * 240 let y = ((t & 0x03e0) >> (5 - 3)) | ((t >> 12) & 7) if (y >= 240) y -= 256 const ny = pageY + y + dy const p = (ny / 240) & 1 const sy = ny % 240 return (t & ~0x7be0) | ((sy & 0xf8) << (5 - 3)) | ((sy & 0x07) << 12) | (p << 11) } function getSpritePat(chrData: Uint8Array, chridx: number, py: number, flipHorz: boolean, chrBankOffset: number[]): number { const idx = chridx + (py & 7) + ((py & 8) << 1) const bank = (idx >> 10) & 7 const p = chrBankOffset[bank] + (idx & 0x03ff) let patHi = chrData[p + 8] let patLo = chrData[p] if (flipHorz) { patHi = kFlipXBits[patHi] patLo = kFlipXBits[patLo] } return kStaggered[patLo] | (kStaggered[patHi] << 1) } function clearBg(hline0: number, hline1: number, x: number, pixels: Uint8Array|Uint8ClampedArray, colorMask: number, palet: Uint8Array): void { const LINE_BYTES = Const.WIDTH * 4 const col = palet[0] & colorMask const c = kPaletColors[col] const r = c >> 16 const g = (c >> 8) & 0xff const b = c & 0xff for (let i = hline0; i < hline1; ++i) { let index = i * LINE_BYTES for (let j = 0; j < x; ++j) { pixels[index ] = r pixels[index + 1] = g pixels[index + 2] = b index += 4 } } } export class Ppu { public suppressSpriteFlicker = true private chrData = new Uint8Array(0) private regs = new Uint8Array(REGISTER_COUNT) private vram: Uint8Array private oam = new Uint8Array(OAM_SIZE) // Object Attribute Memory private mirrorMode = MirrorMode.VERT private hcount = 0 private latch = 0 private ppuAddr: Address = 0 private bufferedValue: Byte = 0 private hevents = new HEvents() private hstatusMgr = new HStatusMgr() private offscreen = new Uint8Array(Const.WIDTH * Const.HEIGHT) constructor(private triggerNmi: () => void) { // `palet` is part of `vram`, and shares its content using ArrayBuffer. 
const vramBuffer = new ArrayBuffer(VRAM_SIZE) this.vram = new Uint8Array(vramBuffer) this.reset() } public reset(): void { this.regs.fill(0) this.vram.fill(0) this.oam.fill(0) this.hcount = 0 this.ppuAddr = 0 this.latch = 0 this.bufferedValue = 0 this.hevents.clear() this.hstatusMgr.reset() this.offscreen.fill(0) } public save(): object { const data: any = { regs: Util.convertUint8ArrayToBase64String(this.regs), oam: Util.convertUint8ArrayToBase64String(this.oam), mirrorMode: this.mirrorMode, } if (this.isChrRam()) { // Save VRAM including ChrRAM data.vram = Util.convertUint8ArrayToBase64String(this.vram) } else { // Save VRAM except ChrROM data.vramHigh = Util.convertUint8ArrayToBase64String(this.vram.subarray(0x2000)) } return data } public load(saveData: any): void { const isRam = this.isChrRam() this.regs = Util.convertBase64StringToUint8Array(saveData.regs) this.oam = Util.convertBase64StringToUint8Array(saveData.oam) this.mirrorMode = saveData.mirrorMode if (isRam) { const vram = Util.convertBase64StringToUint8Array(saveData.vram) for (let i = 0; i < vram.length; ++i) this.vram[i] = vram[i] } else { const vramHigh = Util.convertBase64StringToUint8Array(saveData.vramHigh) for (let i = 0; i < vramHigh.length; ++i) this.vram[i + 0x2000] = vramHigh[i] } this.hstatusMgr.current.set(HEventType.PPU_CTRL, this.regs[PpuReg.CTRL], -1) this.hstatusMgr.current.set(HEventType.PPU_MASK, this.regs[PpuReg.MASK], -1) this.hstatusMgr.current.set(HEventType.MIRROR_MODE_BIT, kMirrorModeBitTable[this.mirrorMode], -1) for (let i = 0; i < 32; ++i) this.hstatusMgr.current.set(HEventType.PALET, this.vram[PALET_ADR + i], i) } public setChrData(chrData: Uint8Array): void { const isRam = !(chrData && chrData.length > 0) if (isRam) this.chrData = this.vram else this.chrData = chrData } public setChrBank(value: number): void { const base = value << 3 for (let i = 0; i < 8; ++i) this.setChrBankOffset(i, base + i) } public setChrBankOffset(bank: number, value: number): void { const max = this.chrData.length const offset = (value << 10) & (max - 1) this.incScrollCounter() this.addHevent(HEventType.CHR_BANK_OFFSET, offset, bank) } public getMirrorMode(): MirrorMode { return this.mirrorMode } public setMirrorMode(mode: MirrorMode): void { this.mirrorMode = mode this.setMirrorModeBit(kMirrorModeBitTable[mode]) } public setMirrorModeBit(bit: Byte): void { this.incScrollCounter() this.addHevent(HEventType.MIRROR_MODE_BIT, bit) } public read(reg: number): Byte { let result = this.regs[reg] switch (reg as PpuReg) { case PpuReg.STATUS: this.regs[PpuReg.STATUS] &= ~PpuStatusBit.VBLANK this.latch = 0 break case PpuReg.OAMDATA: result = this.oam[this.regs[PpuReg.OAMADDR]] break case PpuReg.DATA: { const ppuAddr = this.hstatusMgr.current.scrollCurr const addr = getPpuAddr(ppuAddr, this.hstatusMgr.current.mirrorModeBit) if (PALET_ADR <= addr && addr <= PALET_END_ADR) { result = this.readPpuDirect(addr) // Palette read shouldn't be buffered like other VRAM // Palette read should also read VRAM into read buffer this.bufferedValue = this.readPpuDirect( getPpuAddr(ppuAddr - 0x1000, this.hstatusMgr.current.mirrorModeBit)) } else { result = this.bufferedValue this.bufferedValue = this.readPpuDirect(addr) } this.addHevent(HEventType.SCROLL_CURR, incPpuAddr(ppuAddr, this.regs[PpuReg.CTRL])) } break default: break } return result } public write(reg: number, value: Byte): void { if (reg === PpuReg.STATUS) { value &= ~(PpuStatusBit.VBLANK | PpuStatusBit.SPRITE0HIT | PpuStatusBit.SPRITE_OVERFLOW) } this.regs[reg] = value switch (reg as 
PpuReg) { case PpuReg.CTRL: { this.incScrollCounter() this.addHevent(HEventType.PPU_CTRL, this.regs[PpuReg.CTRL]) this.ppuAddr = ((this.ppuAddr & ~0x0c00) | ((value & PpuCtrlBit.BASE_NAMETABLE_ADDRESS) << 10)) this.updateCoarseX() } break case PpuReg.MASK: this.incScrollCounter() this.addHevent(HEventType.PPU_MASK, this.regs[PpuReg.MASK]) break case PpuReg.OAMDATA: { const oamAddr = this.regs[PpuReg.OAMADDR] this.oam[oamAddr] = value this.regs[PpuReg.OAMADDR] = (oamAddr + 1) & 0xff } break case PpuReg.SCROLL: this.incScrollCounter() if (this.latch === 0) { this.ppuAddr = (this.ppuAddr & ~0x001f) | (value >> 3) this.addHevent(HEventType.SCROLL_FINE_X, value & 7) this.updateCoarseX() } else { this.ppuAddr = ((this.ppuAddr & ~0x73e0) | ((value & 0xf8) << (5 - 3)) | ((value & 0x07) << 12)) } this.latch = 1 - this.latch break case PpuReg.ADDR: if (this.latch === 0) { this.ppuAddr = (this.ppuAddr & ~0x7f00) | ((value & 0x3f) << 8) } else { this.ppuAddr = (this.ppuAddr & ~0x00ff) | value this.addHevent(HEventType.SCROLL_CURR, this.ppuAddr) } this.latch = 1 - this.latch break case PpuReg.DATA: { const ppuAddr = this.hstatusMgr.current.scrollCurr const addr = getPpuAddr(ppuAddr, this.hstatusMgr.current.mirrorModeBit) this.vram[addr] = value if (PALET_ADR <= addr && addr < PALET_ADR + 32) this.addHevent(HEventType.PALET, value, addr - PALET_ADR) this.addHevent(HEventType.SCROLL_CURR, incPpuAddr(ppuAddr, this.regs[PpuReg.CTRL])) } break default: break } } public copyWithDma(array: Uint8Array, start: Address): void { const dst = this.oam let j = this.regs[PpuReg.OAMADDR] for (let i = 0; i < 256; ++i) { dst[j] = array[start + i] j = (j + 1) & 255 } // TODO: Block CPU. } public setVBlank(): void { this.regs[PpuReg.STATUS] = this.regs[PpuReg.STATUS] | PpuStatusBit.VBLANK this.hevents.swap() this.hstatusMgr.swap() } public clearVBlank(): void { this.regs[PpuReg.STATUS] &= ~(PpuStatusBit.VBLANK | PpuStatusBit.SPRITE0HIT | PpuStatusBit.SPRITE_OVERFLOW) if ((this.hstatusMgr.current.ppuMask & (PpuMaskBit.SHOW_SPRITE | PpuMaskBit.SHOW_BG)) !== 0) this.addHevent(HEventType.SCROLL_CURR, this.ppuAddr) } public setHcount(hcount: number): void { this.hcount = hcount this.checkSprite0Hit(hcount) switch (hcount) { case VBlank.START: this.setVBlank() break case VBlank.NMI: if ((this.regs[PpuReg.CTRL] & PpuCtrlBit.VINT_ENABLE) !== 0) this.triggerNmi() break case VBlank.END: this.clearVBlank() break default: break } } public render(pixels: Uint8Array | Uint8ClampedArray): void { const greyscale = (this.regs[PpuReg.MASK] & PpuMaskBit.GREYSCALE) !== 0 const colorMask = greyscale ? 
0x20 : 0x3f this.renderOffscreen(this.offscreen, pixels, colorMask) } public writePpuDirect(addr: Address, value: Byte): void { if (addr >= 0x2000) { this.vram[addr] = value } else { const bankOffset = this.hstatusMgr.current.chrBankOffset[(addr >> 10) & 7] this.chrData[(addr & 0x3ff) + bankOffset] = value } } public getPaletTable(): Readonly<Uint8Array> { const h = this.hstatusMgr.lastFrame return h.palet } public getRegs(): Readonly<Uint8Array> { return this.regs } public getVram(): Readonly<Uint8Array> { return this.vram } public getChrData(): Readonly<Uint8Array> { return this.chrData } public getHStatusMgr(): HStatusMgr { return this.hstatusMgr } private isChrRam(): boolean { return this.chrData === this.vram } private renderOffscreen(offscreen: Uint8Array, pixels: Uint8Array|Uint8ClampedArray, colorMask: number): void { offscreen.fill(0) const h = this.hstatusMgr.lastFrame const n = this.hevents.getCount() let sprChrStart = 0 for (let i = 0; i < n; ++i) { const hevent = this.hevents.getEvent(i) h.set(hevent.type, hevent.value, hevent.index) const hline0 = hevent.hcount const hline1 = this.hevents.getEvent(i + 1).hcount if (hline0 >= hline1) continue // BG if ((h.ppuMask & PpuMaskBit.SHOW_BG) === 0) { clearBg(hline0, hline1, Const.WIDTH, pixels, colorMask, h.palet) } else { const baseNameTable = (h.scrollCurr & 0x0c00) >> 10 const bgChrStart = getBgPatternTableAddress(h.ppuCtrl) let x0 = 0 if ((h.ppuMask & PpuMaskBit.SHOW_BG_LEFT_8PX) === 0) { x0 = 8 clearBg(hline0, hline1, x0, pixels, colorMask, h.palet) } const scrollX = h.scrollFineX | ((h.scrollCurr & 0x001f) << 3) const scrollY = ((h.scrollCurr & 0x7000) >> 12) | ((h.scrollCurr & 0x03e0) >> (5 - 3)) this.renderBg(offscreen, scrollX, scrollY, baseNameTable, hline0, hline1, x0, h.chrBankOffset, h.mirrorModeBit, bgChrStart, pixels, colorMask, h.palet) } // Sprite if ((h.ppuMask & PpuMaskBit.SHOW_SPRITE) !== 0) { if ((h.ppuCtrl & PpuCtrlBit.SPRITE_SIZE) === 0) sprChrStart = (h.ppuCtrl & PpuCtrlBit.SPRITE_PATTERN_TABLE_ADDRESS) << 9 const x0 = (h.ppuMask & PpuMaskBit.SHOW_SPRITE_LEFT_8PX) ? 
0 : 8 this.renderSprite(offscreen, hline0, hline1, x0, h.chrBankOffset, sprChrStart, pixels, colorMask, h.palet) } } } private renderBg(offscreen: Uint8Array, scrollX: number, scrollY: number, baseNameTable: Address, hline0: number, hline1: number, x0: number, chrBankOffset: number[], mirrorModeBit: Byte, chrStart: Address, pixels: Uint8Array|Uint8ClampedArray, colorMask: number, palet: Uint8Array): void { scrollX |= 0 scrollY |= 0 hline0 |= 0 hline1 |= 0 x0 |= 0 mirrorModeBit |= 0 colorMask |= 0 const W = 8 const LINE_WIDTH = Const.WIDTH | 0 const vram = this.vram if (scrollY >= 240) scrollY = (scrollY - 256) | 0 const fineX = scrollX & 7 const coarseX = scrollX >> 3 for (let yy = hline0; yy < hline1; ++yy) { const yyy = yy - hline0 + scrollY const by = ((yyy >> 3) + 60) % 60 const ay = by % 30 for (let bbx = 0; bbx < Const.WIDTH / W + 1; ++bbx) { const bx = (bbx + coarseX) & 63 const ax = bx & 31 const nameTable = getNameTable(baseNameTable, bx, by, mirrorModeBit) | 0 const name = vram[nameTable + ax + (ay << 5)] const chridx = (name << 4) + chrStart const palShift = (ax & 2) + ((ay & 2) << 1) const atrBlk = (ax >> 2) + ((ay << 1) & 0x0f8) const attributeTable = (nameTable + 0x3c0) | 0 const paletHigh = (((vram[attributeTable + atrBlk] >> palShift) & 3) << 2) | 0 const px0 = (bbx * W - fineX) | 0 const pxStart = Math.max(x0 - px0, 0) | 0 const pxEnd = Math.min(Const.WIDTH - px0, W) | 0 const pat = getBgPat(this.chrData, chridx, yyy & 7, chrBankOffset) for (let px = pxStart; px < pxEnd; ++px) { const xx = (px + px0) | 0 let pal = ((pat >> ((W - 1) * 2 - (px << 1))) & 3) | 0 if (pal !== 0) pal |= paletHigh const index = yy * LINE_WIDTH + xx offscreen[index] = pal const col = palet[pal] & colorMask const c = kPaletColors[col] const index2 = index * 4 pixels[index2 + 0] = c >> 16 pixels[index2 + 1] = (c >> 8) & 0xff pixels[index2 + 2] = c & 0xff } } } } private isSprite8x16(): boolean { return (this.regs[PpuReg.CTRL] & PpuCtrlBit.SPRITE_SIZE) !== 0 } private renderSprite(offscreen: Uint8Array, hline0: number, hline1: number, x0: number, chrBankOffset: number[], chrStart: Address, pixels: Uint8Array|Uint8ClampedArray, colorMask: number, palet: Uint8Array): void { const W = 8 const LINE_WIDTH = Const.WIDTH const PALET = 0x03 const oam = this.oam const isSprite8x16 = this.isSprite8x16() const sh = isSprite8x16 ? 16 : 8 for (let h = hline0; h < hline1; ++h) { let n = 0 for (let i = 0; i < MAX_SPRITE; ++i) { const y = oam[i * 4 + OamElem.Y] + 1 if (h < y || h >= y + sh) continue const oamIndex = oam[i * 4 + OamElem.INDEX] const attr = oam[i * 4 + OamElem.ATTR] const flipVert = (attr & OamAttrBit.FLIP_VERT) !== 0 const flipHorz = (attr & OamAttrBit.FLIP_HORZ) !== 0 const x = oam[i * 4 + OamElem.X] const priorityMask = kSpritePriorityMask[(attr >> 5) & 1] const chridx = (isSprite8x16 ? (oamIndex & 0xfe) * 16 + ((oamIndex & 1) << 12) : oamIndex * 16 + chrStart) const paletHigh = ((attr & PALET) << 2) | SPRITE_MASK const py = h - y const px0 = Math.max(x0 - x, 0) const px1 = Math.min(Const.WIDTH - x, W) const ppy = flipVert ? 
(sh - 1) - py : py const pat = getSpritePat(this.chrData, chridx, ppy, flipHorz, chrBankOffset) for (let px = px0; px < px1; ++px) { let pal = (pat >> ((W - 1 - px) << 1)) & 3 if (pal === 0) continue const pixelIndex = (y + py) * LINE_WIDTH + (x + px) if ((offscreen[pixelIndex] & priorityMask) !== 0) { offscreen[pixelIndex] |= SPRITE_MASK continue } pal |= paletHigh offscreen[pixelIndex] = pal const col = palet[pal] & colorMask const c = kPaletColors[col] const index2 = pixelIndex * 4 pixels[index2 + 0] = c >> 16 pixels[index2 + 1] = (c >> 8) & 0xff pixels[index2 + 2] = c & 0xff } if (++n >= MAX_SPRITE_ON_SCANLINE && !this.suppressSpriteFlicker) { this.regs[PpuReg.STATUS] |= PpuStatusBit.SPRITE_OVERFLOW break } } } } private checkSprite0Hit(hcount: number): void { const mask = PpuMaskBit.SHOW_BG | PpuMaskBit.SHOW_SPRITE if ((this.regs[PpuReg.STATUS] & PpuStatusBit.SPRITE0HIT) !== 0 || (this.regs[PpuReg.MASK] & mask) !== mask) return const sprite0y = this.oam[OamElem.Y] + 1 if (hcount < sprite0y || hcount >= sprite0y + 16) return const sprite0x = this.oam[OamElem.X] if (sprite0x >= 255) return const dy = this.getNonEmptySprite0Line() if (dy < 0 || hcount !== sprite0y + dy) return this.regs[PpuReg.STATUS] |= PpuStatusBit.SPRITE0HIT } private getNonEmptySprite0Line(): number { const oam = this.oam const chrStart = this.getSpritePatternTableAddress() const isSprite8x16 = this.isSprite8x16() const h = isSprite8x16 ? 16 : 8 const index = oam[OamElem.INDEX] const attr = oam[OamElem.ATTR] const flipVert = (attr & OamAttrBit.FLIP_VERT) !== 0 const chridx = (isSprite8x16 ? (index & 0xfe) * 16 + ((index & 1) << 12) : index * 16 + chrStart) for (let py = 0; py < h; ++py) { const ppy = flipVert ? (h - 1) - py : py const pat = getSpritePat(this.chrData, chridx, ppy, false, this.hstatusMgr.current.chrBankOffset) if (pat !== 0) return py } return -1 } private getSpritePatternTableAddress(): Address { if ((this.regs[PpuReg.CTRL] & PpuCtrlBit.SPRITE_SIZE) === 0) return ((this.regs[PpuReg.CTRL] & PpuCtrlBit.SPRITE_PATTERN_TABLE_ADDRESS) << 9) return 0 } private addHevent(type: HEventType, value: number, index = -1): void { // Apply immediately to the current state. if (!this.hstatusMgr.current.set(type, value, index)) return let hcount = this.hcount + 1 if (hcount > Const.HEIGHT) { hcount = 0 } this.hevents.add(hcount, type, value, index) } private incScrollCounter(): void { if (!this.visible()) return const lastHcount = this.hevents.getLastHcount() if (lastHcount < 0) return const hcount = this.hcount + 1 const dy = hcount - lastHcount if (dy <= 0) return this.addHevent(HEventType.SCROLL_CURR, incScroll(this.hstatusMgr.current.scrollCurr, dy)) } private visible(): boolean { return this.hcount < Const.HEIGHT && (this.hstatusMgr.current.ppuMask & (PpuMaskBit.SHOW_SPRITE | PpuMaskBit.SHOW_BG)) !== 0 } private updateCoarseX(): void { if (this.visible()) { // At dot 257 of each scanline: const scrollCurr = ((this.hstatusMgr.current.scrollCurr & ~0x041f) | (this.ppuAddr & 0x041f)) this.addHevent(HEventType.SCROLL_CURR, scrollCurr) } } private readPpuDirect(addr: Address): Byte { if (addr >= 0x2000) { return this.vram[addr] } else { const bankOffset = this.hstatusMgr.current.chrBankOffset[(addr >> 10) & 7] return this.chrData[(addr & 0x3ff) + bankOffset] } } }
incScroll
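The palette handling in getPpuAddr above is easy to get wrong, so here is a minimal Python sketch of just the $3F00-$3FFF folding it performs, with the documented $3F10/$3F14/$3F18/$3F1C mirrors checked explicitly. This is an illustration only, not part of ppu.ts; the constants are copied from the TypeScript code.

PALET_ADR, PALET_END_ADR = 0x3F00, 0x3FFF

def fold_palette_addr(adr: int) -> int:
    adr &= 0x3FFF
    if PALET_ADR <= adr <= PALET_END_ADR:
        adr &= 0xFF1F                 # repeat $3F00-$3F1F across $3F20-$3FFF
        if (adr & 0xFFF3) == 0x3F10:  # $3F10/$3F14/$3F18/$3F1C ...
            adr &= 0xFFEF             # ... mirror $3F00/$3F04/$3F08/$3F0C
    return adr

assert fold_palette_addr(0x3F10) == 0x3F00
assert fold_palette_addr(0x3F14) == 0x3F04
assert fold_palette_addr(0x3FFF) == 0x3F1F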
VPath.unit.js
import VPath from './VPath' test('exports a valid component', () => { expect(VPath).toBeAComponent() }) test('renders the text "VPath"', () => { const { element } = shallowMount(VPath)
expect(element.textContent.trim()).toBe('VPath') }) test('adds a "hello" class on the root element', () => { const { element } = shallowMount(VPath) expect(element.classList.contains('hello')).toBe(true) })
get_domain_topic.py
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables __all__ = [ 'GetDomainTopicResult', 'AwaitableGetDomainTopicResult', 'get_domain_topic', ] @pulumi.output_type class GetDomainTopicResult: """ Domain Topic """ def __init__(__self__, id=None, name=None, provisioning_state=None, type=None): if id and not isinstance(id, str): raise TypeError("Expected argument 'id' to be a str") pulumi.set(__self__, "id", id) if name and not isinstance(name, str): raise TypeError("Expected argument 'name' to be a str") pulumi.set(__self__, "name", name) if provisioning_state and not isinstance(provisioning_state, str): raise TypeError("Expected argument 'provisioning_state' to be a str") pulumi.set(__self__, "provisioning_state", provisioning_state) if type and not isinstance(type, str): raise TypeError("Expected argument 'type' to be a str") pulumi.set(__self__, "type", type) @property @pulumi.getter def id(self) -> str: """ Fully qualified identifier of the resource """ return pulumi.get(self, "id") @property @pulumi.getter def name(self) -> str: """ Name of the resource """ return pulumi.get(self, "name") @property @pulumi.getter(name="provisioningState") def
(self) -> Optional[str]: """ Provisioning state of the domain topic. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter def type(self) -> str: """ Type of the resource """ return pulumi.get(self, "type") class AwaitableGetDomainTopicResult(GetDomainTopicResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetDomainTopicResult( id=self.id, name=self.name, provisioning_state=self.provisioning_state, type=self.type) def get_domain_topic(domain_name: Optional[str] = None, domain_topic_name: Optional[str] = None, resource_group_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetDomainTopicResult: """ Domain Topic :param str domain_name: Name of the domain :param str domain_topic_name: Name of the topic :param str resource_group_name: The name of the resource group within the user's subscription. """ __args__ = dict() __args__['domainName'] = domain_name __args__['domainTopicName'] = domain_topic_name __args__['resourceGroupName'] = resource_group_name if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('azure-nextgen:eventgrid/v20190201preview:getDomainTopic', __args__, opts=opts, typ=GetDomainTopicResult).value return AwaitableGetDomainTopicResult( id=__ret__.id, name=__ret__.name, provisioning_state=__ret__.provisioning_state, type=__ret__.type)
provisioning_state
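A possible usage sketch for the generated get_domain_topic function; the import path and the resource names below (example-rg, example-domain, example-topic) are assumptions for illustration, not values taken from this module.

import pulumi
# Assumed import path for the azure-nextgen provider SDK; adjust to match the
# installed package.
from pulumi_azure_nextgen.eventgrid.v20190201preview import get_domain_topic

topic = get_domain_topic(
    resource_group_name="example-rg",     # placeholder
    domain_name="example-domain",         # placeholder
    domain_topic_name="example-topic",    # placeholder
)
pulumi.export("domainTopicProvisioningState", topic.provisioning_state)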
stylus.js
// TODO: compile to CSS, then handle injection
module.exports = function(src, module, makeModule, options) { throw new Error("stylus parser not implemented yet") }
CH01_16.py
def sigma_days(days, daily_sigma):
sigma10 = sigma_days(10, 0.2) print("The 10-day volatility is ${0:.2f}".format(sigma10))
return days*daily_sigma
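A quick worked check of the call above, kept as an illustration: sigma_days simply scales the daily figure linearly.

# sigma_days(10, 0.2) = 10 * 0.2 = 2.00, the number the print call reports.
assert abs(sigma_days(10, 0.2) - 2.0) < 1e-9
# Aside (not part of the exercise): a square-root-of-time scaling, commonly
# used for volatility, would instead give 0.2 * 10 ** 0.5, roughly 0.63.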
xkcd.py
# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see <http://www.gnu.org/licenses/>. """This example shows a possible answer to a problem that can be found in this xkcd comics: http://xkcd.com/287/. In the comic, the characters want to get exactly 15.05$ worth of appetizers, as fast as possible.""" import random from operator import attrgetter from collections import Counter # We delete the reduction function of the Counter because it doesn't copy added # attributes. Because we create a class that inherit from the Counter, the # fitness attribute was not copied by the deepcopy. del Counter.__reduce__ import numpy from deap import algorithms from deap import base from deap import creator from deap import tools IND_INIT_SIZE = 3 # Create the item dictionary: item id is an integer, and value is # a (name, weight, value) 3-uple. Since the comic didn't specified a time for # each menu item, random was called to generate a time. ITEMS_NAME = "Mixed Fruit", "French Fries", "Side Salad", "Hot Wings", "Mozzarella Sticks", "Sampler Plate" ITEMS_PRICE = 2.15, 2.75, 3.35, 3.55, 4.2, 5.8 ITEMS = dict((name, (price, random.uniform(1, 5))) for name, price in zip(ITEMS_NAME, ITEMS_PRICE)) creator.create("Fitness", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", Counter, fitness=creator.Fitness) toolbox = base.Toolbox() toolbox.register("attr_item", random.choice, ITEMS_NAME) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_item, IND_INIT_SIZE) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalXKCD(individual, target_price): """Evaluates the fitness and return the error on the price and the time taken by the order if the chef can cook everything in parallel.""" price = 0.0 times = list() for item, number in individual.items(): price += ITEMS[item][0] * number times.append(ITEMS[item][1]) return abs(price - target_price), max(times) def cxCounter(ind1, ind2, indpb): """Swaps the number of perticular items between two individuals""" for key in ITEMS.keys(): if random.random() < indpb: ind1[key], ind2[key] = ind2[key], ind1[key] return ind1, ind2 def mutCounter(individual): """Adds or remove an item from an individual""" if random.random() > 0.5: individual.update([random.choice(ITEMS_NAME)]) else:
if individual[val] < 0: del individual[val] return individual, toolbox.register("evaluate", evalXKCD, target_price=15.05) toolbox.register("mate", cxCounter, indpb=0.5) toolbox.register("mutate", mutCounter) toolbox.register("select", tools.selNSGA2) def main(): NGEN = 40 MU = 100 LAMBDA = 200 CXPB = 0.3 MUTPB = 0.6 pop = toolbox.population(n=MU) hof = tools.ParetoFront() price_stats = tools.Statistics(key=lambda ind: ind.fitness.values[0]) time_stats = tools.Statistics(key=lambda ind: ind.fitness.values[1]) stats = tools.MultiStatistics(price=price_stats, time=time_stats) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) algorithms.eaMuPlusLambda(pop, toolbox, MU, LAMBDA, CXPB, MUTPB, NGEN, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": _, _, hof = main() from matplotlib import pyplot as plt error_price = [i.fitness.values[0] for i in hof] time = [i.fitness.values[1] for i in hof] plt.plot(error_price, time, 'bo') plt.xlabel("Price difference") plt.ylabel("Total time") plt.show()
val = random.choice(ITEMS_NAME) individual.subtract([val])
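As a quick sanity check of evalXKCD, the classic answer from the comic, seven orders of Mixed Fruit, prices out exactly: 7 * 2.15 = 15.05, so the price-error objective is zero. The snippet below is illustrative and assumes it runs with the definitions above; a plain Counter is enough because evalXKCD only iterates items().

from collections import Counter

order = Counter({"Mixed Fruit": 7})
price_error, total_time = evalXKCD(order, target_price=15.05)
assert price_error < 1e-9   # 7 * 2.15 == 15.05
# total_time depends on the random preparation times drawn into ITEMS.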
template.spec.js
const test = require('ava') const compile = require('../../helpers/compile') const { escape } = require('../../..') const { join } = require('path') test('template: component', async assert => { var { template } = await compile('<template foo>foo</template><foo/>') assert.deepEqual(template({}, escape), 'foo') }) // TODO add a possibility to declare a local component with the same // name as the imported component test('template: component inside of a imported component', async assert => { var { template } = await compile(` <import foo from='./foo.html'> <foo /> `, { paths: [ join(__dirname, '../../fixtures/template') ] }) assert.deepEqual(template({}, escape), 'bar') }) test('template: objects as parameters', async assert => { var { template } = await compile(` <template foo>{bar.baz}</template> <foo bar="{ { baz: 'qux' } }" /> `) assert.deepEqual(template({}, escape), 'qux')
test('template: padding attribute', async assert => { var { template } = await compile(`<template section><div padding="{{ bottom: 30 }}"><p>Inline component</p></div></template><section></section>`) assert.deepEqual(template({}, escape), '<div style="padding-bottom: 30px;"><p>Inline component</p></div>') var { template } = await compile(`<template section><div padding="{{ bottom: "30", top: "150" }}"><p>Inline component</p></div></template><section></section>`) assert.deepEqual(template({}, escape), '<div style="padding-bottom: 30px; padding-top: 150px;"><p>Inline component</p></div>') var { template } = await compile(`<template section><div margin="{{ top: 100, bottom: 100, right: 100, left: 100 }}"><p>Inline component</p></div></template><section></section>`) assert.deepEqual(template({}, escape), '<div style="margin-top: 100px; margin-bottom: 100px; margin-right: 100px; margin-left: 100px;"><p>Inline component</p></div>') })
})
__init__.py
#!/usr/bin/env python from __future__ import print_function import argparse import base64 import os import sys import logging from six import print_ as print from tzlocal import get_localzone from aws_saml_auth import amazon from aws_saml_auth import configuration from aws_saml_auth import saml from aws_saml_auth import util with open( os.path.join(os.path.abspath(os.path.dirname(__file__)), "VERSION"), encoding="utf-8", ) as version_file: version = version_file.read().strip() def parse_args(args): parser = argparse.ArgumentParser( prog="aws-saml-auth", description="Acquire temporary AWS credentials via SAML", ) main_group = parser.add_mutually_exclusive_group() main_group.add_argument( "--redirect-server", action="store_true", help="Run the redirect server on port ($PORT)", ) main_group.add_argument( "-L", "--login-url", help="SAML Provider login url ($ASA_LOGIN_URL)" ) parser.add_argument( "-R", "--region", help="AWS region endpoint ($AWS_DEFAULT_REGION)" ) duration_group = parser.add_mutually_exclusive_group() duration_group.add_argument( "-d", "--duration", type=int, help="Credential duration in seconds (defaults to value of $ASA_DURATION, then falls back to 43200)", ) duration_group.add_argument( "--auto-duration", action="store_true", help="Tries to use the longest allowed duration ($ASA_AUTO_DURATION=1)", ) parser.add_argument( "-p", "--profile", help="AWS profile (defaults to value of $AWS_PROFILE, then falls back to 'default')", ) parser.add_argument( "-A", "--account", help="Filter for specific AWS account ($ASA_AWS_ACCOUNT)" ) parser.add_argument("-q", "--quiet", action="store_true", help="Quiet output") parser.add_argument( "--saml-assertion", dest="saml_assertion", help="Base64 encoded SAML assertion to use", ) parser.add_argument( "--no-saml-cache", dest="use_saml_cache", action="store_false", help="Do not cache the SAML Assertion ($ASA_NO_SAML_CACHE=1)", ) print_group = parser.add_mutually_exclusive_group() print_group.add_argument( "--print-creds", action="store_true", help="Print Credentials" ) print_group.add_argument( "--credential-process", action="store_true", help="Output suitable for aws cli credential_process ($ASA_CREDENTIAL_PROCESS=1)", ) parser.add_argument( "--no-resolve-aliases", dest="resolve_aliases", action="store_false", help="Do not resolve AWS account aliases. ($ASA_NO_RESOLVE_ALIASES=1)", ) parser.add_argument("--port", type=int, help="Port for the redirect server ($PORT)") role_group = parser.add_mutually_exclusive_group() role_group.add_argument( "--no-ask-role", dest="ask_role", action="store_false", help="Never ask to pick the role ($ASA_NO_ASK_ROLE=1)", ) role_group.add_argument( "-r", "--role-arn", help="The ARN of the role to assume ($ASA_ROLE_ARN)" ) parser.add_argument( "-l", "--log", dest="log_level", choices=["debug", "info", "warn"], default="warn", help="Select log level (default: %(default)s)", ) parser.add_argument( "-V", "--version", action="version", version="%(prog)s {version}".format(version=version), ) return parser.parse_args(args) def exit_if_unsupported_python(): if sys.version_info.major == 2 and sys.version_info.minor < 7: logging.critical( "%s requires Python 2.7 or higher. Please consider " "upgrading. 
Support for Python 2.6 and lower was " "dropped because this tool's dependencies dropped " "support.", __name__, ) logging.critical( "For debugging, it appears you're running: %s", sys.version_info ) logging.critical( "For more information, see: " "https://github.com/cevoaustralia/aws-google-auth/" "issues/41" ) sys.exit(1) def cli(cli_args): try: exit_if_unsupported_python() args = parse_args(args=cli_args) # Set up logging logging.getLogger().setLevel(getattr(logging, args.log_level.upper(), None)) config = resolve_config(args) if args.redirect_server: from aws_saml_auth.redirect_server import start_redirect_server start_redirect_server(config.port) return process_auth(args, config) except amazon.ExpectedAmazonException as ex: print(ex) sys.exit(1) except saml.ExpectedSamlException as ex: print(ex) sys.exit(1) except KeyboardInterrupt: pass except Exception as ex: logging.exception(ex) def resolve_config(args): # Shortening Convenience functions coalesce = util.Util.coalesce # Create a blank configuration object (has the defaults pre-filled) config = configuration.Configuration() # Have the configuration update itself via the ~/.aws/config on disk. # Profile (Option priority = ARGS, ENV_VAR, DEFAULT) config.profile = coalesce(args.profile, os.getenv("AWS_PROFILE"), config.profile) # Now that we've established the profile, we can read the configuration and # fill in all the other variables. config.read(config.profile) # Ask Role (Option priority = ARGS, ENV_VAR, DEFAULT) config.ask_role = coalesce( (False if os.getenv("ASA_NO_ASK_ROLE") != None else None), args.ask_role, config.ask_role, ) # Do not cache the SAML Assertion (Option priority = ARGS, ENV_VAR, DEFAULT) config.use_saml_cache = coalesce( (False if os.getenv("ASA_NO_SAML_CACHE") != None else None), args.use_saml_cache, config.use_saml_cache, ) # Duration (Option priority = ARGS, ENV_VAR, DEFAULT) config.duration = int( coalesce(args.duration, os.getenv("ASA_DURATION"), config.duration) ) # Automatic duration (Option priority = ARGS, ENV_VAR, DEFAULT) config.auto_duration = args.auto_duration or os.getenv("ASA_AUTO_DURATION") != None # Login URL (Option priority = ARGS, ENV_VAR, DEFAULT) config.login_url = coalesce( args.login_url, os.getenv("ASA_LOGIN_URL"), config.login_url ) # Region (Option priority = ARGS, ENV_VAR, DEFAULT) config.region = coalesce( args.region, os.getenv("AWS_DEFAULT_REGION"), config.region ) # ROLE ARN (Option priority = ARGS, ENV_VAR, DEFAULT) config.role_arn = coalesce( args.role_arn, os.getenv("ASA_ROLE_ARN"), config.role_arn ) # Resolve AWS aliases enabled (Option priority = ARGS, ENV_VAR, DEFAULT) config.resolve_aliases = coalesce( (False if os.getenv("ASA_NO_RESOLVE_ALIASES") != None else None), args.resolve_aliases, config.resolve_aliases, ) # Account (Option priority = ARGS, ENV_VAR, DEFAULT) config.account = coalesce( args.account, os.getenv("ASA_AWS_ACCOUNT"), config.account ) config.print_creds = coalesce(args.print_creds, config.print_creds) # Quiet config.quiet = coalesce(args.quiet, config.quiet) config.port = int(coalesce(args.port, os.getenv("PORT"), config.port)) config.credential_process = ( args.credential_process or os.getenv("ASA_CREDENTIAL_PROCESS") != None ) if config.credential_process: config.quiet = True config.ask_role = False config.read_token_cache() if config.use_saml_cache: config.read_saml_cache() return config def process_auth(args, config): if config.region is None: config.region = util.Util.get_input("AWS Region: ") logging.debug("%s: region is: %s", __name__, 
config.region)

    if config.login_url is None:
        config.login_url = util.Util.get_input("Login URL: ")
    logging.debug("%s: login url is: %s", __name__, config.login_url)

    # If there is a valid cache and the user opted to use it, use that instead
    # of prompting the user for input (it will also ignore any set variables
    # such as username or sp_id and idp_id, as those are built into the SAML
    # response). The user does not need to be prompted for a password if the
    # SAML cache is used.
    if args.saml_assertion:
        saml_xml = base64.b64decode(args.saml_assertion)
    elif config.token_cache:
        saml_xml = None
    elif config.saml_cache:
        saml_xml = config.saml_cache
        logging.info("%s: SAML cache found", __name__)
    else:
        saml_client = saml.Saml(config)
        saml_xml = saml_client.do_browser_saml()

    # We now have a new SAML value that can get cached (if the user asked
    # for it to be).
    if config.use_saml_cache:
        config.saml_cache = saml_xml

    # The amazon_client now has the SAML assertion it needed (either via the
    # cache or freshly generated). From here, we can get the roles and continue
    # the rest of the workflow regardless of cache.
amazon_client = amazon.Amazon(config, saml_xml) if saml_xml is not None: roles = amazon_client.roles # Determine the provider and the role arn (if the the user provided isn't an option) if config.role_arn in roles and not config.ask_role: config.provider = roles[config.role_arn] else: if config.account and config.resolve_aliases: aliases = amazon_client.resolve_aws_aliases(roles) config.role_arn, config.provider = util.Util.pick_a_role( roles, aliases, config.account ) elif config.account: config.role_arn, config.provider = util.Util.pick_a_role( roles, account=config.account ) elif config.resolve_aliases: aliases = amazon_client.resolve_aws_aliases(roles) config.role_arn, config.provider = util.Util.pick_a_role(roles, aliases) else: config.role_arn, config.provider = util.Util.pick_a_role(roles) if not config.quiet: print("Assuming " + config.role_arn) print( "Credentials Expiration: " + format(amazon_client.expiration.astimezone(get_localzone())) ) if config.credential_process: amazon_client.print_credential_process() config.write_token_cache(amazon_client) elif config.print_creds: amazon_client.print_export_line() elif config.profile: config.write(amazon_client) config.write_saml_cache() def main(): cli_args = sys.argv[1:] cli(cli_args) if __name__ == "__main__": main()
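resolve_config expresses its ARGS, then ENV_VAR, then DEFAULT priority through util.Util.coalesce. A minimal stand-in with the behaviour that usage implies (first non-None argument wins) is sketched below; the real helper lives in aws_saml_auth.util and may differ in detail.

def coalesce(*values):
    # Return the first argument that is not None, or None if all are.
    for value in values:
        if value is not None:
            return value
    return None

assert coalesce(None, "us-east-1", "ap-southeast-2") == "us-east-1"
assert coalesce("--region value", None, "config default") == "--region value"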
grade.go
package check

// Grade represents a grade returned by the server, which is normally
// somewhere between A+ (highest) and F (lowest).
type Grade string

// The Grade constants below indicate the current available
// grades.
const (
	GradeAPlus Grade = "A+"
	GradeA           = "A"
	GradeB           = "B"
	GradeC           = "C"
	GradeD           = "D"
	GradeE           = "E"
	GradeF           = "F"
)

// GradeFromPercentage is a helper for getting the Grade for a percentage
func GradeFromPercentage(percentage float64) Grade {
	switch {
	case percentage > 90:
		return GradeAPlus
	case percentage > 80:
		return GradeA
	case percentage > 70:
		return GradeB
	case percentage > 60:
		return GradeC
return GradeE default: return GradeF } } // BadgeFromGrade is a helper for getting the badge svg for a grade func BadgeFromGrade(grade Grade) string { switch grade { case GradeAPlus: return `<svg xmlns="http://www.w3.org/2000/svg" width="88" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="88" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#4c1" d="M61 0h27v20H61z"/><path fill="url(#b)" d="M0 0h88v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="73.5" y="15" fill="#010101" fill-opacity=".3">A+</text><text x="73.5" y="14">A+</text></g></svg>` case GradeA: return `<svg xmlns="http://www.w3.org/2000/svg" width="78" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="78" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#4c1" d="M61 0h17v20H61z"/><path fill="url(#b)" d="M0 0h78v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="68.5" y="15" fill="#010101" fill-opacity=".3">A</text><text x="68.5" y="14">A</text></g></svg>` case GradeB: return `<svg xmlns="http://www.w3.org/2000/svg" width="78" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="78" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#a4a61d" d="M61 0h17v20H61z"/><path fill="url(#b)" d="M0 0h78v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="68.5" y="15" fill="#010101" fill-opacity=".3">B</text><text x="68.5" y="14">B</text></g></svg>` case GradeC: return `<svg xmlns="http://www.w3.org/2000/svg" width="78" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="78" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#dfb317" d="M61 0h17v20H61z"/><path fill="url(#b)" d="M0 0h78v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="68.5" y="15" fill="#010101" fill-opacity=".3">C</text><text x="68.5" y="14">C</text></g></svg>` case GradeD: return `<svg xmlns="http://www.w3.org/2000/svg" width="80" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="80" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 
0h61v20H0z"/><path fill="#fe7d37" d="M61 0h19v20H61z"/><path fill="url(#b)" d="M0 0h80v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="69.5" y="15" fill="#010101" fill-opacity=".3">D</text><text x="69.5" y="14">D</text></g></svg>` case GradeE: return `<svg xmlns="http://www.w3.org/2000/svg" width="78" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="78" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#e05d44" d="M61 0h17v20H61z"/><path fill="url(#b)" d="M0 0h78v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="68.5" y="15" fill="#010101" fill-opacity=".3">E</text><text x="68.5" y="14">E</text></g></svg>` case GradeF: return `<svg xmlns="http://www.w3.org/2000/svg" width="78" height="20"><linearGradient id="b" x2="0" y2="100%"><stop offset="0" stop-color="#bbb" stop-opacity=".1"/><stop offset="1" stop-opacity=".1"/></linearGradient><mask id="a"><rect width="78" height="20" rx="3" fill="#fff"/></mask><g mask="url(#a)"><path fill="#555" d="M0 0h61v20H0z"/><path fill="#e05d44" d="M61 0h17v20H61z"/><path fill="url(#b)" d="M0 0h78v20H0z"/></g><g fill="#fff" text-anchor="middle" font-family="DejaVu Sans,Verdana,Geneva,sans-serif" font-size="11"><text x="30.5" y="15" fill="#010101" fill-opacity=".3">go report</text><text x="30.5" y="14">go report</text><text x="68.5" y="15" fill="#010101" fill-opacity=".3">F</text><text x="68.5" y="14">F</text></g></svg>` default: return "" } }
case percentage > 50: return GradeD case percentage > 40:
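For quick reference, the thresholds GradeFromPercentage implements, restated as a small Python sketch (strictly-greater-than comparisons, checked top to bottom; illustrative only, not part of the Go package).

THRESHOLDS = [(90, "A+"), (80, "A"), (70, "B"), (60, "C"), (50, "D"), (40, "E")]

def grade_from_percentage(percentage: float) -> str:
    for cutoff, grade in THRESHOLDS:
        if percentage > cutoff:
            return grade
    return "F"

assert grade_from_percentage(90.5) == "A+"
assert grade_from_percentage(40.0) == "F"   # 40 is not strictly greater than 40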
elias_fano_encoder.rs
// Copyright 2019 Zhizhesihai (Beijing) Technology Limited. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use core::codec::postings::{EncodeType, ForUtil}; use core::store::io::{IndexInput, IndexOutput}; use core::util::bit_util::*; use error::ErrorKind::*; use error::Result; use std::ptr::{slice_from_raw_parts, slice_from_raw_parts_mut}; /// The default index interval for zero upper bits. pub const DEFAULT_INDEX_INTERVAL: i64 = 256; #[derive(Debug)] pub struct EliasFanoEncoder { pub num_values: i64, upper_bound: i64, pub num_low_bits: i32, pub lower_bits_mask: i64, pub upper_longs: Vec<i64>, pub lower_longs: Vec<i64>, pub num_encoded: i64, pub last_encoded: i64, pub num_index_entries: i64, pub index_interval: i64, // index entry bits num pub n_index_entry_bits: i32, /// upper_zero_bit_position_index[i] (filled using packValue) will contain the bit position /// just after the zero bit ((i+1) * index_interval) in the upper bits. pub upper_zero_bit_position_index: Vec<i64>, current_entry_index: i64, // also indicates how many entries in the index are valid. } impl EliasFanoEncoder { pub fn new(num_values: i64, upper_bound: i64, index_interval: i64) -> Result<Self> { if num_values < 0 { bail!(IllegalArgument(format!( "num_values should not be negative: {}", num_values ))); } if num_values > 0 && upper_bound < 0 { bail!(IllegalArgument(format!( "upper_bound should not be negative: {} when num_values > 0", upper_bound ))); } let upper_bound = if num_values > 0 { upper_bound } else { -1 }; // the number of lower bits let mut num_low_bits = 0; if num_values > 0 { let low_bits_fac = upper_bound / num_values; if low_bits_fac > 0 { // different from lucene version // num_low_bits = LONG_SIZE_32 - number_of_leading_zeros(low_bits_fac); // floor(2_log(upper_bound / num_values)), default // ceil(2_log(upper_bound / num_values - 1)) num_low_bits = LONG_SIZE_32 - 1 - low_bits_fac.leading_zeros() as i32; } } let lower_bits_mask = i64::max_value().unsigned_shift((LONG_SIZE_32 - 1 - num_low_bits) as usize);
num_longs_for_low_bits ))); } let lower_longs = vec![0; num_longs_for_low_bits as usize]; // high bits let mut num_high_bits_clear = if upper_bound > 0 { upper_bound } else { 0 }; num_high_bits_clear = num_high_bits_clear.unsigned_shift(num_low_bits as usize); assert!(num_high_bits_clear <= 2 * num_values); let num_high_bits_set = num_values; // Todo: 感觉这里少计算了 let num_longs_for_high_bits = Self::num_longs_for_bits(num_high_bits_clear + num_high_bits_set); if num_longs_for_high_bits > i32::max_value() as i64 { bail!(IllegalArgument(format!( "num_longs_for_high_bits too large to index a long array: {}", num_longs_for_high_bits ))); } let upper_longs = vec![0; num_longs_for_high_bits as usize]; if index_interval < 2 { bail!(IllegalArgument(format!( "index_interval should at least 2: {}", index_interval ))); } // high bits的分区索引 let max_high_value = upper_bound.unsigned_shift(num_low_bits as usize); let n_index_entries = max_high_value / index_interval; let num_index_entries = if n_index_entries >= 0 { n_index_entries } else { 0 }; // Todo max value & first index let max_index_entry = max_high_value + num_values - 1; let n_index_entry_bits = if max_index_entry <= 0 { 0 } else { LONG_SIZE_32 - max_index_entry.leading_zeros() as i32 }; let num_longs_for_index_bits = Self::num_longs_for_bits(num_index_entries * n_index_entry_bits as i64); if num_longs_for_index_bits > i32::max_value() as i64 { bail!(IllegalArgument(format!( "num_longs_for_index_bits too large to index a long array: {}", num_longs_for_index_bits ))); } Ok(Self { num_values, upper_bound, num_low_bits, lower_bits_mask, upper_longs, lower_longs, num_encoded: 0, last_encoded: 0, num_index_entries, index_interval, n_index_entry_bits, upper_zero_bit_position_index: vec![0; num_longs_for_index_bits as usize], current_entry_index: 0, }) } pub fn rebuild_not_with_check(&mut self, num_values: i64, upper_bound: i64) -> Result<()> { self.num_values = num_values; self.upper_bound = upper_bound; self.num_encoded = num_values; self.last_encoded = upper_bound; // low bits num & mask self.num_low_bits = if num_values > 0 { let low_bits_fac = upper_bound / num_values; if low_bits_fac > 0 { LONG_SIZE_32 - 1 - low_bits_fac.leading_zeros() as i32 } else { 0 } } else { 0 }; self.lower_bits_mask = i64::max_value().unsigned_shift((LONG_SIZE_32 - 1 - self.num_low_bits) as usize); // low bits self.lower_longs.resize( Self::num_longs_for_bits(num_values * self.num_low_bits as i64) as usize, 0, ); // high bits self.upper_longs.resize( Self::num_longs_for_bits( upper_bound.unsigned_shift(self.num_low_bits as usize) + num_values, ) as usize, 0, ); // index let max_high_value = upper_bound.unsigned_shift(self.num_low_bits as usize); let n_index_entries = max_high_value / self.index_interval; let num_index_entries = if n_index_entries >= 0 { n_index_entries } else { 0 }; let max_index_entry = max_high_value + num_values - 1; let n_index_entry_bits = if max_index_entry <= 0 { 0 } else { LONG_SIZE_32 - max_index_entry.leading_zeros() as i32 }; self.upper_zero_bit_position_index.resize( Self::num_longs_for_bits(num_index_entries * n_index_entry_bits as i64) as usize, 0, ); self.n_index_entry_bits = n_index_entry_bits; self.num_index_entries = num_index_entries; self.current_entry_index = 0; Ok(()) } pub fn get_encoder(num_values: i64, upper_bound: i64) -> Result<Self> { Self::new(num_values, upper_bound, DEFAULT_INDEX_INTERVAL) } #[inline] pub fn encode_size(&self) -> i32 { ((self.upper_longs.len() + self.lower_longs.len() + self.upper_zero_bit_position_index.len()) 
<< 3) as i32 } pub fn encode_next(&mut self, x: i64) -> Result<()> { if self.num_encoded >= self.num_values { bail!(IllegalState(format!( "encode_next called more than {} times.", self.num_values ))); } if self.last_encoded > x { bail!(IllegalArgument(format!( "{} smaller than previous {}", x, self.last_encoded ))); } if x > self.upper_bound { bail!(IllegalArgument(format!( "{} larger than upperBound {}", x, self.upper_bound ))); } let high_value = x.unsigned_shift(self.num_low_bits as usize); self.encode_upper_bits(high_value); self.encode_lower_bits(x & self.lower_bits_mask); self.last_encoded = x; let mut index_value = (self.current_entry_index + 1) * self.index_interval; while index_value <= high_value { let after_zero_bit_position = index_value + self.num_encoded; Self::pack_value( after_zero_bit_position, &mut self.upper_zero_bit_position_index, self.n_index_entry_bits, self.current_entry_index, ); self.current_entry_index += 1; index_value += self.index_interval; } self.num_encoded += 1; Ok(()) } pub fn serialize(&mut self, out: &mut impl IndexOutput) -> Result<()> { out.write_byte(ForUtil::encode_type_to_code(EncodeType::EF))?; out.write_vlong(self.upper_bound)?; Self::write_data(&self.upper_longs, out)?; Self::write_data(&self.lower_longs, out)?; Self::write_data(&self.upper_zero_bit_position_index, out)?; Ok(()) } pub fn deserialize(&mut self, encoded_data: &[u8]) -> Result<()> { self.num_encoded = self.num_values; self.last_encoded = self.upper_bound; let mut index = 0; Self::read_data(&mut self.upper_longs, encoded_data, &mut index); Self::read_data(&mut self.lower_longs, encoded_data, &mut index); Self::read_data( &mut self.upper_zero_bit_position_index, encoded_data, &mut index, ); Ok(()) } pub fn deserialize2(&mut self, input: &mut dyn IndexInput) -> Result<()> { self.num_encoded = self.num_values; self.last_encoded = self.upper_bound; Self::read_data2(&mut self.upper_longs, input)?; Self::read_data2(&mut self.lower_longs, input)?; Self::read_data2(&mut self.upper_zero_bit_position_index, input)?; Ok(()) } #[inline] pub fn sufficiently_smaller_than_bit_set(num_values: i64, upper_bound: i64) -> bool { return (upper_bound > (4 * LONG_SIZE)) && (upper_bound / 7) > num_values; } // pub get_decoder(&self) -> Result<EliasFanoDecoder> { // EliasFanoDecoder::new(self) // } pub fn get_lower_bits(&self) -> &Vec<i64> { &self.lower_longs } pub fn get_upper_bits(&self) -> &Vec<i64> { &self.upper_longs } pub fn get_index_bits(&self) -> &Vec<i64> { &self.upper_zero_bit_position_index } #[inline] fn num_longs_for_bits(n: i64) -> i64 { assert!(n >= 0); (n + LONG_SIZE - 1).unsigned_shift(LOG2_LONG_SIZE as usize) } #[inline] fn encode_upper_bits(&mut self, high_value: i64) { let next_high_bit_num = self.num_encoded + high_value; self.upper_longs[next_high_bit_num.unsigned_shift(LOG2_LONG_SIZE as usize) as usize] |= 1_i64 << (next_high_bit_num & LONG_SIZE - 1) } #[inline] fn encode_lower_bits(&mut self, low_value: i64) { Self::pack_value( low_value, &mut self.lower_longs, self.num_low_bits, self.num_encoded, ); } /// 用Vec<i64>存储固定长度的bits array /// value: 待存储值,取从右到左的num_bits个位 /// long_array: 用于存储的Vec<i64> /// num_bits: 固定bits的个数 /// pack_index: 已经存储的值的个数 #[inline] fn pack_value(value: i64, long_array: &mut Vec<i64>, num_bits: i32, pack_index: i64) { if num_bits != 0 { let bit_pos = num_bits as i64 * pack_index; let index = bit_pos.unsigned_shift(LOG2_LONG_SIZE as usize) as usize; let bit_pos_at_index = (bit_pos & LONG_SIZE - 1) as i32; long_array[index] |= value << bit_pos_at_index as i64; if 
(bit_pos_at_index + num_bits) > LONG_SIZE_32 { long_array[index + 1] = value.unsigned_shift((LONG_SIZE_32 - bit_pos_at_index) as usize); } } } pub fn write_data(data: &Vec<i64>, out: &mut impl IndexOutput) -> Result<()> { if !data.is_empty() { let ptr = data.as_ptr() as *const u8; let length = data.len() << 3; let data = unsafe { &*slice_from_raw_parts(ptr, length) }; out.write_bytes(data, 0, length)?; } Ok(()) } pub fn read_data(data: &mut Vec<i64>, encoded_data: &[u8], index: &mut usize) { let length = data.len(); if length > 0 { data.clear(); let ptr = encoded_data[*index..].as_ptr() as *mut i64; let v = unsafe { Vec::from_raw_parts(ptr, length, length) }; let _ = v.iter().map(|&x| data.push(x)).collect::<()>(); v.into_raw_parts(); *index += length << 3; } } pub fn read_data2(buf: &mut Vec<i64>, input: &mut dyn IndexInput) -> Result<()> { if buf.len() > 0 { let ptr = buf.as_mut_ptr() as *mut u8; let new_buf = unsafe { &mut *slice_from_raw_parts_mut(ptr, buf.len() << 3) }; input.read_exact(new_buf)?; } Ok(()) } } // impl Drop for EliasFanoEncoder { // fn drop(&mut self) { // if self.upper_longs.len() > 0 { // mem::take(&mut self.upper_longs).into_raw_parts(); // } // if self.lower_longs.len() > 0 { // mem::take(&mut self.lower_longs).into_raw_parts(); // } // if self.upper_zero_bit_position_index.len() > 0 { // mem::take(&mut self.upper_zero_bit_position_index).into_raw_parts(); // } // } // } #[cfg(test)] mod tests { use core::util::packed::EliasFanoEncoder; #[test] fn num_longs_for_bits() { assert_eq!(EliasFanoEncoder::num_longs_for_bits(5), 1); assert_eq!(EliasFanoEncoder::num_longs_for_bits(31), 1); assert_eq!(EliasFanoEncoder::num_longs_for_bits(32), 1); assert_eq!(EliasFanoEncoder::num_longs_for_bits(33), 1); assert_eq!(EliasFanoEncoder::num_longs_for_bits(65), 2); assert_eq!(EliasFanoEncoder::num_longs_for_bits(128), 2); assert_eq!(EliasFanoEncoder::num_longs_for_bits(129), 3); } #[test] fn get_encoder() { let efe = EliasFanoEncoder::get_encoder(128, 510901); println!("efe: {:#?}", efe); } #[test] fn pack_value() { let mut lv = vec![0_i64; 2]; EliasFanoEncoder::pack_value(2, &mut lv, 2, 31); println!("wjj: {}", lv[0]); assert_eq!(lv[0], 0x8000000000000000_u64 as i64); lv[0] = 0; EliasFanoEncoder::pack_value(0b11111_i64, &mut lv, 5, 12); println! {"wjj: {:?}", lv}; assert_eq!(lv[0], 0xF000000000000000_u64 as i64); assert_eq!(lv[1], 1_i64); } #[test] fn encode_upper() { let mut ef = EliasFanoEncoder::new(7, 24, 256).unwrap(); println!("encoder: {:?}", ef); ef.encode_upper_bits(0); ef.num_encoded += 1; assert_eq!(ef.upper_longs[0], 1_i64); println!("encoder: {:?}, num: {}", ef.upper_longs[0], ef.num_encoded); ef.encode_upper_bits(0); ef.num_encoded += 1; assert_eq!(ef.upper_longs[0], 3_i64); println!("encoder: {:?}, num: {}", ef.upper_longs[0], ef.num_encoded); ef.encode_upper_bits(1); ef.num_encoded += 1; assert_eq!(ef.upper_longs[0], 11_i64); ef.encode_upper_bits(1); ef.num_encoded += 1; assert_eq!(ef.upper_longs[0], 27_i64); ef.encode_upper_bits(2); ef.num_encoded += 1; assert_eq!(ef.upper_longs[0], 91_i64); } }
let num_longs_for_low_bits = Self::num_longs_for_bits(num_values * num_low_bits as i64); if num_longs_for_low_bits > i32::max_value().into() { bail!(IllegalArgument(format!( "num_longs_for_low_bits too large to index a long array: {}",
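A small numeric walk-through of the low/high-bit split that EliasFanoEncoder::new and encode_next perform, using the same parameters as the encode_upper test (num_values = 7, upper_bound = 24). Illustrative Python only, not part of the crate.

num_values, upper_bound = 7, 24

low_bits_fac = upper_bound // num_values                                  # 3
num_low_bits = low_bits_fac.bit_length() - 1 if low_bits_fac > 0 else 0  # floor(log2(3)) = 1
lower_bits_mask = (1 << num_low_bits) - 1                                 # 0b1

for x in (0, 1, 2, 3, 5, 24):
    high, low = x >> num_low_bits, x & lower_bits_mask
    # `high` is unary-coded into upper_longs, `low` is packed into lower_longs
    # with num_low_bits bits per value.
    print(x, "->", high, low)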
0279-perfect-squares.go
// Given an integer n, return the least number of perfect square numbers that sum to n.

// A perfect square is an integer that is the square of an integer; in other words, it is the product of some integer with itself. For example, 1, 4, 9, and 16 are perfect squares while 3 and 11 are not.

// Example 1:
// Input: n = 12
// Output: 3
// Explanation: 12 = 4 + 4 + 4.

// Example 2:
// Input: n = 13
// Output: 2
// Explanation: 13 = 4 + 9.

// Constraints:
// 1 <= n <= 10^4

func numSquares(n int) int
{
	if n <= 2 {
		return n
	}

	// Collect all perfect squares not exceeding n, in ascending order.
	lst := make([]int, 0, n)
	for i := 1; i*i <= n; i++ {
		lst = append(lst, i*i)
	}

	// Breadth-first search over remainders: level cnt holds every value still
	// reachable after subtracting cnt-1 squares from n.
	cnt := 0
	toCheck := map[int]bool{n: true}

	for {
		cnt++
		temp := map[int]bool{}
		for x := range toCheck {
			for _, y := range lst {
				if x == y {
					return cnt
				}
				if x < y {
					break
				}
				temp[x-y] = true
			}
		}
		toCheck = temp
	}

	return cnt
}
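The same level-by-level (breadth-first) idea as numSquares, sketched in Python: level k holds every remainder reachable after subtracting k-1 squares, and the first level that contains a perfect square is the answer. Illustrative only.

def num_squares(n: int) -> int:
    if n <= 2:
        return n
    squares = [i * i for i in range(1, int(n ** 0.5) + 1)]
    level, frontier = 0, {n}
    while frontier:
        level += 1
        nxt = set()
        for x in frontier:
            for sq in squares:
                if sq == x:
                    return level
                if sq > x:
                    break
                nxt.add(x - sq)
        frontier = nxt
    return level

assert num_squares(12) == 3 and num_squares(13) == 2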
tcp.go
package protocol import ( "encoding/binary" "errors" ) type TCP struct { PortSrc uint16 PortDst uint16 SeqNum uint32 AckNum uint32
WinSize  uint16
	Checksum uint16
	UrgFlag  uint16

	Data []byte
}

func NewTCP() *TCP {
	u := new(TCP)
	u.Data = make([]byte, 0)
	return u
}

func (t *TCP) Len() (n uint16) {
	if t.Data != nil {
		return uint16(20 + len(t.Data))
	}
	return uint16(20)
}

func (t *TCP) MarshalBinary() (data []byte, err error) {
	data = make([]byte, int(t.Len()))
	binary.BigEndian.PutUint16(data[:2], t.PortSrc)
	binary.BigEndian.PutUint16(data[2:4], t.PortDst)
	binary.BigEndian.PutUint32(data[4:8], t.SeqNum)
	binary.BigEndian.PutUint32(data[8:12], t.AckNum)
	data[12] = (t.HdrLen << 4) & 0xf0
	data[13] = t.Code & 0x3f
	binary.BigEndian.PutUint16(data[14:16], t.WinSize)
	binary.BigEndian.PutUint16(data[16:18], t.Checksum)
	binary.BigEndian.PutUint16(data[18:20], t.UrgFlag)
	copy(data[20:], t.Data)
	return
}

func (t *TCP) UnmarshalBinary(data []byte) error {
	if len(data) < 20 {
		return errors.New("The []byte is too short to unmarshal a full TCP message.")
	}

	t.PortSrc = binary.BigEndian.Uint16(data[:2])
	t.PortDst = binary.BigEndian.Uint16(data[2:4])
	t.SeqNum = binary.BigEndian.Uint32(data[4:8])
	t.AckNum = binary.BigEndian.Uint32(data[8:12])
	t.HdrLen = (data[12] >> 4) & 0xf
	t.Code = data[13] & 0x3f
	t.WinSize = binary.BigEndian.Uint16(data[14:16])
	t.Checksum = binary.BigEndian.Uint16(data[16:18])
	t.UrgFlag = binary.BigEndian.Uint16(data[18:20])
	if len(data) > 20 {
		t.Data = make([]byte, (len(data) - 20))
		copy(t.Data, data[20:])
	}
	return nil
}
HdrLen uint8 Code uint8
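As a cross-check of the fixed 20-byte layout that MarshalBinary writes, the same header can be packed with Python's struct module; the field values below are arbitrary placeholders.

import struct

hdr_len, code = 5, 0x12
header = struct.pack(
    ">HHIIBBHHH",
    1234,                   # PortSrc  (bytes 0-1)
    80,                     # PortDst  (bytes 2-3)
    1,                      # SeqNum   (bytes 4-7)
    0,                      # AckNum   (bytes 8-11)
    (hdr_len << 4) & 0xF0,  # byte 12: header length in the high nibble
    code & 0x3F,            # byte 13: flags/code, low 6 bits
    0xFFFF,                 # WinSize  (bytes 14-15)
    0,                      # Checksum (bytes 16-17)
    0,                      # UrgFlag  (bytes 18-19)
)
assert len(header) == 20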
primary_info.py
import pandas as pd from zipfile import ZipFile import numpy as np import re import os def year_identifier(file_name): ''' Abstrait: identify the year of the file ''' folder_regex = re.compile(r'20\d\d') match = folder_regex.search(str(file_name)) year = match.group() return year def debt_correction(dataframe): debt_ident_list = ['Empréstimos e Financiamentos'] lpa_ident_list = ['ON'] count_debt = 1 count_lpa = 1 for row in range(len(dataframe)): for col in range(len(dataframe.columns)): if dataframe.iloc[row,col] in debt_ident_list: prev_name = dataframe.iloc[row,col] dataframe.iat[row, col] = f'{prev_name} {count_debt}' count_debt += 1 if dataframe.iloc[row,col] in lpa_ident_list: prev_name = dataframe.iloc[row,col] dataframe.iat[row, col] = f'{prev_name} {count_lpa}' count_lpa += 1 return dataframe def dataframe_filtering(folder, file_name_list, company_list, prev=False): ''' Input: folder name, list with important files in the folder and list with companies of interest Output: ''' dataframe_general = [] for company in company_list: dataframe_company = [] dataframe_list = [] for file in file_name_list: # Create BPA DataFrame file_raw = pd.read_csv(f'raw_dfp\\{folder}\\{file}', encoding='iso-8859-1', delimiter=';', skiprows=0, low_memory=False) # Filter year and last year results if prev is False: file_1 = file_raw[~file_raw['ORDEM_EXERC'].str.startswith('P')] folder_year = year_identifier(file_name_list) else: file_1 = file_raw[file_raw['ORDEM_EXERC'].str.startswith('P')] folder_year = int(year_identifier(file_name_list)) - 1 # Filter the right columns file_2 = file_1[['DENOM_CIA', 'CD_CONTA','DS_CONTA', 'VL_CONTA']] # Filter the right companies file_3 = file_2[file_2['DENOM_CIA'].isin([company])] # Filter the right data if file.find('DRE') != -1: interest_data = ['Receita de Venda de Bens e/ou Serviços', 'Resultado Bruto', 'Despesas com Vendas', 'Despesas com Pesquisa e Desenvolvimento', 'Custos com Pesquisa e Desenvolvimento', 'Despesas com pesquisas e desenvolvimento', 'Pesquisa e Desenvolvimento', 'Pesquisa', 'Despesas com Pesquisas e Desenvolvimento', 'Custo com Pesquisa e Desenvolvimento Tecnológico', 'Despesas com gastos com desenvolvimento', 'Despesas com desenvolvimento de tecnologia e produtos', 'Com estudos em desenvolvimento', 'Despesas Gerais e Administrativas', 'Despesas de Depreciação', 'Despesas/Receitas Operacionais', 'Resultado Antes do Resultado Financeiro e dos Tributos', 'Resultado Financeiro', 'Resultado Antes dos Tributos sobre o Lucro', 'Resultado Líquido das Operações Continuadas', 'Lucro Básico por Ação', 'ON'] elif file.find('BPA') != -1: interest_data = ['Ativo Total', 'Ativo Circulante', 'Imobilizado'] elif file.find('BPP') != -1: interest_data = ['Passivo Circulante', 'Empréstimos e Financiamentos', 'Passivo Não Circulante', 'Patrimônio Líquido Consolidado', 'Reservas de Lucros', 'Lucros/Prejuízos Acumulados'] elif file.find('DFC_MI') != -1: interest_data = ['Lucro Líquido do exercício', 'Depreciação, Amortização e Impairment', 'Depreciação e amortização', 'Depreciação de arrendamento', 'Depreciação e Amortização', 'Depreciações e Amortizações', 'Amortização e Depreciação', 'Depreciação/amortização', 'Depreciações', 'Depreciação e Amortizações', 'Depreciação do imobilizado', 'Depreciação e depleção do imobilizado', 'Depreciação, exaustão e amortização', 'Depreciação, Amortização e Exaustão', 'Depreciação, Exaustão e Amortização', 'Aquisição de Imobilizado e Intangíveis', 'Adições de imobilizado', 'Compras de ativo imobilizado', 'Aquisições de imobilizado', 
'Aquisições de Imobilizado', 'Aquisições de Imobilizado e Intangível', 'Aquisições de imobilizado e intangível', 'Aquisições de Imobilizados e Intangíveis (Exceto pelo Excedente de Cessão Onerosa)', 'Aquisições de imobilizados e intangíveis', 'Aquisições de imobilizado veículos frota', 'Aquisições de imobilizado de uso', 'Aquisições de Imobilizado de Uso', 'Aquisição de ativos imobilizados, intangível e propriedade para investimento', 'Aquisição de imobilizado e intangível'] file_4 = file_3[file_3['DS_CONTA'].isin(interest_data)] dataframe_list.append(file_4) # Concatenate each file dataframe into one and add year column dataframe_company = pd.concat(dataframe_list) dataframe_company = dataframe_company.rename(columns={"VL_CONTA": f"{folder_year}"}) # Append to general list dataframe_general.append(dataframe_company) return dataframe_general def primary_info(companies, clear_prev_folder=False): company_frames = [] for company in companies: company_frames.append(pd.DataFrame()) # Identify zip year for file in os.listdir('raw_dfp\\raw_zip'): zip_year = year_identifier(f'raw_dfp\\raw_zip\\{file}') # Create or clear the folder of the year output_folder = zip_year directory_elements = os.listdir('raw_dfp') if output_folder not in directory_elements: os.mkdir(f'raw_dfp\\{output_folder}') elif os.listdir(f'raw_dfp\\{output_folder}') != [] and clear_prev_folder is True: output_folder_elements = os.listdir(f'raw_dfp\\{output_folder}') for element in output_folder_elements: os.remove(f'raw_dfp\\{output_folder}\\{element}') # Extract files from zip if os.listdir(f'raw_dfp\\{output_folder}') == []: with ZipFile(f'raw_dfp\\raw_zip\\{file}', 'r') as zip: zip.extractall(path=f'raw_dfp\\{output_folder}') else: print(f"A pasta \"raw_dfp/{zip_year}\" ja tem arquivos internos. 
Confira a necessidade de descompactar o .zip.") print('Prosseguindo ...') # List folders in 'raw_dfp' and remove 'raw_zip' raw_folders = os.listdir('raw_dfp') raw_folders.remove('raw_zip') # Travel around raw_dfp folders excluding "raw_zip" for folder in raw_folders: # Remove all individual reports, aiming only consolidated reports file_list = os.listdir(f'raw_dfp\\{folder}') for file in file_list: file_regex = re.compile(r'ind_20\d\d') mo = file_regex.search(str(file)) if mo is not None: os.remove(f'raw_dfp\\{folder}\\{file}') # Travel around folder files for file in file_list: # Save DRE file name in a variable dre_regex = re.compile(r'DRE_con_20\d\d') mo_dre = dre_regex.search(str(file)) if mo_dre is not None: dre = file # Save BPA file name in a variable bpa_regex = re.compile(r'BPA_con_20\d\d') mo_bpa = bpa_regex.search(str(file)) if mo_bpa is not None: bpa = file # Save BPP file name in a variable bpp_regex = re.compile(r'BPP_con_20\d\d') mo_bpp = bpp_regex.search(str(file)) if mo_bpp is not None: bpp = file # Save DFC_MI file name in a variable dfc_regex = re.compile(r'DFC_MI_con_20\d\d') mo_dfc = dfc_regex.search(str(file)) if mo_dfc is not None: dfc = file folder_list = dataframe_filtering(folder, [dre, bpa, bpp, dfc], companies) # Create datframe for 2016 based on 2017 folder if int(folder) == 2017: folder_list_2 = dataframe_filtering(folder, [dre, bpa, bpp, dfc], companies, prev=True) for company_index in range(len(companies)): if len(folder_list_2[company_index]) == 0: # Do not add empty dataframe pass else: company_frames[company_index] = debt_correction(folder_list_2[company_index]) # Construct and append a final dataframe for each company with all years information for company_index in range(len(companies)): if len(folder_list[company_index]) == 0: pass elif len(company_frames[company_index]) == 0: company_frames[company_index] = debt_correction(folder_list[company_index]) else: main = company_frames[company_index] serie_corrected = debt_correction(folder_list[company_index][['DS_CONTA', str(folder)]]) serie = serie_corrected.set_index('DS_CONTA') #serie_no_dups = serie company_frames[company_index] = pd.merge(main, serie, on=['DS_CONTA']) return company_frames def worked_info(companies=['AMBEV S.A.'], clear_prev_folder=False): # Create return variable return_dict_list = [] # Extract primary information prim_info = primary_info(companies, clear_prev_folder=False) print('-+-' * 20) print('CARREGANDO DATAFFRAME ...') # Travel throught companies for comp_index in range(len(companies)): # Extract list of years collected year_columns = [] for column in prim_info[comp_index].columns: if '20' in column: year_columns.append(column) # Extract company frame primary_frame = prim_info[comp_index] #pd.set_option('display.expand_frame_repr', False) #print(primary_frame) #primary_frame.to_csv('primary_csv.csv',sep=' ') # Duplicate checker imobilizado_duplicate = 0 desp_ga_duplicate = 0 lucro_acumul_duplicate = 0 dai_duplicate = 0 ped_duplicate = 0 vendas_duplicate = 0 divida_curto_duplicate = 0 divida_longo_duplicate = 0 receita_duplicate = 0 # Initialize primary variables lists receita_list = [] lucro_brut_list = [] desp_vendas_list = [] desp_ga_list = [] dai_list = [] desp_oper_list = [] financeiro_list = [] lucropreimp_list = [] lucro_liq_list = [] lucro_oper_list = [] lucroporacao_list = [] ativo_total_list = [] ativo_circ_list = [] imobilizado_list = [] passivo_circ_list = [] divida_curto_list = [] divida_longo_list = [] passivo_ncirc_list = [] patr_liq_list = [] lucro_acumul_list = 
[] lucro_liq_exerc_list = [] desp_ativo_fixo_list = [] # Initialize intermediate variables desp_vga_list = [] desp_ped_list = [] # Travel trought cells for row in range(len(primary_frame)): col = 'DS_CONTA' # Fill primary variable lists (DRE) if primary_frame.iloc[row][col] == 'Receita de Venda de Bens e/ou Serviços': if receita_duplicate == 0: receita_duplicate += 1 for year in year_columns: receita_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Resultado Bruto': for year in year_columns: lucro_brut_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Despesas com Vendas': if vendas_duplicate == 0: vendas_duplicate += 1 for year in year_columns: desp_vendas_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Despesas Gerais e Administrativas': if desp_ga_duplicate == 0: desp_ga_duplicate += 1 for year in year_columns: desp_ga_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] in ['Despesas de Depreciação', 'Depreciação, Amortização e Impairment', 'Depreciação e amortização', 'Depreciação de arrendamento', 'Depreciação e Amortização', 'Depreciações e Amortizações', 'Amortização e Depreciação', 'Depreciação/amortização', 'Depreciações', 'Depreciação e Amortizações', 'Depreciação do imobilizado', 'Depreciação e depleção do imobilizado', 'Depreciação, exaustão e amortização', 'Depreciação, Amortização e Exaustão', 'Depreciação, Exaustão e Amortização']: if dai_duplicate == 0: dai_duplicate += 1 for year in year_columns: dai_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] in ['Despesas com Pesquisa e Desenvolvimento',
'Custos com Pesquisa e Desenvolvimento', 'Despesas com pesquisas e desenvolvimento', 'Pesquisa e Desenvolvimento', 'Pesquisa', 'Despesas com Pesquisas e Desenvolvimento', 'Custo com Pesquisa e Desenvolvimento Tecnológico', 'Despesas com gastos com desenvolvimento', 'Despesas com desenvolvimento de tecnologia e produtos', 'Com estudos em desenvolvimento']: if ped_duplicate == 0:
ped_duplicate += 1 for year in year_columns: desp_ped_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Despesas/Receitas Operacionais': for year in year_columns: desp_oper_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Resultado Antes do Resultado Financeiro e dos Tributos': for year in year_columns: lucro_oper_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Resultado Financeiro': for year in year_columns: financeiro_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Resultado Antes dos Tributos sobre o Lucro': for year in year_columns: lucropreimp_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Resultado Líquido das Operações Continuadas': for year in year_columns: lucro_liq_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'ON 1': for year in year_columns: lucroporacao_list.append(primary_frame.iloc[row][year]) # Fill primary variable lists (BPA and BPP) if primary_frame.iloc[row][col] == 'Ativo Total': for year in year_columns: ativo_total_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Ativo Circulante': for year in year_columns: ativo_circ_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Imobilizado': if imobilizado_duplicate == 0: imobilizado_duplicate += 1 for year in year_columns: imobilizado_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Passivo Circulante': for year in year_columns: passivo_circ_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Empréstimos e Financiamentos 1': if divida_curto_duplicate == 0: divida_curto_duplicate += 1 for year in year_columns: divida_curto_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Empréstimos e Financiamentos 3': if divida_longo_duplicate == 0: divida_longo_duplicate += 1 for year in year_columns: divida_longo_list.append(primary_frame.iloc[row][year]) else: pass elif primary_frame.iloc[row][col] == 'Passivo Não Circulante': for year in year_columns: passivo_ncirc_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Patrimônio Líquido Consolidado': for year in year_columns: patr_liq_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] == 'Reservas de Lucros' or primary_frame.iloc[row][col] == 'Lucros/Prejuízos Acumulados': if lucro_acumul_duplicate == 0: lucro_acumul_duplicate += 1 for year in year_columns: lucro_acumul_list.append(primary_frame.iloc[row][year]) else: pass # Fill primary variable lists (DFC) elif primary_frame.iloc[row][col] == 'Lucro Líquido do exercício': for year in year_columns: lucro_liq_exerc_list.append(primary_frame.iloc[row][year]) elif primary_frame.iloc[row][col] in ['Aquisição de Imobilizado e Intangíveis', 'Adições de imobilizado', 'Compras de ativo imobilizado', 'Aquisições de imobilizado', 'Aquisições de Imobilizado', 'Aquisições de Imobilizado e Intangível', 'Aquisições de imobilizado e intangível', 'Aquisições de Imobilizados e Intangíveis (Exceto pelo Excedente de Cessão Onerosa)', 'Aquisições de imobilizados e intangíveis', 'Aquisições de imobilizado veículos frota', 'Aquisições de imobilizado de uso', 'Aquisições de Imobilizado de Uso', 'Aquisição de ativos imobilizados, intangível e propriedade para investimento', 'Aquisição de imobilizado e intangível']: for year in
year_columns: desp_ativo_fixo_list.append(primary_frame.iloc[row][year]) # Build intermediate Variables desp_vga_list = np.array(desp_vendas_list) + np.array(desp_ga_list) divida_tot_list = np.array(divida_curto_list) + np.array(divida_longo_list) if lucro_brut_list == []: lucro_brut_list = np.zeros(len(year_columns)) if desp_ped_list == []: desp_ped_list = np.zeros(len(year_columns)) if dai_list == []: dai_list = np.zeros(len(year_columns)) if desp_ativo_fixo_list == []: desp_ativo_fixo_list = np.zeros(len(year_columns)) if lucro_liq_exerc_list == []: lucro_liq_exerc_list = lucro_liq_list # Build worked info marg_brut_list = 100 * np.divide(np.array(lucro_brut_list), np.array(receita_list)) marg_liq_list = 100 * np.divide(np.array(lucro_liq_list), np.array(receita_list)) vga_lucro_brut_list = 100 * np.divide(np.array(desp_vga_list), np.array(lucro_brut_list)) ped_lucro_brut_list = 100 * np.divide(np.array(desp_ped_list), np.array(lucro_brut_list)) deprec_lucro_brut_list = 100 * np.divide(np.array(dai_list), np.array(lucro_brut_list)) juros_lucro_oper_list = 100 * np.divide(np.array(financeiro_list), np.array(lucro_oper_list)) coef_liquidez_list = np.divide(np.array(ativo_circ_list), np.array(passivo_circ_list)) passivo_tot_patrliq_list = np.divide((np.array(passivo_circ_list) + np.array(passivo_ncirc_list)), np.array(patr_liq_list)) roe_list = 100 * np.divide(np.array(lucro_liq_list), np.array(patr_liq_list)) roa_list = 100 * np.divide(np.array(lucro_liq_list), np.array(ativo_total_list)) desp_ativo_fixo_lucro_liq_exerc_list = 100 * np.divide(np.array(desp_ativo_fixo_list), np.array(lucro_liq_exerc_list)) divida_curto_tot_list = 100 * np.divide(np.array(divida_curto_list), np.array(divida_tot_list)) divida_tot_lucro_oper_list = np.divide(np.array(divida_tot_list), np.array(lucro_oper_list)) company_dict = { 'year_columns': year_columns, 'marg_brut_list': marg_brut_list, 'marg_liq_list': marg_liq_list, 'vga_lucro_brut_list': vga_lucro_brut_list, 'ped_lucro_brut_list': ped_lucro_brut_list, 'deprec_lucro_brut_list': deprec_lucro_brut_list, 'juros_lucro_oper_list': juros_lucro_oper_list, 'lucro_brut_list': lucro_brut_list, 'lucro_liq_list': lucro_liq_list, 'lucroporacao_list':lucroporacao_list, 'coef_liquidez_list': coef_liquidez_list, 'imobilizado_list': imobilizado_list, 'passivo_tot_patrliq_list': passivo_tot_patrliq_list, 'roe_list': roe_list, 'roa_list': roa_list, 'lucro_acumul_list': lucro_acumul_list, 'desp_ativo_fixo_lucro_liq_exerc_list': desp_ativo_fixo_lucro_liq_exerc_list, 'divida_curto_tot_list': divida_curto_tot_list, 'divida_tot_lucro_oper_list': divida_tot_lucro_oper_list } return_dict_list.append(company_dict) return return_dict_list
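A minimal usage sketch for the script above (added for illustration, not part of the original source): it assumes the DFP zip archives are already placed under raw_dfp\raw_zip and that the company names passed in match the DENOM_CIA column exactly, as with the default 'AMBEV S.A.'.

if __name__ == '__main__':
    # Hypothetical driver: compute the ratio series and print a short per-company summary.
    companies = ['AMBEV S.A.']
    results = worked_info(companies=companies, clear_prev_folder=False)
    for name, data in zip(companies, results):
        print(f'\nIndicadores - {name}')
        for i, year in enumerate(data['year_columns']):
            print(f"{year}: margem bruta {data['marg_brut_list'][i]:.1f}% | "
                  f"ROE {data['roe_list'][i]:.1f}% | ROA {data['roa_list'][i]:.1f}%")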
main.rs
// DO NOT EDIT ! // This file was generated automatically from 'src/mako/cli/main.rs.mako' // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] #[macro_use] extern crate clap; extern crate yup_oauth2 as oauth2; extern crate yup_hyper_mock as mock; extern crate hyper_rustls; extern crate serde; extern crate serde_json; extern crate hyper; extern crate mime; extern crate strsim; extern crate google_cloudresourcemanager2 as api; use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; mod cmn; use cmn::{InvalidOptionsError, CLIError, JsonTokenStorage, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo}; use std::default::Default; use std::str::FromStr; use oauth2::{Authenticator, DefaultAuthenticatorDelegate, FlowType}; use serde_json as json; use clap::ArgMatches; enum DoitError { IoError(String, io::Error), ApiError(api::Error), } struct Engine<'n> { opt: ArgMatches<'n>, hub: api::CloudResourceManager<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, JsonTokenStorage, hyper::Client>>, gp: Vec<&'static str>, gpm: Vec<(&'static str, &'static str)>, } impl<'n> Engine<'n> { fn _folders_create(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "lifecycle-state" => Some(("lifecycleState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "lifecycle-state", "name", "parent"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Folder = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().create(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "parent" => { call = call.parent(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["parent"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().delete(opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work");
ostream.flush().unwrap(); Ok(()) } } } } fn _folders_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().get(opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_get_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ _ => { let suggestion = FieldCursor::did_you_mean(key, &vec![]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::GetIamPolicyRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().get_iam_policy(request, opt.value_of("resource").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.folders().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "show-deleted" => { call = call.show_deleted(arg_from_str(value.unwrap_or("false"), err, "show-deleted", "boolean")); }, "parent" => { call = call.parent(value.unwrap_or("")); }, "page-token" => { call = call.page_token(value.unwrap_or("")); }, "page-size" => { call = call.page_size(arg_from_str(value.unwrap_or("-0"), err, "page-size", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["show-deleted", "page-size", "parent", "page-token"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_move(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "destination-parent" => Some(("destinationParent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["destination-parent"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::MoveFolderRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().move_(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { 
err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "lifecycle-state" => Some(("lifecycleState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "display-name" => Some(("displayName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "parent" => Some(("parent", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "create-time" => Some(("createTime", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["create-time", "display-name", "lifecycle-state", "name", "parent"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Folder = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().patch(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "update-mask" => { call = call.update_mask(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["update-mask"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_search(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "page-token" => Some(("pageToken", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "page-size" => Some(("pageSize", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "query" => Some(("query", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["page-size", "page-token", "query"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::SearchFoldersRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().search(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_set_iam_policy(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "policy.etag" => Some(("policy.etag", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "policy.version" => Some(("policy.version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "update-mask" => Some(("updateMask", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["etag", "policy", "update-mask", "version"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::SetIamPolicyRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().set_iam_policy(request, opt.value_of("resource").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_test_iam_permissions(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "permissions" => Some(("permissions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["permissions"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::TestIamPermissionsRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().test_iam_permissions(request, opt.value_of("resource").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _folders_undelete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ _ => { let suggestion = FieldCursor::did_you_mean(key, &vec![]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::UndeleteFolderRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.folders().undelete(request, opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _operations_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.operations().get(opt.value_of("name").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: 
Option<InvalidOptionsError> = None; match self.opt.subcommand() { ("folders", Some(opt)) => { match opt.subcommand() { ("create", Some(opt)) => { call_result = self._folders_create(opt, dry_run, &mut err); }, ("delete", Some(opt)) => { call_result = self._folders_delete(opt, dry_run, &mut err); }, ("get", Some(opt)) => { call_result = self._folders_get(opt, dry_run, &mut err); }, ("get-iam-policy", Some(opt)) => { call_result = self._folders_get_iam_policy(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._folders_list(opt, dry_run, &mut err); }, ("move", Some(opt)) => { call_result = self._folders_move(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._folders_patch(opt, dry_run, &mut err); }, ("search", Some(opt)) => { call_result = self._folders_search(opt, dry_run, &mut err); }, ("set-iam-policy", Some(opt)) => { call_result = self._folders_set_iam_policy(opt, dry_run, &mut err); }, ("test-iam-permissions", Some(opt)) => { call_result = self._folders_test_iam_permissions(opt, dry_run, &mut err); }, ("undelete", Some(opt)) => { call_result = self._folders_undelete(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("folders".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("operations", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._operations_get(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("operations".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); } } if dry_run { if err.issues.len() > 0 { err_opt = Some(err); } Err(err_opt) } else { Ok(call_result) } } // Please note that this call will fail if any part of the opt can't be handled fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> { let (config_dir, secret) = { let config_dir = match cmn::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) { Err(e) => return Err(InvalidOptionsError::single(e, 3)), Ok(p) => p, }; match cmn::application_secret_from_directory(&config_dir, "cloudresourcemanager2-secret.json", "{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") { Ok(secret) => (config_dir, secret), Err(e) => return Err(InvalidOptionsError::single(e, 4)) } }; let auth = Authenticator::new( &secret, DefaultAuthenticatorDelegate, if opt.is_present("debug-auth") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }, JsonTokenStorage { program_name: "cloudresourcemanager2", db_dir: config_dir.clone(), }, Some(FlowType::InstalledRedirect(54324))); let client = if opt.is_present("debug") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { 
hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }; let engine = Engine { opt: opt, hub: api::CloudResourceManager::new(client, auth), gp: vec!["$-xgafv", "access-token", "alt", "callback", "fields", "key", "oauth-token", "pretty-print", "quota-user", "upload-type", "upload-protocol"], gpm: vec![ ("$-xgafv", "$.xgafv"), ("access-token", "access_token"), ("oauth-token", "oauth_token"), ("pretty-print", "prettyPrint"), ("quota-user", "quotaUser"), ("upload-type", "uploadType"), ("upload-protocol", "upload_protocol"), ] }; match engine._doit(true) { Err(Some(err)) => Err(err), Err(None) => Ok(engine), Ok(_) => unreachable!(), } } fn doit(&self) -> Result<(), DoitError> { match self._doit(false) { Ok(res) => res, Err(_) => unreachable!(), } } } fn main() { let mut exit_status = 0i32; let arg_data = [ ("folders", "methods: 'create', 'delete', 'get', 'get-iam-policy', 'list', 'move', 'patch', 'search', 'set-iam-policy', 'test-iam-permissions' and 'undelete'", vec![ ("create", Some(r##"Creates a Folder in the resource hierarchy. Returns an Operation which can be used to track the progress of the folder creation workflow. Upon success the Operation.response field will be populated with the created Folder. In order to succeed, the addition of this new Folder must not violate the Folder naming, height or fanout constraints. + The Folder's display_name must be distinct from all other Folder's that share its parent. + The addition of the Folder must not cause the active Folder hierarchy to exceed a height of 4. Note, the full active + deleted Folder hierarchy is allowed to reach a height of 8; this provides additional headroom when moving folders that contain deleted folders. + The addition of the Folder must not cause the total number of Folders under its parent to exceed 100. If the operation fails due to a folder constraint violation, some errors may be returned by the CreateFolder request, with status code FAILED_PRECONDITION and an error description. Other folder constraint violations will be communicated in the Operation, with the specific PreconditionFailure returned via the details list in the Operation.error field. The caller must have `resourcemanager.folders.create` permission on the identified parent."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_create", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("delete", Some(r##"Requests deletion of a Folder. The Folder is moved into the DELETE_REQUESTED state immediately, and is deleted approximately 30 days later. This method may only be called on an empty Folder in the ACTIVE state, where a Folder is empty if it doesn't contain any Folders or Projects in the ACTIVE state. The caller must have `resourcemanager.folders.delete` permission on the identified folder."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_delete", vec![ (Some(r##"name"##), None, Some(r##"the resource name of the Folder to be deleted. 
Must be of the form `folders/{folder_id}`."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("get", Some(r##"Retrieves a Folder identified by the supplied resource name. Valid Folder resource names have the format `folders/{folder_id}` (for example, `folders/1234`). The caller must have `resourcemanager.folders.get` permission on the identified folder."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_get", vec![ (Some(r##"name"##), None, Some(r##"The resource name of the Folder to retrieve. Must be of the form `folders/{folder_id}`."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("get-iam-policy", Some(r##"Gets the access control policy for a Folder. The returned policy may be empty if no such policy or resource exists. The `resource` field should be the Folder's resource name, e.g. "folders/1234". The caller must have `resourcemanager.folders.getIamPolicy` permission on the identified folder."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_get-iam-policy", vec![ (Some(r##"resource"##), None, Some(r##"REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Lists the Folders that are direct descendants of supplied parent resource. List provides a strongly consistent view of the Folders underneath the specified parent resource. List returns Folders sorted based upon the (ascending) lexical ordering of their display_name. The caller must have `resourcemanager.folders.list` permission on the identified parent."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("move", Some(r##"Moves a Folder under a new resource parent. Returns an Operation which can be used to track the progress of the folder move workflow. Upon success the Operation.response field will be populated with the moved Folder. Upon failure, a FolderOperationError categorizing the failure cause will be returned - if the failure occurs synchronously then the FolderOperationError will be returned via the Status.details field and if it occurs asynchronously then the FolderOperation will be returned via the the Operation.error field. 
In addition, the Operation.metadata field will be populated with a FolderOperation message as an aid to stateless clients. Folder moves will be rejected if they violate either the naming, height or fanout constraints described in the CreateFolder documentation. The caller must have `resourcemanager.folders.move` permission on the folder's current and proposed new parent."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_move", vec![ (Some(r##"name"##), None, Some(r##"The resource name of the Folder to move. Must be of the form folders/{folder_id}"##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Updates a Folder, changing its display_name. Changes to the folder display_name will be rejected if they violate either the display_name formatting rules or naming constraints described in the CreateFolder documentation. The Folder's display name must start and end with a letter or digit, may contain letters, digits, spaces, hyphens and underscores and can be no longer than 30 characters. This is captured by the regular expression: [\p{L}\p{N}]([\p{L}\p{N}_- ]{0,28}[\p{L}\p{N}])?. The caller must have `resourcemanager.folders.update` permission on the identified folder. If the update fails due to the unique name constraint then a PreconditionFailure explaining this violation will be returned in the Status.details field."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_patch", vec![ (Some(r##"name"##), None, Some(r##"Output only. The resource name of the Folder. Its format is `folders/{folder_id}`, for example: "folders/1234"."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("search", Some(r##"Search for folders that match specific filter criteria. Search provides an eventually consistent view of the folders a user has access to which meet the specified filter criteria. This will only return folders on which the caller has the permission `resourcemanager.folders.get`."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_search", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("set-iam-policy", Some(r##"Sets the access control policy on a Folder, replacing any existing policy. The `resource` field should be the Folder's resource name, e.g. "folders/1234". 
The caller must have `resourcemanager.folders.setIamPolicy` permission on the identified folder."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_set-iam-policy", vec![ (Some(r##"resource"##), None, Some(r##"REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("test-iam-permissions", Some(r##"Returns permissions that a caller has on the specified Folder. The `resource` field should be the Folder's resource name, e.g. "folders/1234". There are no permissions required for making this API call."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_test-iam-permissions", vec![ (Some(r##"resource"##), None, Some(r##"REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("undelete", Some(r##"Cancels the deletion request for a Folder. This method may only be called on a Folder in the DELETE_REQUESTED state. In order to succeed, the Folder's parent must be in the ACTIVE state. In addition, reintroducing the folder into the tree must not violate folder naming, height and fanout constraints described in the CreateFolder documentation. The caller must have `resourcemanager.folders.undelete` permission on the identified folder."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/folders_undelete", vec![ (Some(r##"name"##), None, Some(r##"The resource name of the Folder to undelete. Must be of the form `folders/{folder_id}`."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("operations", "methods: 'get'", vec![ ("get", Some(r##"Gets the latest state of a long-running operation. 
Clients can use this method to poll the operation result at intervals as recommended by the API service."##), "Details at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli/operations_get", vec![ (Some(r##"name"##), None, Some(r##"The name of the operation resource."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ]; let mut app = App::new("cloudresourcemanager2") .author("Sebastian Thiel <[email protected]>") .version("1.0.8+20181008") .about("The Google Cloud Resource Manager API provides methods for creating, reading, and updating project metadata.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_cloudresourcemanager2_cli") .arg(Arg::with_name("url") .long("scope") .help("Specify the authentication a method should be executed in. Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.") .multiple(true) .takes_value(true)) .arg(Arg::with_name("folder") .long("config-dir") .help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli") .multiple(false) .takes_value(true)) .arg(Arg::with_name("debug") .long("debug") .help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)) .arg(Arg::with_name("debug-auth") .long("debug-auth") .help("Output all communication related to authentication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)); for &(main_command_name, about, ref subcommands) in arg_data.iter() { let mut mcmd = SubCommand::with_name(main_command_name).about(about); for &(sub_command_name, ref desc, url_info, ref args) in subcommands { let mut scmd = SubCommand::with_name(sub_command_name); if let &Some(desc) = desc { scmd = scmd.about(desc); } scmd = scmd.after_help(url_info); for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args { let arg_name_str = match (arg_name, flag) { (&Some(an), _ ) => an, (_ , &Some(f)) => f, _ => unreachable!(), }; let mut arg = Arg::with_name(arg_name_str) .empty_values(false); if let &Some(short_flag) = flag { arg = arg.short(short_flag); } if let &Some(desc) = desc { arg = arg.help(desc); } if arg_name.is_some() && flag.is_some() { arg = arg.takes_value(true); } if let &Some(required) = required { arg = arg.required(required); } if let &Some(multi) = multi { arg = arg.multiple(multi); } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); } app = app.subcommand(mcmd); } let matches = app.get_matches(); let debug = matches.is_present("debug"); match Engine::new(matches) { Err(err) => { exit_status = err.exit_code; writeln!(io::stderr(), "{}", err).ok(); }, Ok(engine) => { if let Err(doit_err) = engine.doit() { exit_status = 1; match doit_err { DoitError::IoError(path, err) => { writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok(); }, DoitError::ApiError(err) => { if debug { writeln!(io::stderr(), "{:#?}", err).ok(); } else { writeln!(io::stderr(), "{}", err).ok(); } } } } } } std::process::exit(exit_status); }
remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap();
pooled_connection.rs
use rocket::http; use rocket::request; use rocket::Outcome; use rocket::State; use nats::Client; use r2d2::PooledConnection; use r2d2_nats::NatsConnectionManager; use std::ops::Deref; use std::ops::DerefMut; use std::env; type Pool = r2d2::Pool<NatsConnectionManager>; pub fn init_pool() -> Pool { let manager = NatsConnectionManager::new(nats_url()) .expect("connection manager"); return r2d2::Pool::builder() .max_size(15) .build(manager) .unwrap(); } fn nats_url() -> String { env::var("NATS_ADDRESS") .expect("NATS_ADDRESS environment variable must be set") } pub struct NatsConnection(pub PooledConnection<NatsConnectionManager>); impl<'a, 'r> request::FromRequest<'a, 'r> for NatsConnection { type Error = (); fn from_request(request: &'a request::Request<'r>) -> request::Outcome<NatsConnection, ()>
} impl Deref for NatsConnection { type Target = Client; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for NatsConnection { fn deref_mut(&mut self) -> &mut nats::Client { &mut self.0 } }
{ let pool = request.guard::<State<Pool>>()?; match pool.get() { Ok(conn) => Outcome::Success(NatsConnection(conn)), Err(_) => Outcome::Failure((http::Status::ServiceUnavailable, ())), } }
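For reference, a minimal sketch of how the `NatsConnection` guard defined above might be consumed from a Rocket 0.4-style route. The route path, handler name, and the crate-level macro setup are illustrative assumptions, not part of the original file.

// Hypothetical usage of the request guard defined above (Rocket 0.4 style).
// Assumes the usual `#[macro_use] extern crate rocket;` at the crate root.
#[get("/health")]
fn health(_conn: NatsConnection) -> &'static str {
    // Rocket resolves FromRequest before the handler runs, so reaching this
    // point means a pooled NATS connection was successfully checked out.
    "ok"
}

fn launch() {
    rocket::ignite()
        .manage(init_pool()) // exposes the r2d2 pool as State<Pool> for the guard
        .mount("/", routes![health])
        .launch();
}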
mcdonalds_hu.py
# -*- coding: utf-8 -*- import scrapy import json import re from locations.items import GeojsonPointItem class McDonaldsHUSpider(scrapy.Spider): name = "mcdonalds_hu" allowed_domains = ["www.mcdonalds.hu"] start_urls = ( 'https://www.mcdonalds.hu/ettermeink', ) def store_hours(self, data): day_groups = [] this_day_group = {} weekdays = ['Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa', 'Su'] day_hours = data.xpath('.//div[@class="grid__item one-half text--right"]//text()').extract() index = 0 for day_hour in day_hours: day_hour = day_hour.strip() if index == 7: break hours = '' match = re.search(r'([0-9]{1,2}):([0-9]{1,2})–([0-9]{1,2}):([0-9]{1,2})', day_hour)
hours = '{}:{}-{}:{}'.format(sh, sm, int(eh) + 12 if int(eh) < 12 else int(eh), em) short_day = weekdays[index] if not this_day_group: this_day_group = { 'from_day': short_day, 'to_day': short_day, 'hours': hours, } elif hours == this_day_group['hours']: this_day_group['to_day'] = short_day elif hours != this_day_group['hours']: day_groups.append(this_day_group) this_day_group = { 'from_day': short_day, 'to_day': short_day, 'hours': hours, } index = index + 1 day_groups.append(this_day_group) if not day_groups: return None opening_hours = '' if len(day_groups) == 1 and day_groups[0]['hours'] in ('00:00-23:59', '00:00-00:00'): opening_hours = '24/7' else: for day_group in day_groups: if day_group['from_day'] == day_group['to_day']: opening_hours += '{from_day} {hours}; '.format(**day_group) else: opening_hours += '{from_day}-{to_day} {hours}; '.format(**day_group) opening_hours = opening_hours [:-2] return opening_hours def parse_latlon(self, data): map_url = data.xpath('//a[@title="Mutatás a térképen"]/@href').extract_first().strip() lat_lon = map_url.split("loc:")[1] lat = lat_lon.split(",")[0] lon = lat_lon.split(",")[1] return lat, lon def parse_store(self, response): address = response.xpath('//h1[@class="text--uppercase"]/text()').extract_first() phone = response.xpath('//a[@title="Telefonszám"]/text()').extract_first() lat, lon = self.parse_latlon(response) properties = { 'ref': response.meta['ref'], 'phone': phone.strip() if phone else "", 'lon': lon, 'lat': lat, 'name': "McDonald's", 'addr_full': address.strip() if address else "" } opening_hours = self.store_hours(response) if opening_hours: properties['opening_hours'] = opening_hours yield GeojsonPointItem(**properties) def parse(self, response): results = response.xpath('//article') for item in results: ref_id = item.xpath('.//footer/a/@href').extract_first().strip() ref_id = ref_id.split("/")[2] yield scrapy.Request(response.urljoin('https://www.mcdonalds.hu/ettermeink/' + ref_id), meta={'ref':ref_id}, callback=self.parse_store)
if not match: hours = "off" else: sh, sm, eh, em = match.groups()
property-enum.js
var _slicedToArray = function () { function
(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }(); import React from 'react'; import PropTypes from 'prop-types'; import { Seq } from 'immutable'; import { FormLabel, FormSelect } from '../../components/style/export'; import PropertyStyle from './shared-property-style'; export default function PropertyEnum(_ref) { var value = _ref.value, onUpdate = _ref.onUpdate, configs = _ref.configs, sourceElement = _ref.sourceElement, internalState = _ref.internalState, state = _ref.state; var update = function update(val) { if (configs.hook) { return configs.hook(val, sourceElement, internalState, state).then(function (_val) { return onUpdate(_val); }); } return onUpdate(val); }; return React.createElement( 'table', { className: 'PropertyEnum', style: PropertyStyle.tableStyle }, React.createElement( 'tbody', null, React.createElement( 'tr', null, React.createElement( 'td', { style: PropertyStyle.firstTdStyle }, React.createElement( FormLabel, null, configs.label ) ), React.createElement( 'td', null, React.createElement( FormSelect, { value: value, onChange: function onChange(event) { return update(event.target.value); } }, Seq(configs.values).entrySeq().map(function (_ref2) { var _ref3 = _slicedToArray(_ref2, 2), key = _ref3[0], value = _ref3[1]; return React.createElement( 'option', { key: key, value: key }, value ); }) ) ) ) ) ); } PropertyEnum.propTypes = { value: PropTypes.any.isRequired, onUpdate: PropTypes.func.isRequired, configs: PropTypes.object.isRequired, sourceElement: PropTypes.object, internalState: PropTypes.object, state: PropTypes.object.isRequired };
sliceIterator
connection.rs
use crate::prelude::*; use core::str::FromStr; use core::time::Duration; use core::{fmt, u64}; use serde::{Deserialize, Serialize}; use tendermint_proto::Protobuf; use ibc_proto::ibc::core::connection::v1::{ ConnectionEnd as RawConnectionEnd, Counterparty as RawCounterparty, IdentifiedConnection as RawIdentifiedConnection, }; use crate::core::ics02_client::error::Error as ClientError; use crate::core::ics03_connection::error::Error; use crate::core::ics03_connection::version::Version; use crate::core::ics23_commitment::commitment::CommitmentPrefix; use crate::core::ics24_host::error::ValidationError; use crate::core::ics24_host::identifier::{ClientId, ConnectionId}; use crate::timestamp::ZERO_DURATION; #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct IdentifiedConnectionEnd { pub connection_id: ConnectionId, pub connection_end: ConnectionEnd, } impl IdentifiedConnectionEnd { pub fn new(connection_id: ConnectionId, connection_end: ConnectionEnd) -> Self { IdentifiedConnectionEnd { connection_id, connection_end, } } pub fn id(&self) -> &ConnectionId { &self.connection_id } pub fn end(&self) -> &ConnectionEnd { &self.connection_end } } impl Protobuf<RawIdentifiedConnection> for IdentifiedConnectionEnd {} impl TryFrom<RawIdentifiedConnection> for IdentifiedConnectionEnd { type Error = Error; fn try_from(value: RawIdentifiedConnection) -> Result<Self, Self::Error> { let raw_connection_end = RawConnectionEnd { client_id: value.client_id.to_string(), versions: value.versions, state: value.state, counterparty: value.counterparty, delay_period: value.delay_period, }; Ok(IdentifiedConnectionEnd { connection_id: value.id.parse().map_err(Error::invalid_identifier)?, connection_end: raw_connection_end.try_into()?, }) } } impl From<IdentifiedConnectionEnd> for RawIdentifiedConnection { fn from(value: IdentifiedConnectionEnd) -> Self { RawIdentifiedConnection { id: value.connection_id.to_string(), client_id: value.connection_end.client_id.to_string(), versions: value .connection_end .versions .iter() .map(|v| From::from(v.clone())) .collect(), state: value.connection_end.state as i32, delay_period: value.connection_end.delay_period.as_nanos() as u64, counterparty: Some(value.connection_end.counterparty().clone().into()), } } } #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct ConnectionEnd { pub state: State, client_id: ClientId, counterparty: Counterparty, versions: Vec<Version>, delay_period: Duration, } impl Default for ConnectionEnd { fn default() -> Self { Self { state: State::Uninitialized, client_id: Default::default(), counterparty: Default::default(), versions: Vec::new(), delay_period: ZERO_DURATION, } } } impl Protobuf<RawConnectionEnd> for ConnectionEnd {} impl TryFrom<RawConnectionEnd> for ConnectionEnd { type Error = Error; fn try_from(value: RawConnectionEnd) -> Result<Self, Self::Error> { let state = value.state.try_into()?; if state == State::Uninitialized { return Ok(ConnectionEnd::default()); } if value.client_id.is_empty() { return Err(Error::empty_proto_connection_end()); } Ok(Self::new( state, value.client_id.parse().map_err(Error::invalid_identifier)?, value .counterparty .ok_or_else(Error::missing_counterparty)? 
.try_into()?, value .versions .into_iter() .map(Version::try_from) .collect::<Result<Vec<_>, _>>()?, Duration::from_nanos(value.delay_period), )) } } impl From<ConnectionEnd> for RawConnectionEnd { fn from(value: ConnectionEnd) -> Self { RawConnectionEnd { client_id: value.client_id.to_string(), versions: value .versions .iter() .map(|v| From::from(v.clone())) .collect(), state: value.state as i32, counterparty: Some(value.counterparty.into()), delay_period: value.delay_period.as_nanos() as u64, } } } impl ConnectionEnd { pub fn new( state: State, client_id: ClientId, counterparty: Counterparty, versions: Vec<Version>, delay_period: Duration, ) -> Self { Self { state, client_id, counterparty, versions, delay_period, } } /// Getter for the state of this connection end. pub fn state(&self) -> &State { &self.state } /// Setter for the `state` field. pub fn set_state(&mut self, new_state: State) { self.state = new_state; } /// Setter for the `counterparty` field. pub fn set_counterparty(&mut self, new_cparty: Counterparty) { self.counterparty = new_cparty; } /// Setter for the `version` field. pub fn set_version(&mut self, new_version: Version) {
pub fn counterparty_matches(&self, other: &Counterparty) -> bool { self.counterparty.eq(other) } /// Helper function to compare the client id of this end with another client identifier. pub fn client_id_matches(&self, other: &ClientId) -> bool { self.client_id.eq(other) } /// Helper function to determine whether the connection is open. pub fn is_open(&self) -> bool { self.state_matches(&State::Open) } /// Helper function to determine whether the connection is uninitialized. pub fn is_uninitialized(&self) -> bool { self.state_matches(&State::Uninitialized) } /// Helper function to compare the state of this end with another state. pub fn state_matches(&self, other: &State) -> bool { self.state.eq(other) } /// Getter for the client id on the local party of this connection end. pub fn client_id(&self) -> &ClientId { &self.client_id } /// Getter for the list of versions in this connection end. pub fn versions(&self) -> &[Version] { &self.versions } /// Getter for the counterparty. pub fn counterparty(&self) -> &Counterparty { &self.counterparty } /// Getter for the delay_period field. This represents the duration, at minimum, /// to delay the sending of a packet after the client update for that packet has been submitted. pub fn delay_period(&self) -> Duration { self.delay_period } /// TODO: Clean this up, probably not necessary. pub fn validate_basic(&self) -> Result<(), ValidationError> { self.counterparty.validate_basic() } } #[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct Counterparty { client_id: ClientId, pub connection_id: Option<ConnectionId>, prefix: CommitmentPrefix, } impl Protobuf<RawCounterparty> for Counterparty {} // Converts from the wire format RawCounterparty. Typically used from the relayer side // during queries for response validation and to extract the Counterparty structure. impl TryFrom<RawCounterparty> for Counterparty { type Error = Error; fn try_from(value: RawCounterparty) -> Result<Self, Self::Error> { let connection_id = Some(value.connection_id) .filter(|x| !x.is_empty()) .map(|v| FromStr::from_str(v.as_str())) .transpose() .map_err(Error::invalid_identifier)?; Ok(Counterparty::new( value.client_id.parse().map_err(Error::invalid_identifier)?, connection_id, value .prefix .ok_or_else(Error::missing_counterparty)? .key_prefix .try_into() .map_err(|_| Error::ics02_client(ClientError::empty_prefix()))?, )) } } impl From<Counterparty> for RawCounterparty { fn from(value: Counterparty) -> Self { RawCounterparty { client_id: value.client_id.as_str().to_string(), connection_id: value .connection_id .map_or_else(|| "".to_string(), |v| v.as_str().to_string()), prefix: Some(ibc_proto::ibc::core::commitment::v1::MerklePrefix { key_prefix: value.prefix.into_vec(), }), } } } impl Counterparty { pub fn new( client_id: ClientId, connection_id: Option<ConnectionId>, prefix: CommitmentPrefix, ) -> Self { Self { client_id, connection_id, prefix, } } /// Getter for the client id. pub fn client_id(&self) -> &ClientId { &self.client_id } /// Getter for connection id. pub fn connection_id(&self) -> Option<&ConnectionId> { self.connection_id.as_ref() } pub fn prefix(&self) -> &CommitmentPrefix { &self.prefix } pub fn validate_basic(&self) -> Result<(), ValidationError> { Ok(()) } } #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum State { Uninitialized = 0, Init = 1, TryOpen = 2, Open = 3, } impl State { /// Yields the State as a string. 
pub fn as_str(&self) -> &'static str { match self { Self::Uninitialized => "UNINITIALIZED", Self::Init => "INIT", Self::TryOpen => "TRYOPEN", Self::Open => "OPEN", } } /// Parses the State out from an i32. pub fn from_i32(s: i32) -> Result<Self, Error> { match s { 0 => Ok(Self::Uninitialized), 1 => Ok(Self::Init), 2 => Ok(Self::TryOpen), 3 => Ok(Self::Open), _ => Err(Error::invalid_state(s)), } } /// Returns whether or not this connection state is `Open`. pub fn is_open(self) -> bool { self == State::Open } /// Returns whether or not a connection in this state /// has progressed less than, or the same as, the argument. /// /// # Example /// ```rust,ignore /// assert!(State::Init.less_or_equal_progress(State::Open)); /// assert!(State::TryOpen.less_or_equal_progress(State::TryOpen)); /// assert!(!State::Open.less_or_equal_progress(State::Uninitialized)); /// ``` pub fn less_or_equal_progress(self, other: Self) -> bool { self as u32 <= other as u32 } } impl fmt::Display for State { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.as_str()) } } impl TryFrom<i32> for State { type Error = Error; fn try_from(value: i32) -> Result<Self, Self::Error> { match value { 0 => Ok(Self::Uninitialized), 1 => Ok(Self::Init), 2 => Ok(Self::TryOpen), 3 => Ok(Self::Open), _ => Err(Error::invalid_state(value)), } } } impl From<State> for i32 { fn from(value: State) -> Self { value as i32 } }
self.versions = vec![new_version]; } /// Helper function to compare the counterparty of this end with another counterparty.
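A short illustration of the `State` helpers declared in this file, exercising the i32 conversions and the handshake-ordering check. This is a sketch only, and assumes `TryFrom` is in scope via the crate prelude.

// Sketch: round-trip a State through its i32 wire value and compare progress.
fn state_demo() -> Result<(), Error> {
    let s = State::try_from(2)?;                        // 2 => State::TryOpen
    assert_eq!(i32::from(s), 2);                        // back to the wire value
    assert!(State::Init.less_or_equal_progress(s));     // Init has progressed less
    assert!(!State::Open.less_or_equal_progress(State::Init));
    assert!(State::Open.is_open());
    Ok(())
}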
state.js
}, query: "", }; // change the reducer so it produces state values // corresponding to the initial state structure // people.map is a plain javascript object used // as key/value dict where key is the person id // people.all should just contain the person ids // in order received from the server // or null while loading // handle a SET_QUERY action that just sets the // query field export const reducer = (state = initialState, action) => { switch (action.type) { case "SET_PEOPLE": return { people: action.people, }; case "SET_PERSON": return { people: state.people.map((p) => p.id === action.person.id ? action.person : p ), }; default: return state; } }; // implement all selectors and action creators needed in ./connect export const getPersonById = (state, personId) => state.people.find((p) => p.id === personId); // ... and all the others
const initialState = { people: { map: {}, all: null,
bucketcontent_types.go
/* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1alpha1 import ( metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) // EDIT THIS FILE! THIS IS SCAFFOLDING FOR YOU TO OWN! // NOTE: json tags are required. Any new fields you add must have json tags for the fields to be serialized. // BucketContentSpec defines the desired state of BucketContent type BucketContentSpec struct { // INSERT ADDITIONAL SPEC FIELDS - desired state of cluster // Important: Run "make" to regenerate code after modifying this file // Foo is an example field of BucketContent. Edit BucketContent_types.go to remove/update Foo string `json:"foo,omitempty"` } // BucketContentStatus defines the observed state of BucketContent type BucketContentStatus struct { // INSERT ADDITIONAL STATUS FIELD - define observed state of cluster // Important: Run "make" to regenerate code after modifying this file } // +kubebuilder:object:root=true // BucketContent is the Schema for the bucketcontents API type BucketContent struct { metav1.TypeMeta `json:",inline"` metav1.ObjectMeta `json:"metadata,omitempty"` Spec BucketContentSpec `json:"spec,omitempty"` Status BucketContentStatus `json:"status,omitempty"` } // +kubebuilder:object:root=true // BucketContentList contains a list of BucketContent type BucketContentList struct { metav1.TypeMeta `json:",inline"` metav1.ListMeta `json:"metadata,omitempty"` Items []BucketContent `json:"items"` } func
() { SchemeBuilder.Register(&BucketContent{}, &BucketContentList{}) }
init
functions4.rs
// functions4.rs // Make me compile! Execute `rustlings hint functions4` for hints :) // This store is having a sale where if the price is an even number, you get // 10 Rustbucks off, but if it's an odd number, it's 3 Rustbucks off. fn main() { let original_price = 51; println!("Your sale price is {}", sale_price(original_price)); }
return price - 3 } fn is_even(num: i32) -> bool { num % 2 == 0 }
fn sale_price(price: i32) -> i32 { if is_even(price) { return price - 10 }
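The exercise comment spells out the discount rule (10 Rustbucks off even prices, 3 off odd prices); a tiny check of the completed functions, added here only as an illustration:

#[test]
fn sale_price_matches_the_advertised_rule() {
    assert_eq!(sale_price(50), 40); // even price: 10 Rustbucks off
    assert_eq!(sale_price(51), 48); // odd price: 3 Rustbucks off
}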
api_test.go
// Copyright (c) 2012-2013 Jason McVetta. This is Free Software, released // under the terms of the GPL v3. See http://www.gnu.org/copyleft/gpl.html for // details. Resist intellectual serfdom - the ownership of ideas is akin to // slavery. package napping import ( "bytes" "encoding/json" "fmt" "github.com/stretchr/testify/assert" "io/ioutil" "log" "net/http" "net/http/httptest" "net/url" "strings" "testing" ) func init() { log.SetFlags(log.Ltime | log.Lshortfile) } func TestInvalidUrl(t *testing.T) { // // Missing protocol scheme - url.Parse should fail // url := "://foobar.com" _, err := Get(url, nil, nil, nil) assert.NotEqual(t, nil, err) // // Unsupported protocol scheme - HttpClient.Do should fail // url = "foo://bar.com" _, err = Get(url, nil, nil, nil) assert.NotEqual(t, nil, err) } type structType struct { Foo int Bar string } type errorStruct struct { Status int Message string } var ( fooParams = Params{"foo": "bar"} barParams = Params{"bar": "baz"} fooStruct = structType{ Foo: 111, Bar: "foo", } barStruct = structType{ Foo: 222, Bar: "bar", } ) func TestGet(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleGet)) defer srv.Close() // // Good request // url := "http://" + srv.Listener.Addr().String() p := fooParams.AsUrlValues() res := structType{} resp, err := Get(url, &p, &res, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) assert.Equal(t, res, barStruct) // // Bad request // url = "http://" + srv.Listener.Addr().String() p = Params{"bad": "value"}.AsUrlValues() e := errorStruct{} resp, err = Get(url, &p, nil, nil) if err != nil { t.Fatal(err) } if resp.Status() == 200 { t.Error("Server returned 200 success when it should have failed") } assert.Equal(t, 500, resp.Status()) expected := errorStruct{ Message: "Bad query params: bad=value", Status: 500, } resp.Unmarshal(&e) assert.Equal(t, e, expected) } // TestDefaultParams tests using per-session default query parameters. 
func TestDefaultParams(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleGet)) defer srv.Close() // // Good request // url := "http://" + srv.Listener.Addr().String() p := fooParams.AsUrlValues() res := structType{} s := Session{ Params: &p, } resp, err := s.Get(url, nil, &res, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) assert.Equal(t, res, barStruct) // // Bad request // url = "http://" + srv.Listener.Addr().String() p = Params{"bad": "value"}.AsUrlValues() e := errorStruct{} resp, err = Get(url, &p, nil, nil) if err != nil { t.Fatal(err) } if resp.Status() == 200 { t.Error("Server returned 200 success when it should have failed") } assert.Equal(t, 500, resp.Status()) expected := errorStruct{ Message: "Bad query params: bad=value", Status: 500, } resp.Unmarshal(&e) assert.Equal(t, e, expected) } func TestDelete(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleDelete)) defer srv.Close() url := "http://" + srv.Listener.Addr().String() resp, err := Delete(url, nil, nil, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) } func TestHead(t *testing.T) { // TODO: test result srv := httptest.NewServer(http.HandlerFunc(HandleHead)) defer srv.Close() url := "http://" + srv.Listener.Addr().String() resp, err := Head(url, nil, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) } func TestOptions(t *testing.T) { // TODO: test result srv := httptest.NewServer(http.HandlerFunc(HandleOptions)) defer srv.Close() url := "http://" + srv.Listener.Addr().String() resp, err := Options(url, nil, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) } func TestPost(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePost)) defer srv.Close() s := Session{} s.Log = true url := "http://" + srv.Listener.Addr().String() payload := fooStruct res := structType{} resp, err := s.Post(url, &payload, &res, nil) if err != nil { t.Error(err) } assert.Equal(t, 200, resp.Status()) assert.Equal(t, res, barStruct) } func TestPostUnmarshallable(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePost)) defer srv.Close() type ft func() var f ft url := "http://" + srv.Listener.Addr().String() res := structType{} payload := f _, err := Post(url, &payload, &res, nil) assert.NotEqual(t, nil, err) _, ok := err.(*json.UnsupportedTypeError) if !ok { t.Log(err) t.Error("Expected json.UnsupportedTypeError") } } func TestPostParamsInUrl(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePost)) defer srv.Close() s := Session{} s.Log = true u := "http://" + srv.Listener.Addr().String() u += "?spam=eggs" // Add query params to URL payload := fooStruct res := structType{} resp, err := s.Post(u, &payload, &res, nil) if err != nil { t.Error(err) } expected := &url.Values{} expected.Add("spam", "eggs") assert.Equal(t, expected, resp.Params) } func TestPut(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePut)) defer srv.Close() url := "http://" + srv.Listener.Addr().String() res := structType{} resp, err := Put(url, &fooStruct, &res, nil) if err != nil { t.Error(err) } assert.Equal(t, resp.Status(), 200) // Server should return NO data assert.Equal(t, resp.RawText(), "") } func TestPatch(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandlePatch)) defer srv.Close() url := "http://" + srv.Listener.Addr().String() res := structType{} resp, err := Patch(url, &fooStruct, &res, nil) if err != nil { t.Error(err) } assert.Equal(t, resp.Status(), 200) // Server should 
return NO data assert.Equal(t, resp.RawText(), "") } func TestRawRequestWithData(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleRaw)) defer srv.Close() var payload = bytes.NewBufferString("napping") res := structType{} req := Request{ Url: "http://" + srv.Listener.Addr().String(), Method: "PUT", RawPayload: true, Payload: payload, Result: &res, } resp, err := Send(&req) if err != nil { t.Error(err) } assert.Equal(t, resp.Status(), 200) assert.Equal(t, res.Bar, "napping") } func TestRawRequestWithoutData(t *testing.T)
func TestRawRequestInvalidType(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleRaw)) defer srv.Close() payload := structType{} res := structType{} req := Request{ Url: "http://" + srv.Listener.Addr().String(), Method: "PUT", RawPayload: true, Payload: payload, Result: &res, } _, err := Send(&req) if err == nil { t.Error("Validation error expected") } else { assert.Equal(t, err.Error(), "Payload must be of type *bytes.Buffer if RawPayload is set to true") } } // TestRawResponse tests capturing the raw response body. func TestRawResponse(t *testing.T) { srv := httptest.NewServer(http.HandlerFunc(HandleRaw)) defer srv.Close() var payload = bytes.NewBufferString("napping") req := Request{ Url: "http://" + srv.Listener.Addr().String(), Method: "PUT", RawPayload: true, CaptureResponseBody: true, Payload: payload, } resp, err := Send(&req) if err != nil { t.Error(err) } assert.Equal(t, resp.Status(), 200) rawResponseStruct := structType{ Foo: 0, Bar: "napping", } blob, err := json.Marshal(rawResponseStruct) assert.Equal(t, bytes.Equal(resp.ResponseBody.Bytes(), blob), true) } func JsonError(w http.ResponseWriter, msg string, code int) { e := errorStruct{ Status: code, Message: msg, } blob, err := json.Marshal(e) if err != nil { http.Error(w, msg, code) return } http.Error(w, string(blob), code) } func HandleGet(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "GET" { msg := fmt.Sprintf("Expected method GET, received %v", method) http.Error(w, msg, 500) return } u := req.URL q := u.Query() for k, _ := range fooParams { if fooParams[k] != q.Get(k) { msg := "Bad query params: " + u.Query().Encode() JsonError(w, msg, http.StatusInternalServerError) return } } // // Generate response // blob, err := json.Marshal(barStruct) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } req.Header.Add("content-type", "application/json") w.Write(blob) } func HandleDelete(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "DELETE" { msg := fmt.Sprintf("Expected method DELETE, received %v", method) http.Error(w, msg, 500) return } } func HandleHead(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "HEAD" { msg := fmt.Sprintf("Expected method HEAD, received %v", method) http.Error(w, msg, 500) return } } func HandleOptions(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "OPTIONS" { msg := fmt.Sprintf("Expected method OPTIONS, received %v", method) http.Error(w, msg, 500) return } } func HandlePost(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "POST" { msg := fmt.Sprintf("Expected method POST, received %v", method) http.Error(w, msg, 500) return } // // Parse Payload // if req.ContentLength <= 0 { msg := "Content-Length must be greater than 0." 
JsonError(w, msg, http.StatusLengthRequired) return } body, err := ioutil.ReadAll(req.Body) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } var s structType err = json.Unmarshal(body, &s) if err != nil { JsonError(w, err.Error(), http.StatusBadRequest) return } if s != fooStruct { msg := "Bad request body" JsonError(w, msg, http.StatusBadRequest) return } // // Compose Response // blob, err := json.Marshal(barStruct) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } req.Header.Add("content-type", "application/json") w.Write(blob) } func HandlePut(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "PUT" { msg := fmt.Sprintf("Expected method PUT, received %v", method) http.Error(w, msg, 500) return } // // Parse Payload // if req.ContentLength <= 0 { msg := "Content-Length must be greater than 0." JsonError(w, msg, http.StatusLengthRequired) return } body, err := ioutil.ReadAll(req.Body) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } var s structType err = json.Unmarshal(body, &s) if err != nil { JsonError(w, err.Error(), http.StatusBadRequest) return } if s != fooStruct { msg := "Bad request body" JsonError(w, msg, http.StatusBadRequest) return } return } func HandlePatch(w http.ResponseWriter, req *http.Request) { method := strings.ToUpper(req.Method) if method != "PATCH" { msg := fmt.Sprintf("Expected method PATCH, received %v", method) http.Error(w, msg, 500) return } // // Parse Payload // if req.ContentLength <= 0 { msg := "Content-Length must be greater than 0." JsonError(w, msg, http.StatusLengthRequired) return } body, err := ioutil.ReadAll(req.Body) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } var s structType err = json.Unmarshal(body, &s) if err != nil { JsonError(w, err.Error(), http.StatusBadRequest) return } if s != fooStruct { msg := "Bad request body" JsonError(w, msg, http.StatusBadRequest) return } return } func HandleRaw(w http.ResponseWriter, req *http.Request) { var err error var result = structType{} if req.ContentLength <= 0 { result.Bar = "empty" } else { var body []byte body, err = ioutil.ReadAll(req.Body) if err == nil { result.Bar = string(body) } } if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } var blob []byte blob, err = json.Marshal(result) if err != nil { JsonError(w, err.Error(), http.StatusInternalServerError) return } w.Header().Add("content-type", "application/json") w.Write(blob) return }
{ srv := httptest.NewServer(http.HandlerFunc(HandleRaw)) defer srv.Close() var payload *bytes.Buffer = nil res := structType{} req := Request{ Url: "http://" + srv.Listener.Addr().String(), Method: "PUT", RawPayload: true, Payload: payload, Result: &res, } resp, err := Send(&req) if err != nil { t.Error(err) } assert.Equal(t, resp.Status(), 200) assert.Equal(t, res.Bar, "empty") }
user_interface.py
import submodule.appfunction as af import tkinter.filedialog from tkinter import * import tkinter as tk
import PIL.ImageTk import PIL.Image from PIL import * import cv2 import os class ObjectDetection_ui(tk.Tk): def __init__(self): self.window = tk.Tk() self.window.title("Object Detection") af.tf_version_check(); self.modelPath = "" self.labelMapPath = "" # Open a file dialog asking for the input image file def askopenimgfile(self): path = filedialog.askopenfilename() # got back the detected image img_processed = af.input_image(path, self.modelPath, self.labelMapPath) img = img_processed.resize((800, 600)) photo = PIL.ImageTk.PhotoImage(img) # open the image in a new window self.new_window = tk.Toplevel() self.new_window.title("Image") self.img_import = Label(self.new_window, image=photo, height=600, width=800) self.img_import.pack(fill=BOTH, expand=YES) self.img_import.bind("<Configure>", lambda event, arg=img: self.resize_image(event, arg)) self.img_import.pack(fill=BOTH, expand=YES) self.new_window.mainloop() # image resize related to the window size def resize_image(self, event, img): w, h = event.width, event.height img_copy = img.copy() resize_img = img_copy.resize((w,h)) photo = PIL.ImageTk.PhotoImage(resize_img) self.img_import.config(image=photo) self.img_import.image=photo #avoid garbage collection # Open a file dialog asking for the input video file def askopenvideofile(self): path = filedialog.askopenfilename() stop = af.input_video(path, self.modelPath, self.labelMapPath) if stop == True: return # Open the webcam of the user's laptop def askcam(self): stop = af.input_cam(self.modelPath, self.labelMapPath); # stop streaming and release the camera # if window close or "q" is pressed if (stop == True): return # Delete the placeholder when new input is indicated def delete_placeholder(self, entry): entry.delete(0, END) # Open a file dialog asking for the model file def askmodelfile(self, entry): path = filedialog.askopenfilename() if not af.file_is_exist(path) or not os.access(path, os.R_OK): raise Exception("Model file doesn't exist or is unreadable!") self.delete_placeholder(entry) entry.insert(0, path) self.modelPath = path # Open a file dialog asking for the label map file def asklabelfile(self, entry): path = filedialog.askopenfilename() if not af.file_is_exist(path) or not os.access(path, os.R_OK): raise Exception("Label map file doesn't exist or is unreadable!") self.delete_placeholder(entry) entry.insert(0, path) self.labelMapPath = path # main function where the ui runs def main(self): self.group_1 = Frame(self.window) self.group_1.pack() self.modelGroup = Frame(self.group_1) self.modelGroup.pack(fill=X, expand=YES) self.labelGroup = Frame(self.group_1) self.labelGroup.pack(fill=X, expand=YES) # display the path of model used and label map data file custPath = StringVar(None) pretext_model = "Please indicate the path to your detection model (*.pbtxt)" self.model_path = Entry(self.modelGroup, width=54, textvariable=custPath) self.model_path.insert(0, pretext_model) self.model_path.pack(side=LEFT) self.model_path.bind("<Button-1>", lambda event, arg=self.model_path: self.delete_placeholder(arg)) # browse for a model self.model_input = Button(self.modelGroup, text = "Browse", command = lambda: self.askmodelfile(self.model_path)) self.model_input.pack(side=LEFT) # label map data file custPath_label = StringVar(None) pretext_label = "Please indicate the path to your label map file (*.pb)" self.label_path = Entry(self.labelGroup, width=54, textvariable=custPath_label) self.label_path.insert(0, pretext_label) self.label_path.pack(side=LEFT) self.label_path.bind("<Button-1>", 
lambda event, arg=self.label_path: self.delete_placeholder(arg)) # browse for a label map file self.label_input = Button(self.labelGroup, text = "Browse", command = lambda: self.asklabelfile(self.label_path)) self.label_input.pack(side=LEFT) # Buttons of 3 input-type options and Quit self.group_2 = Frame(self.window) self.group_2.pack(fill=X, expand=YES) self.group_btn = Frame(self.group_2) self.group_btn.pack() # define all buttons image_input = Button(self.group_btn, text = "Image", command = self.askopenimgfile) image_input.pack(side=LEFT) video_input = Button(self.group_btn, text = "Video", command = self.askopenvideofile) video_input.pack(side=LEFT) cam_input = Button(self.group_btn, text = "Camera", command = self.askcam) cam_input.pack(side=LEFT) quitBtn = Button(self.group_btn, text="Quit", command = quit) quitBtn.pack(side=LEFT) self.window.mainloop() # start the user interface start = ObjectDetection_ui() start.main() if __name__ == "__main__": ObjectDetection_ui().main()
import numpy as np
main.go
package main import ( "os" "github.com/spf13/cobra" "github.com/virtual-disk-array/vda/pkg/csi" "k8s.io/klog" ) type argsStruct struct { endpoint string vdaEndpoint string enableCs bool enableNs bool nodeId string } var ( cmd = &cobra.Command{ Use: "vdacsi", Short: "vda csi controller and node driver", } args = argsStruct{} ) func init()
func main() { if err := cmd.Execute(); err != nil { klog.Errorf("Args err: %v", err) os.Exit(1) } klog.Infof("args: %v", args) csi.StartGrpcServer( args.endpoint, args.vdaEndpoint, args.enableCs, args.enableNs, args.nodeId) }
{ cmd.PersistentFlags().StringVarP( &args.endpoint, "endpoint", "", "unix:///csi/csi-provisioner.sock", "endpoint") cmd.PersistentFlags().StringVarP( &args.vdaEndpoint, "vda-endpoint", "", "localhost:9520", "vda endpoint") cmd.PersistentFlags().BoolVarP( &args.enableCs, "enable-cs", "", false, "enable controller server") cmd.PersistentFlags().BoolVarP( &args.enableNs, "enable-ns", "", false, "enable node server") cmd.PersistentFlags().StringVarP( &args.nodeId, "node-id", "", "", "node id") }
util.test.ts
/* tslint:disable:no-console */ import { TextDocument, TextEdit } from 'vscode-languageserver-protocol' import { getChange } from '../../util/diff' import { createTmpFile, isGitIgnored, readFileByLine, statAsync } from '../../util/fs' import { fuzzyChar, fuzzyMatch, getCharCodes } from '../../util/fuzzy' import { isCocItem } from '../../util/index' import { score } from '../../util/match' import Uri from 'vscode-uri' import path = require('path') import fs = require('fs') describe('score test', () => { test('should match schema', () => { let uri = Uri.file('/foo').toString() let s = score([{ language: '*', scheme: 'file' }], uri, 'typescript') expect(s).toBe(5) }) }) describe('isCocItem test', () => { test('should be coc item', () => { let item = { word: 'f', user_data: '{"cid": 123}' }
expect(isCocItem(item)).toBeTruthy }) test('shoud not be coc item', () => { expect(isCocItem(null)).toBeFalsy expect(isCocItem({})).toBeFalsy expect(isCocItem({ word: '' })).toBeFalsy expect(isCocItem({ word: '', user_data: 'abc' })).toBeFalsy }) }) describe('fuzzy match test', () => { test('should be fuzzy match', () => { let needle = 'aBc' let codes = getCharCodes(needle) expect(fuzzyMatch(codes, 'abc')).toBeFalsy expect(fuzzyMatch(codes, 'ab')).toBeFalsy expect(fuzzyMatch(codes, 'addbdd')).toBeFalsy expect(fuzzyMatch(codes, 'abbbBc')).toBeTruthy expect(fuzzyMatch(codes, 'daBc')).toBeTruthy expect(fuzzyMatch(codes, 'ABCz')).toBeTruthy }) test('should be fuzzy for character', () => { expect(fuzzyChar('a', 'a')).toBeTruthy expect(fuzzyChar('a', 'A')).toBeTruthy expect(fuzzyChar('z', 'z')).toBeTruthy expect(fuzzyChar('z', 'Z')).toBeTruthy expect(fuzzyChar('A', 'a')).toBeFalsy expect(fuzzyChar('A', 'A')).toBeTruthy expect(fuzzyChar('Z', 'z')).toBeFalsy expect(fuzzyChar('Z', 'Z')).toBeTruthy }) }) describe('fs test', () => { test('fs statAsync', async () => { let res = await statAsync(__filename) expect(res).toBeDefined expect(res.isFile()).toBe(true) }) test('fs statAsync #1', async () => { let res = await statAsync(path.join(__dirname, 'file_not_exist')) expect(res).toBeNull }) test('should be not ignored', async () => { let res = await isGitIgnored(__filename) expect(res).toBeFalsy }) test('should be ignored', async () => { let res = await isGitIgnored(path.resolve(__dirname, '../lib/index.js.map')) expect(res).toBeTruthy }) test('should read file by line', async () => { let lines = [] await readFileByLine(__filename, line => { lines.push(line) }) expect(lines.length > 0).toBeTruthy }) test('should create tmp file', async () => { let filename = await createTmpFile('coc test') expect(typeof filename).toBe('string') let stat = fs.statSync(filename) expect(stat.isFile()).toBeTruthy }) }) describe('diff test', () => { function expectChange(from: string, to: string): void { let doc = TextDocument.create('/coc', 'text', 0, from) let change = getChange(from, to) let { newText } = change let start = doc.positionAt(change.start) let end = doc.positionAt(change.end) let edit: TextEdit = { range: { start, end }, newText } let newContent = TextDocument.applyEdits(doc, [edit]) expect(newContent).toBe(to) } test('should get change', () => { expectChange('a', 'b') expectChange('a', 'bb') expectChange('abc\ndef', 'abbc\ndf') let arr = new Array(100000) let content = arr.fill('a').join('\n') expectChange(content, '') expectChange('', content) expectChange('abc', 'abbc\ndf') }) })
test_dtmf_decoder.py
# -*- coding: utf-8 -*- # Copyright 2020 Tomaz Muraus # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import unittest from radio_bridge.dtmf import FFTDTMFDecoderImplementation __all__ = ["TestFFTDTMFDecoder"] BASE_DIR = os.path.dirname(os.path.abspath(__file__)) FIXTURES_DIR = os.path.abspath(os.path.join(BASE_DIR, "../fixtures/dtmf")) class TestFFTDTMFDecoder(unittest.TestCase):
def test_decode_anytone_578_dtmf_data(self): values = [ ("1.wav", "1"), ("2.wav", "2"), ("3.wav", "3"), ("4.wav", "4"), ("5.wav", "5"), ("6.wav", "6"), ("7.wav", "7"), ("8.wav", "8"), ("9.wav", "9"), ("*.wav", "*"), ("0.wav", "0"), ("#.wav", "#"), ] for file_path, expected_code in values: file_path = os.path.join(FIXTURES_DIR, "anytone_578/", file_path) decoder = FFTDTMFDecoderImplementation(file_path=file_path) self.assertEqual(decoder.decode(), expected_code) def test_decode_audio_check_tone_generator_data(self): values = [ ("audiocheck.net_dtmf_1.wav", "1"), ("audiocheck.net_dtmf_2.wav", "2"), ("audiocheck.net_dtmf_3.wav", "3"), ("audiocheck.net_dtmf_4.wav", "4"), ("audiocheck.net_dtmf_5.wav", "5"), ("audiocheck.net_dtmf_6.wav", "6"), ("audiocheck.net_dtmf_7.wav", "7"), ("audiocheck.net_dtmf_8.wav", "8"), ("audiocheck.net_dtmf_9.wav", "9"), ("audiocheck.net_dtmf_*.wav", "*"), ("audiocheck.net_dtmf_0.wav", "0"), ("audiocheck.net_dtmf_#.wav", "#"), ] for file_path, expected_code in values: file_path = os.path.join(FIXTURES_DIR, "audiochecknet/", file_path) decoder = FFTDTMFDecoderImplementation(file_path=file_path) self.assertEqual(decoder.decode(), expected_code)
mode.ts
export interface IGameMode { getMode(): IMode; setMode(mode: IMode): void; } export interface IMode { dx: number; dy: number; lives: number; maxDx: number; maxDy: number; name: string; } class
{ private dx: number; private dy: number; private lives: number; private maxDx: number; private maxDy: number; private name: string; constructor(mode: IMode) { this.dx = mode.dx; this.dy = mode.dy; this.lives = mode.lives; this.maxDx = mode.maxDx; this.maxDy = mode.maxDy; this.name = mode.name; } public getMode(): IMode { return { dx: this.dx, dy: this.dy, lives: this.lives, maxDx: this.maxDx, maxDy: this.maxDy, name: this.name }; } public setMode(mode: IMode): void { const { dx, dy, lives, maxDx, maxDy, name } = mode; this.dx = dx; this.dy = dy; this.lives = lives; this.maxDx = maxDx; this.maxDy = maxDy; this.name = name; } } export default Mode;
Mode
issue-53123-raw-pointer-cast.rs
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(nll)] #![allow(unused_variables)]
fn try_transform<F>(self, f: F) where Self: Sized, F: FnOnce(Self); } impl<'a, T> TryTransform for &'a mut T { fn try_transform<F>(self, f: F) where // The bug was that `Self: Sized` caused the lifetime of `this` to "extend" for all // of 'a instead of only lasting as long as the binding is used (for just that line). Self: Sized, F: FnOnce(Self), { let this: *mut T = self as *mut T; f(self); } } fn main() { }
pub trait TryTransform {
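To make the regression concrete: with non-lexical lifetimes, the mutable borrow handed to `try_transform` ends when the closure returns, so the original binding is usable again on the next line. A minimal sketch (the variable name and closure are illustrative):

fn demo() {
    let mut value = 41;
    (&mut value).try_transform(|v| *v += 1);
    // Under NLL the &mut borrow above has already ended, so this read compiles.
    assert_eq!(value, 42);
}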
bitstream_test.go
package bitstream import ( "bytes" "fmt" "io" "strings" "testing" ) func TestBitStreamEOF(t *testing.T) { br := NewReader(strings.NewReader("0")) b, err := br.ReadByte() if b != '0' { t.Error("ReadByte didn't return first byte") } b, err = br.ReadByte() if err != io.EOF { t.Error("ReadByte on empty string didn't return EOF") } // 0 = 0b00110000 br = NewReader(strings.NewReader("0")) buf := bytes.NewBuffer(nil) bw := NewWriter(buf) for i := 0; i < 4; i++ { bit, err := br.ReadBit() if err == io.EOF { break } if err != nil { t.Error("GetBit returned error err=", err.Error()) return } err = bw.WriteBit(bit) if err != nil { t.Errorf("unexpected writer error") } } bw.Flush(One) err = bw.WriteByte(0xAA) if err != nil { t.Error("unable to WriteByte") } c := buf.Bytes() if len(c) != 2 || c[1] != 0xAA || c[0] != 0x3f { t.Error("bad return from 4 read bytes") } _, err = NewReader(strings.NewReader("")).ReadBit() if err != io.EOF { t.Error("ReadBit on empty string didn't return EOF") } } func TestBitStream(t *testing.T)
func TestByteStream(t *testing.T) { buf := bytes.NewBuffer(nil) br := NewReader(strings.NewReader("hello")) bw := NewWriter(buf) for i := 0; i < 3; i++ { bit, err := br.ReadBit() if err == io.EOF { break } if err != nil { t.Error("GetBit returned error err=", err.Error()) return } err = bw.WriteBit(bit) if err != nil { t.Errorf("unexpected writer error") } } for i := 0; i < 3; i++ { byt, err := br.ReadByte() if err == io.EOF { break } if err != nil { t.Error("GetByte returned error err=", err.Error()) return } bw.WriteByte(byt) } u, err := br.ReadBits(13) if err != nil { t.Error("ReadBits returned error err=", err.Error()) return } err = bw.WriteBits(u, 13) if err != nil { t.Errorf("unexpected writer error") } err = bw.WriteBits(('!'<<12)|('.'<<4)|0x02, 20) if err != nil { t.Errorf("unexpected writer error") } // 0x2f == '/' bw.Flush(One) s := buf.String() if s != "hello!./" { t.Errorf("expected 'hello!./', got=%x", []byte(s)) } } var myError error = fmt.Errorf("my error") type badWriter struct{} func (w *badWriter) Write(p []byte) (n int, err error) { return 0, myError } func TestErrorPropagation(t *testing.T) { // check WriteBit w := &badWriter{} bw := NewWriter(w) for i := 0; i < 7; i++ { err := bw.WriteBit(One) if err != nil { t.Errorf("unexpected error during buffered write operation") } } err := bw.WriteBit(One) if err != myError { t.Errorf("failed to propagate error") } // check WriteBits w = &badWriter{} bw = NewWriter(w) err = bw.WriteBits(256, 8) if err != myError { t.Errorf("failed to propagate error") } }
{ buf := bytes.NewBuffer(nil) br := NewReader(strings.NewReader("hello")) bw := NewWriter(buf) for { bit, err := br.ReadBit() if err == io.EOF { break } if err != nil { t.Error("GetBit returned error err=", err.Error()) return } err = bw.WriteBit(bit) if err != nil { t.Errorf("unexpected writer error") } } s := buf.String() if s != "hello" { t.Error("expected 'hello', got=", []byte(s)) } }
sql.js
// CodeMirror, copyright (c) by Marijn Haverbeke and others // Distributed under an MIT license: http://codemirror.net/LICENSE (function(mod) { if (typeof exports == "object" && typeof module == "object") // CommonJS mod(require("../../lib/codemirror")); else if (typeof define == "function" && define.amd) // AMD define(["../../lib/codemirror"], mod); else // Plain browser env mod(CodeMirror); })(function(CodeMirror) { "use strict"; CodeMirror.defineMode("sql", function(config, parserConfig) { "use strict"; var client = parserConfig.client || {}, atoms = parserConfig.atoms || {"false": true, "true": true, "null": true}, builtin = parserConfig.builtin || {}, keywords = parserConfig.keywords || {}, operatorChars = parserConfig.operatorChars || /^[*+\-%<>!=&|~^]/, support = parserConfig.support || {}, hooks = parserConfig.hooks || {}, dateSQL = parserConfig.dateSQL || {"date" : true, "time" : true, "timestamp" : true}; function tokenBase(stream, state) { var ch = stream.next(); // call hooks from the mime type if (hooks[ch]) { var result = hooks[ch](stream, state); if (result !== false) return result; } if (support.hexNumber == true && ((ch == "0" && stream.match(/^[xX][0-9a-fA-F]+/)) || (ch == "x" || ch == "X") && stream.match(/^'[0-9a-fA-F]+'/))) { // hex // ref: http://dev.mysql.com/doc/refman/5.5/en/hexadecimal-literals.html return "number"; } else if (support.binaryNumber == true && (((ch == "b" || ch == "B") && stream.match(/^'[01]+'/)) || (ch == "0" && stream.match(/^b[01]+/)))) { // bitstring // ref: http://dev.mysql.com/doc/refman/5.5/en/bit-field-literals.html return "number"; } else if (ch.charCodeAt(0) > 47 && ch.charCodeAt(0) < 58) { // numbers // ref: http://dev.mysql.com/doc/refman/5.5/en/number-literals.html stream.match(/^[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?/); support.decimallessFloat == true && stream.eat('.'); return "number"; } else if (ch == "?" 
&& (stream.eatSpace() || stream.eol() || stream.eat(";"))) { // placeholders return "variable-3"; } else if (ch == "'" || (ch == '"' && support.doubleQuote)) { // strings // ref: http://dev.mysql.com/doc/refman/5.5/en/string-literals.html state.tokenize = tokenLiteral(ch); return state.tokenize(stream, state); } else if ((((support.nCharCast == true && (ch == "n" || ch == "N")) || (support.charsetCast == true && ch == "_" && stream.match(/[a-z][a-z0-9]*/i))) && (stream.peek() == "'" || stream.peek() == '"'))) { // charset casting: _utf8'str', N'str', n'str' // ref: http://dev.mysql.com/doc/refman/5.5/en/string-literals.html return "keyword"; } else if (/^[\(\),\;\[\]]/.test(ch)) { // no highlightning return null; } else if (support.commentSlashSlash && ch == "/" && stream.eat("/")) { // 1-line comment stream.skipToEnd(); return "comment"; } else if ((support.commentHash && ch == "#") || (ch == "-" && stream.eat("-") && (!support.commentSpaceRequired || stream.eat(" ")))) { // 1-line comments // ref: https://kb.askmonty.org/en/comment-syntax/ stream.skipToEnd(); return "comment"; } else if (ch == "/" && stream.eat("*")) { // multi-line comments // ref: https://kb.askmonty.org/en/comment-syntax/ state.tokenize = tokenComment; return state.tokenize(stream, state); } else if (ch == ".") { // .1 for 0.1 if (support.zerolessFloat == true && stream.match(/^(?:\d+(?:e[+-]?\d+)?)/i)) { return "number"; } // .table_name (ODBC) // // ref: http://dev.mysql.com/doc/refman/5.6/en/identifier-qualifiers.html if (support.ODBCdotTable == true && stream.match(/^[a-zA-Z_]+/)) { return "variable-2"; } } else if (operatorChars.test(ch)) { // operators stream.eatWhile(operatorChars); return null; } else if (ch == '{' && (stream.match(/^( )*(d|D|t|T|ts|TS)( )*'[^']*'( )*}/) || stream.match(/^( )*(d|D|t|T|ts|TS)( )*"[^"]*"( )*}/))) { // dates (weird ODBC syntax) // ref: http://dev.mysql.com/doc/refman/5.5/en/date-and-time-literals.html return "number"; } else { stream.eatWhile(/^[_\w\d]/); var word = stream.current().toLowerCase(); // dates (standard SQL syntax) // ref: http://dev.mysql.com/doc/refman/5.5/en/date-and-time-literals.html if (dateSQL.hasOwnProperty(word) && (stream.match(/^( )+'[^']*'/) || stream.match(/^( )+"[^"]*"/))) return "number"; if (atoms.hasOwnProperty(word)) return "atom"; if (builtin.hasOwnProperty(word)) return "builtin"; if (keywords.hasOwnProperty(word)) return "keyword"; if (client.hasOwnProperty(word)) return "string-2"; return null; } } // 'string', with char specified in quote escaped by '\' function tokenLiteral(quote) { return function(stream, state) { var escaped = false, ch; while ((ch = stream.next()) != null) { if (ch == quote && !escaped) { state.tokenize = tokenBase; break; } escaped = !escaped && ch == "\\"; } return "string"; }; } function tokenComment(stream, state) { while (true) { if (stream.skipTo("*")) { stream.next(); if (stream.eat("/")) { state.tokenize = tokenBase; break; } } else { stream.skipToEnd(); break; } } return "comment"; } function pushContext(stream, state, type) { state.context = { prev: state.context, indent: stream.indentation(), col: stream.column(), type: type }; } function popContext(state) { state.indent = state.context.indent; state.context = state.context.prev; } return { startState: function() { return {tokenize: tokenBase, context: null}; }, token: function(stream, state) { if (stream.sol()) { if (state.context && state.context.align == null) state.context.align = false; } if (stream.eatSpace()) return null; var style = state.tokenize(stream, 
state); if (style == "comment") return style; if (state.context && state.context.align == null) state.context.align = true; var tok = stream.current(); if (tok == "(") pushContext(stream, state, ")"); else if (tok == "[") pushContext(stream, state, "]"); else if (state.context && state.context.type == tok) popContext(state); return style; }, indent: function(state, textAfter) { var cx = state.context; if (!cx) return CodeMirror.Pass; var closing = textAfter.charAt(0) == cx.type; if (cx.align) return cx.col + (closing ? 0 : 1); else return cx.indent + (closing ? 0 : config.indentUnit); }, blockCommentStart: "/*", blockCommentEnd: "*/", lineComment: support.commentSlashSlash ? "//" : support.commentHash ? "#" : null }; }); (function() { "use strict"; // `identifier` function hookIdentifier(stream) { // MySQL/MariaDB identifiers // ref: http://dev.mysql.com/doc/refman/5.6/en/identifier-qualifiers.html var ch; while ((ch = stream.next()) != null) { if (ch == "`" && !stream.eat("`")) return "variable-2"; } stream.backUp(stream.current().length - 1); return stream.eatWhile(/\w/) ? "variable-2" : null; } // variable token function hookVar(stream) { // variables // @@prefix.varName @varName // varName can be quoted with ` or ' or " // ref: http://dev.mysql.com/doc/refman/5.5/en/user-variables.html if (stream.eat("@")) { stream.match(/^session\./); stream.match(/^local\./); stream.match(/^global\./); } if (stream.eat("'")) { stream.match(/^.*'/); return "variable-2"; } else if (stream.eat('"')) { stream.match(/^.*"/); return "variable-2"; } else if (stream.eat("`")) { stream.match(/^.*`/); return "variable-2"; } else if (stream.match(/^[0-9a-zA-Z$\.\_]+/)) { return "variable-2"; } return null; }; // short client keyword token function hookClient(stream) { // \N means NULL // ref: http://dev.mysql.com/doc/refman/5.5/en/null-values.html if (stream.eat("N")) { return "atom"; } // \g, etc // ref: http://dev.mysql.com/doc/refman/5.5/en/mysql-commands.html return stream.match(/^[a-zA-Z.#!?]/) ? "variable-2" : null; } // these keywords are used by all SQL dialects (however, a mode can still overwrite it) var sqlKeywords = "alter and as asc between by count create delete desc distinct drop from group having in insert into is join like not on or order select set table union update values where limit "; // turn a space-separated list into an array function set(str) { var obj = {}, words = str.split(" "); for (var i = 0; i < words.length; ++i) obj[words[i]] = true; return obj; } // A generic SQL Mode. 
It's not a standard, it just try to support what is generally supported CodeMirror.defineMIME("text/x-sql", { name: "sql", keywords: set(sqlKeywords + "begin"), builtin: set("bool boolean bit blob enum long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision real date datetime year unsigned signed decimal numeric"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=]/, dateSQL: set("date time timestamp"), support: set("ODBCdotTable doubleQuote binaryNumber hexNumber") }); CodeMirror.defineMIME("text/x-mssql", { name: "sql", client: set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"), keywords: set(sqlKeywords + "begin trigger proc view index for add constraint key primary foreign collate clustered nonclustered declare"), builtin: set("bigint numeric bit smallint decimal smallmoney int tinyint money float real char varchar text nchar nvarchar ntext binary varbinary image cursor timestamp hierarchyid uniqueidentifier sql_variant xml table "), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=]/, dateSQL: set("date datetimeoffset datetime2 smalldatetime datetime time"), hooks: { "@": hookVar } }); CodeMirror.defineMIME("text/x-mysql", { name: "sql", client: set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"), keywords: set(sqlKeywords + "accessible action add after algorithm all analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general get global grant grants group group_concat handler hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password phase plugin plugins prepare preserve prev primary privileges procedure processlist profile 
profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show signal slave slow smallint snapshot soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"), builtin: set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=&|^]/, dateSQL: set("date time timestamp"), support: set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"), hooks: { "@": hookVar, "`": hookIdentifier, "\\": hookClient } }); CodeMirror.defineMIME("text/x-mariadb", { name: "sql", client: set("charset clear connect edit ego exit go help nopager notee nowarning pager print prompt quit rehash source status system tee"), keywords: set(sqlKeywords + "accessible action add after algorithm all always analyze asensitive at authors auto_increment autocommit avg avg_row_length before binary binlog both btree cache call cascade cascaded case catalog_name chain change changed character check checkpoint checksum class_origin client_statistics close coalesce code collate collation collations column columns comment commit committed completion concurrent condition connection consistent constraint contains continue contributors convert cross current current_date current_time current_timestamp current_user cursor data database databases day_hour day_microsecond day_minute day_second deallocate dec declare default delay_key_write delayed delimiter des_key_file describe deterministic dev_pop dev_samp deviance diagnostics directory disable discard distinctrow div dual dumpfile each elseif enable enclosed end ends engine engines enum errors escape escaped even event events every execute exists exit explain extended fast fetch field fields first flush for force foreign found_rows full fulltext function general generated get global grant grants group groupby_concat handler hard hash help high_priority hosts hour_microsecond hour_minute hour_second if ignore ignore_server_ids import index index_statistics infile inner innodb inout insensitive insert_method install interval invoker isolation iterate key keys kill language last leading leave left level limit linear lines list load local localtime localtimestamp lock logs low_priority master master_heartbeat_period master_ssl_verify_server_cert masters match max max_rows 
maxvalue message_text middleint migrate min min_rows minute_microsecond minute_second mod mode modifies modify mutex mysql_errno natural next no no_write_to_binlog offline offset one online open optimize option optionally out outer outfile pack_keys parser partition partitions password persistent phase plugin plugins prepare preserve prev primary privileges procedure processlist profile profiles purge query quick range read read_write reads real rebuild recover references regexp relaylog release remove rename reorganize repair repeatable replace require resignal restrict resume return returns revoke right rlike rollback rollup row row_format rtree savepoint schedule schema schema_name schemas second_microsecond security sensitive separator serializable server session share show shutdown signal slave slow smallint snapshot soft soname spatial specific sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_no_cache sql_small_result sqlexception sqlstate sqlwarning ssl start starting starts status std stddev stddev_pop stddev_samp storage straight_join subclass_origin sum suspend table_name table_statistics tables tablespace temporary terminated to trailing transaction trigger triggers truncate uncommitted undo uninstall unique unlock upgrade usage use use_frm user user_resources user_statistics using utc_date utc_time utc_timestamp value variables varying view views virtual warnings when while with work write xa xor year_month zerofill begin do then else loop repeat"), builtin: set("bool boolean bit blob decimal double float long longblob longtext medium mediumblob mediumint mediumtext time timestamp tinyblob tinyint tinytext text bigint int int1 int2 int3 int4 int8 integer float float4 float8 double char varbinary varchar varcharacter precision date datetime year unsigned signed numeric"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=&|^]/, dateSQL: set("date time timestamp"), support: set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber doubleQuote nCharCast charsetCast commentHash commentSpaceRequired"), hooks: { "@": hookVar, "`": hookIdentifier, "\\": hookClient } }); // the query language used by Apache Cassandra is called CQL, but this mime type // is called Cassandra to avoid confusion with Contextual Query Language CodeMirror.defineMIME("text/x-cassandra", { name: "sql", client: { }, keywords: set("add all allow alter and any apply as asc authorize batch begin by clustering columnfamily compact consistency count create custom delete desc distinct drop each_quorum exists filtering from grant if in index insert into key keyspace keyspaces level limit local_one local_quorum modify nan norecursive nosuperuser not of on one order password permission permissions primary quorum rename revoke schema select set storage superuser table three to token truncate ttl two type unlogged update use user users using values where with writetime"), builtin: set("ascii bigint blob boolean counter decimal double float frozen inet int list map static text timestamp timeuuid tuple uuid varchar varint"), atoms: set("false true infinity NaN"), operatorChars: /^[<>=]/, dateSQL: { }, support: set("commentSlashSlash decimallessFloat"), hooks: { } }); // this is based on Peter Raganitsch's 'plsql' mode CodeMirror.defineMIME("text/x-plsql", { name: "sql", client: set("appinfo arraysize autocommit autoprint autorecovery autotrace blockterminator break btitle cmdsep colsep compatibility compute concat copycommit copytypecheck define describe echo editfile 
embedded escape exec execute feedback flagger flush heading headsep instance linesize lno loboffset logsource long longchunksize markup native newpage numformat numwidth pagesize pause pno recsep recsepchar release repfooter repheader serveroutput shiftinout show showmode size spool sqlblanklines sqlcase sqlcode sqlcontinue sqlnumber sqlpluscompatibility sqlprefix sqlprompt sqlterminator suffix tab term termout time timing trimout trimspool ttitle underline verify version wrap"), keywords: set("abort accept access add all alter and any array arraylen as asc assert assign at attributes audit authorization avg base_table begin between binary_integer body boolean by case cast char char_base check close cluster clusters colauth column comment commit compress connect connected constant constraint crash create current currval cursor data_base database date dba deallocate debugoff debugon decimal declare default definition delay delete desc digits dispose distinct do drop else elseif elsif enable end entry escape exception exception_init exchange exclusive exists exit external fast fetch file for force form from function generic goto grant group having identified if immediate in increment index indexes indicator initial initrans insert interface intersect into is key level library like limited local lock log logging long loop master maxextents maxtrans member minextents minus mislabel mode modify multiset new next no noaudit nocompress nologging noparallel not nowait number_base object of off offline on online only open option or order out package parallel partition pctfree pctincrease pctused pls_integer positive positiven pragma primary prior private privileges procedure public raise range raw read rebuild record ref references refresh release rename replace resource restrict return returning returns reverse revoke rollback row rowid rowlabel rownum rows run savepoint schema segment select separate session set share snapshot some space split sql start statement storage subtype successful synonym tabauth table tables tablespace task terminate then to trigger truncate type union unique unlimited unrecoverable unusable update use using validate value values variable view views when whenever where while with work"), builtin: set("abs acos add_months ascii asin atan atan2 average bfile bfilename bigserial bit blob ceil character chartorowid chr clob concat convert cos cosh count dec decode deref dual dump dup_val_on_index empty error exp false float floor found glb greatest hextoraw initcap instr instrb int integer isopen last_day least lenght lenghtb ln lower lpad ltrim lub make_ref max min mlslabel mod months_between natural naturaln nchar nclob new_time next_day nextval nls_charset_decl_len nls_charset_id nls_charset_name nls_initcap nls_lower nls_sort nls_upper nlssort no_data_found notfound null number numeric nvarchar2 nvl others power rawtohex real reftohex round rowcount rowidtochar rowtype rpad rtrim serial sign signtype sin sinh smallint soundex sqlcode sqlerrm sqrt stddev string substr substrb sum sysdate tan tanh to_char text to_date to_label to_multi_byte to_number to_single_byte translate true trunc uid unlogged upper user userenv varchar varchar2 variance varying vsize xml"), operatorChars: /^[*+\-%<>!=~]/, dateSQL: set("date time timestamp"), support: set("doubleQuote nCharCast zerolessFloat binaryNumber hexNumber") }); // Created to support specific hive keywords CodeMirror.defineMIME("text/x-hive", { name: "sql", keywords: set("select alter $elem$ $key$ $value$ add after all 
analyze and archive as asc before between binary both bucket buckets by cascade case cast change cluster clustered clusterstatus collection column columns comment compute concatenate continue create cross cursor data database databases dbproperties deferred delete delimited desc describe directory disable distinct distribute drop else enable end escaped exclusive exists explain export extended external false fetch fields fileformat first format formatted from full function functions grant group having hold_ddltime idxproperties if import in index indexes inpath inputdriver inputformat insert intersect into is items join keys lateral left like limit lines load local location lock locks mapjoin materialized minus msck no_drop nocompress not of offline on option or order out outer outputdriver outputformat overwrite partition partitioned partitions percent plus preserve procedure purge range rcfile read readonly reads rebuild recordreader recordwriter recover reduce regexp rename repair replace restrict revoke right rlike row schema schemas semi sequencefile serde serdeproperties set shared show show_database sort sorted ssl statistics stored streamtable table tables tablesample tblproperties temporary terminated textfile then tmp to touch transform trigger true unarchive undo union uniquejoin unlock update use using utc utc_tmestamp view when where while with"), builtin: set("bool boolean long timestamp tinyint smallint bigint int float double date datetime unsigned string array struct map uniontype"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=]/, dateSQL: set("date timestamp"), support: set("ODBCdotTable doubleQuote binaryNumber hexNumber") }); CodeMirror.defineMIME("text/x-pgsql", { name: "sql", client: set("source"), // http://www.postgresql.org/docs/9.5/static/sql-keywords-appendix.html keywords: set(sqlKeywords + "a abort abs absent absolute access according action ada add admin after aggregate all allocate also always analyse analyze any are array array_agg array_max_cardinality asensitive assertion assignment asymmetric at atomic attribute attributes authorization avg backward base64 before begin begin_frame begin_partition bernoulli binary bit_length blob blocked bom both breadth c cache call called cardinality cascade cascaded case cast catalog catalog_name ceil ceiling chain characteristics characters character_length character_set_catalog character_set_name character_set_schema char_length check checkpoint class class_origin clob close cluster coalesce cobol collate collation collation_catalog collation_name collation_schema collect column columns column_name command_function command_function_code comment comments commit committed concurrently condition condition_number configuration conflict connect connection connection_name constraint constraints constraint_catalog constraint_name constraint_schema constructor contains content continue control conversion convert copy corr corresponding cost covar_pop covar_samp cross csv cube cume_dist current current_catalog current_date current_default_transform_group current_path current_role current_row current_schema current_time current_timestamp current_transform_group_for_type current_user cursor cursor_name cycle data database datalink datetime_interval_code datetime_interval_precision day db deallocate dec declare default defaults deferrable deferred defined definer degree delimiter delimiters dense_rank depth deref derived describe descriptor deterministic diagnostics dictionary disable discard disconnect 
dispatch dlnewcopy dlpreviouscopy dlurlcomplete dlurlcompleteonly dlurlcompletewrite dlurlpath dlurlpathonly dlurlpathwrite dlurlscheme dlurlserver dlvalue do document domain dynamic dynamic_function dynamic_function_code each element else empty enable encoding encrypted end end-exec end_frame end_partition enforced enum equals escape event every except exception exclude excluding exclusive exec execute exists exp explain expression extension external extract false family fetch file filter final first first_value flag float floor following for force foreign fortran forward found frame_row free freeze fs full function functions fusion g general generated get global go goto grant granted greatest grouping groups handler header hex hierarchy hold hour id identity if ignore ilike immediate immediately immutable implementation implicit import including increment indent index indexes indicator inherit inherits initially inline inner inout input insensitive instance instantiable instead integrity intersect intersection invoker isnull isolation k key key_member key_type label lag language large last last_value lateral lead leading leakproof least left length level library like_regex link listen ln load local localtime localtimestamp location locator lock locked logged lower m map mapping match matched materialized max maxvalue max_cardinality member merge message_length message_octet_length message_text method min minute minvalue mod mode modifies module month more move multiset mumps name names namespace national natural nchar nclob nesting new next nfc nfd nfkc nfkd nil no none normalize normalized nothing notify notnull nowait nth_value ntile null nullable nullif nulls number object occurrences_regex octets octet_length of off offset oids old only open operator option options ordering ordinality others out outer output over overlaps overlay overriding owned owner p pad parameter parameter_mode parameter_name parameter_ordinal_position parameter_specific_catalog parameter_specific_name parameter_specific_schema parser partial partition pascal passing passthrough password percent percentile_cont percentile_disc percent_rank period permission placing plans pli policy portion position position_regex power precedes preceding prepare prepared preserve primary prior privileges procedural procedure program public quote range rank read reads reassign recheck recovery recursive ref references referencing refresh regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy regr_syy reindex relative release rename repeatable replace replica requiring reset respect restart restore restrict result return returned_cardinality returned_length returned_octet_length returned_sqlstate returning returns revoke right role rollback rollup routine routine_catalog routine_name routine_schema row rows row_count row_number rule savepoint scale schema schema_name scope scope_catalog scope_name scope_schema scroll search second section security selective self sensitive sequence sequences serializable server server_name session session_user setof sets share show similar simple size skip snapshot some source space specific specifictype specific_name sql sqlcode sqlerror sqlexception sqlstate sqlwarning sqrt stable standalone start state statement static statistics stddev_pop stddev_samp stdin stdout storage strict strip structure style subclass_origin submultiset substring substring_regex succeeds sum symmetric sysid system system_time system_user t tables tablesample tablespace table_name temp 
template temporary then ties timezone_hour timezone_minute to token top_level_count trailing transaction transactions_committed transactions_rolled_back transaction_active transform transforms translate translate_regex translation treat trigger trigger_catalog trigger_name trigger_schema trim trim_array true truncate trusted type types uescape unbounded uncommitted under unencrypted unique unknown unlink unlisten unlogged unnamed unnest until untyped upper uri usage user user_defined_type_catalog user_defined_type_code user_defined_type_name user_defined_type_schema using vacuum valid validate validator value value_of varbinary variadic var_pop var_samp verbose version versioning view views volatile when whenever whitespace width_bucket window within work wrapper write xmlagg xmlattributes xmlbinary xmlcast xmlcomment xmlconcat xmldeclaration xmldocument xmlelement xmlexists xmlforest xmliterate xmlnamespaces xmlparse xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltext xmlvalidate year yes loop repeat"), // http://www.postgresql.org/docs/9.5/static/datatype.html builtin: set("bigint int8 bigserial serial8 bit varying varbit boolean bool box bytea character char varchar cidr circle date double precision float8 inet integer int int4 interval json jsonb line lseg macaddr money numeric decimal path pg_lsn point polygon real float4 smallint int2 smallserial serial2 serial serial4 text time without zone with timetz timestamp timestamptz tsquery tsvector txid_snapshot uuid xml"), atoms: set("false true null unknown"), operatorChars: /^[*+\-%<>!=&|^]/, dateSQL: set("date time timestamp"), support: set("ODBCdotTable decimallessFloat zerolessFloat binaryNumber hexNumber nCharCast charsetCast commentHash commentSpaceRequired") }); }()); });
How Properties of Mime Types are used by SQL Mode ================================================= keywords: A list of keywords you want to be highlighted. builtin: A list of builtin types you want to be highlighted (if you want types to be of class "builtin" instead of "keyword"). operatorChars: All characters that must be handled as operators. client: Commands parsed and executed by the client (not the server). support: A list of supported syntaxes which are not common, but are supported by more than 1 DBMS. * ODBCdotTable: .tableName * zerolessFloat: .1 * doubleQuote * nCharCast: N'string' * charsetCast: _utf8'string' * commentHash: use # char for comments * commentSlashSlash: use // for comments * commentSpaceRequired: require a space after -- for comments atoms: Keywords that must be highlighted as atoms. Some DBMS's support more atoms than others: UNKNOWN, INFINITY, UNDERFLOW, NaN... dateSQL: Used for date/time SQL standard syntax, because not all DBMS's support the same temporal types. */
/*
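The comment block above documents the properties a dialect definition can set. As a hedged illustration (not part of the original mode file), a custom dialect could be registered through CodeMirror.defineMIME using those same properties; the MIME string "text/x-sqlite" and the particular keyword, builtin and support values below are assumptions chosen for the sketch, not shipped defaults.

// Illustrative sketch only: registers a hypothetical dialect with the
// properties documented above. All concrete values are assumptions.
(function() {
  // turn a space-separated list into a lookup object, as the mode file does
  function set(str) {
    var obj = {}, words = str.split(" ");
    for (var i = 0; i < words.length; ++i) obj[words[i]] = true;
    return obj;
  }
  CodeMirror.defineMIME("text/x-sqlite", {
    name: "sql",                                  // reuse the generic SQL tokenizer
    keywords: set("select insert update delete from where group by order limit vacuum pragma"),
    builtin: set("integer real text blob numeric"),
    atoms: set("false true null"),
    operatorChars: /^[*+\-%<>!=]/,                // characters treated as operators
    dateSQL: set("date time timestamp datetime"),
    support: set("decimallessFloat zerolessFloat doubleQuote")
  });
}());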
setup.py
#!/usr/bin/env python import sys import os from setuptools import setup, find_packages if len(sys.argv) == 1:
if sys.argv[1] == 'test': from subprocess import call sys.exit(call([sys.executable, '-m', 'pytest'] + sys.argv[2:])) # Create the resource file dataflow/git_revision if os.system('"{sys.executable}" dataflow/rev.py'.format(sys=sys)) != 0: print("setup.py failed to build dataflow/git_revision", file=sys.stderr) sys.exit(1) packages = find_packages(exclude=['reflbin']) #sys.dont_write_bytecode = False dist = setup( name='reductus', version='0.1b2', author='Paul Kienzle', author_email='[email protected]', url='http://github.com/reductus/reductus', description='Data reduction for neutron scattering', long_description=open('README.rst').read(), classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Science/Research', 'License :: Public Domain', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3.7', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Chemistry', 'Topic :: Scientific/Engineering :: Physics', ], zip_safe=False, packages=packages, include_package_data=True, entry_points = { 'console_scripts': ['reductus=web_gui.run:main'], }, install_requires=[ 'scipy', 'numpy', 'h5py', 'uncertainties', 'docutils', 'wheel', 'pytz', 'msgpack-python', 'flask', ], extras_require={ 'masked_curve_fit': ['numdifftools'], }, tests_require=['pytest'], ) # End of file
sys.argv.append('install')
About.js
import React from 'react' const About = () => ( <div> <br/> <h3>About Recipe Box: </h3> <p>Recipe Box is a cookbook in your pocket. Our service allows you to add and remove any recipes and saves all your recipes in our database so that they will never be lost. We hope to provide the best experience in the kitchen.</p> </div> )
export default About
preprocess_2nd.py
""" BROS Copyright 2022-present NAVER Corp. Apache License v2.0 Do 2nd preprocess on top of the result of the 'preprocess.sh' file. Reference: https://github.com/microsoft/unilm/blob/master/layoutlm/deprecated/examples/seq_labeling/run_seq_labeling.py """ import json import os from collections import Counter from tqdm import tqdm from transformers import BertTokenizer MAX_SEQ_LENGTH = 512 MODEL_TYPE = "bert" VOCA = "bert-base-uncased" INPUT_PATH = "./data" OUTPUT_PATH = "../../datasets/funsd" os.makedirs(OUTPUT_PATH, exist_ok=True) os.makedirs(os.path.join(OUTPUT_PATH, "preprocessed"), exist_ok=True) def main(): for dataset_split in ["train", "val"]: print(f"dataset_split: {dataset_split}") do_2nd_preprocess(dataset_split) os.system(f"cp -r {os.path.join(INPUT_PATH, 'training_data')} {OUTPUT_PATH}") os.system(f"cp -r {os.path.join(INPUT_PATH, 'testing_data')} {OUTPUT_PATH}") os.system(f"cp {os.path.join(INPUT_PATH, 'labels.txt')} {OUTPUT_PATH}") def do_2nd_preprocess(dataset_split): label_fpath = os.path.join(INPUT_PATH, "labels.txt") labels = get_labels(label_fpath) tokenizer = BertTokenizer.from_pretrained(VOCA, do_lower_case=True) cls_token_id = tokenizer.convert_tokens_to_ids("[CLS]") sep_token_id = tokenizer.convert_tokens_to_ids("[SEP]") pad_token_id = tokenizer.convert_tokens_to_ids("[PAD]") ignore_index = -100 if dataset_split == "train": mode = "train" elif dataset_split == "val": mode = "test" else: raise ValueError(f"Invalid dataset_split={dataset_split}") examples = read_examples_from_file(INPUT_PATH, mode) features = convert_examples_to_features( examples, labels, MAX_SEQ_LENGTH, tokenizer, cls_token_at_end=bool(MODEL_TYPE in ["xlnet"]), # xlnet has a cls token at the end cls_token=tokenizer.cls_token, cls_token_segment_id=2 if MODEL_TYPE in ["xlnet"] else 0, sep_token=tokenizer.sep_token, sep_token_extra=bool(MODEL_TYPE in ["roberta"]), # roberta uses an extra separator b/w pairs of sentences, cf. github.com/pytorch/fairseq/commit/1684e166e3da03f5b600dbb7855cb98ddfcd0805 pad_on_left=bool(MODEL_TYPE in ["xlnet"]), # pad on the left for xlnet pad_token=tokenizer.convert_tokens_to_ids([tokenizer.pad_token])[0], pad_token_segment_id=4 if MODEL_TYPE in ["xlnet"] else 0, pad_token_label_id=ignore_index, ) # Save image ocr files image_cnter = Counter() preprocessed_fnames = [] for example, feature in tqdm(zip(examples, features), total=len(examples)): # Example: guid, words, labels, boxes, actual_bboxes, file_name, page_size # Feature: input_ids, input_mask, segment_ids, label_ids, # boxes, actual_bboxes, file_name, page_size this_file_name = "{}_{}.json".format( example.file_name[: example.file_name.rfind(".")], image_cnter[example.file_name], ) image_cnter[example.file_name] += 1 data_obj = {} # meta data_obj["meta"] = {} # data_obj["meta"]["image_size"] # = example.page_size[::-1] + [3] # [height, width, rgb?] 
height, width = example.page_size[::-1] data_obj["meta"]["imageSize"] = {"width": width, "height": height} data_obj["meta"]["voca"] = VOCA if mode == "train": data_obj["meta"]["image_path"] = os.path.join( "training_data", "images", example.file_name ) elif mode == "test": data_obj["meta"]["image_path"] = os.path.join( "testing_data", "images", example.file_name ) else: raise ValueError(f"Unknown mode={mode}") # words # text, tokens, boundingBox data_obj["words"] = [] this_input_ids = [] for word, bb in zip(example.words, example.actual_bboxes): word_tokens = [] for splitted_word in word.split(): word_tokens.append( tokenizer.convert_tokens_to_ids(tokenizer.tokenize(splitted_word)) ) tokens = tokenizer.convert_tokens_to_ids(tokenizer.tokenize(word)) word_obj = { "text": word, "tokens": tokens, "boundingBox": [ [bb[0], bb[1]], [bb[2], bb[1]], [bb[2], bb[3]], [bb[0], bb[3]], ], } data_obj["words"].append(word_obj) this_input_ids.extend(tokens) if VOCA == "bert-base-uncased": feature_input_ids = feature.input_ids assert feature_input_ids[0] == cls_token_id feature_input_ids = feature_input_ids[ 1 : feature_input_ids.index(sep_token_id) ] assert feature_input_ids == this_input_ids else: raise NotImplementedError # masks, labels data_obj["parse"] = {} if VOCA == "bert-base-uncased": data_obj["parse"]["seq_len"] = sum(feature.input_mask) data_obj["parse"]["input_ids"] = feature.input_ids data_obj["parse"]["input_mask"] = feature.input_mask data_obj["parse"]["label_ids"] = feature.label_ids else: raise NotImplementedError # Save file name to list preprocessed_fnames.append(os.path.join("preprocessed", this_file_name)) # Save to file data_obj_file = os.path.join(OUTPUT_PATH, "preprocessed", this_file_name) with open(data_obj_file, "w", encoding="utf-8") as fp: json.dump(data_obj, fp, ensure_ascii=False) # Save file name list file preprocessed_filelist_file = os.path.join( OUTPUT_PATH, f"preprocessed_files_{dataset_split}.txt" ) with open(preprocessed_filelist_file, "w", encoding="utf-8") as fp: fp.write("\n".join(preprocessed_fnames)) def get_labels(path): with open(path, "r") as f: labels = f.read().splitlines() if "O" not in labels: labels = ["O"] + labels return labels class InputExample(object): """A single training/test example for token classification.""" def __init__(self, guid, words, labels, boxes, actual_bboxes, file_name, page_size): """Constructs a InputExample. Args: guid: Unique id for the example. words: list. The words of the sequence. labels: (Optional) list. The labels for each word of the sequence. This should be specified for train and dev examples, but not for test examples. 
""" self.guid = guid self.words = words self.labels = labels self.boxes = boxes self.actual_bboxes = actual_bboxes self.file_name = file_name self.page_size = page_size def read_examples_from_file(data_dir, mode): file_path = os.path.join(data_dir, "{}.txt".format(mode)) box_file_path = os.path.join(data_dir, "{}_box.txt".format(mode)) image_file_path = os.path.join(data_dir, "{}_image.txt".format(mode)) guid_index = 1 examples = [] with open(file_path, encoding="utf-8") as f, open( box_file_path, encoding="utf-8" ) as fb, open(image_file_path, encoding="utf-8") as fi: words = [] boxes = [] actual_bboxes = [] file_name = None page_size = None labels = [] for line, bline, iline in zip(f, fb, fi): if line.startswith("-DOCSTART-") or line == "" or line == "\n": if words: examples.append( InputExample( guid="{}-{}".format(mode, guid_index), words=words, labels=labels, boxes=boxes, actual_bboxes=actual_bboxes, file_name=file_name, page_size=page_size, ) ) guid_index += 1 words = [] boxes = [] actual_bboxes = [] file_name = None page_size = None labels = [] else: splits = line.split("\t") bsplits = bline.split("\t")
isplits = iline.split("\t") assert len(splits) == 2 assert len(bsplits) == 2 assert len(isplits) == 4 assert splits[0] == bsplits[0] words.append(splits[0]) if len(splits) > 1: labels.append(splits[-1].replace("\n", "")) box = bsplits[-1].replace("\n", "") box = [int(b) for b in box.split()] boxes.append(box) actual_bbox = [int(b) for b in isplits[1].split()] actual_bboxes.append(actual_bbox) page_size = [int(i) for i in isplits[2].split()] file_name = isplits[3].strip() else: # Examples could have no label for mode = "test" labels.append("O") if words: examples.append( InputExample( guid="%s-%d".format(mode, guid_index), words=words, labels=labels, boxes=boxes, actual_bboxes=actual_bboxes, file_name=file_name, page_size=page_size, ) ) return examples class InputFeatures(object): """A single set of features of data.""" def __init__( self, input_ids, input_mask, segment_ids, label_ids, boxes, actual_bboxes, file_name, page_size, ): assert ( 0 <= all(boxes) <= 1000 ), "Error with input bbox ({}): the coordinate value is not between 0 and 1000".format( boxes ) self.input_ids = input_ids self.input_mask = input_mask self.segment_ids = segment_ids self.label_ids = label_ids self.boxes = boxes self.actual_bboxes = actual_bboxes self.file_name = file_name self.page_size = page_size def convert_examples_to_features( examples, label_list, max_seq_length, tokenizer, cls_token_at_end=False, cls_token="[CLS]", cls_token_segment_id=1, sep_token="[SEP]", sep_token_extra=False, pad_on_left=False, pad_token=0, cls_token_box=[0, 0, 0, 0], sep_token_box=[1000, 1000, 1000, 1000], pad_token_box=[0, 0, 0, 0], pad_token_segment_id=0, pad_token_label_id=-1, sequence_a_segment_id=0, mask_padding_with_zero=True, ): """Loads a data file into a list of `InputBatch`s `cls_token_at_end` define the location of the CLS token: - False (Default, BERT/XLM pattern): [CLS] + A + [SEP] + B + [SEP] - True (XLNet/GPT pattern): A + [SEP] + B + [SEP] + [CLS] `cls_token_segment_id` define the segment id associated to the CLS token (0 for BERT, 2 for XLNet) """ label_map = {label: i for i, label in enumerate(label_list)} features = [] for (ex_index, example) in enumerate(examples): file_name = example.file_name page_size = example.page_size width, height = page_size # if ex_index % 10000 == 0: # print("Writing example {} of {}".format(ex_index, len(examples))) tokens = [] token_boxes = [] actual_bboxes = [] label_ids = [] for word, label, box, actual_bbox in zip( example.words, example.labels, example.boxes, example.actual_bboxes ): word_tokens = tokenizer.tokenize(word) tokens.extend(word_tokens) token_boxes.extend([box] * len(word_tokens)) actual_bboxes.extend([actual_bbox] * len(word_tokens)) # Use the real label id for the first token of the word, and padding ids for the remaining tokens label_ids.extend( [label_map[label]] + [pad_token_label_id] * (len(word_tokens) - 1) ) # Account for [CLS] and [SEP] with "- 2" and with "- 3" for RoBERTa. special_tokens_count = 3 if sep_token_extra else 2 if len(tokens) > max_seq_length - special_tokens_count: tokens = tokens[: (max_seq_length - special_tokens_count)] token_boxes = token_boxes[: (max_seq_length - special_tokens_count)] actual_bboxes = actual_bboxes[: (max_seq_length - special_tokens_count)] label_ids = label_ids[: (max_seq_length - special_tokens_count)] # The convention in BERT is: # (a) For sequence pairs: # tokens: [CLS] is this jack ##son ##ville ? [SEP] no it is not . 
[SEP] # type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1 # (b) For single sequences: # tokens: [CLS] the dog is hairy . [SEP] # type_ids: 0 0 0 0 0 0 0 # # Where "type_ids" are used to indicate whether this is the first # sequence or the second sequence. The embedding vectors for `type=0` and # `type=1` were learned during pre-training and are added to the wordpiece # embedding vector (and position vector). This is not *strictly* necessary # since the [SEP] token unambiguously separates the sequences, but it makes # it easier for the model to learn the concept of sequences. # # For classification tasks, the first vector (corresponding to [CLS]) is # used as as the "sentence vector". Note that this only makes sense because # the entire model is fine-tuned. tokens += [sep_token] token_boxes += [sep_token_box] actual_bboxes += [[0, 0, width, height]] label_ids += [pad_token_label_id] if sep_token_extra: # roberta uses an extra separator b/w pairs of sentences tokens += [sep_token] token_boxes += [sep_token_box] actual_bboxes += [[0, 0, width, height]] label_ids += [pad_token_label_id] segment_ids = [sequence_a_segment_id] * len(tokens) if cls_token_at_end: tokens += [cls_token] token_boxes += [cls_token_box] actual_bboxes += [[0, 0, width, height]] label_ids += [pad_token_label_id] segment_ids += [cls_token_segment_id] else: tokens = [cls_token] + tokens token_boxes = [cls_token_box] + token_boxes actual_bboxes = [[0, 0, width, height]] + actual_bboxes label_ids = [pad_token_label_id] + label_ids segment_ids = [cls_token_segment_id] + segment_ids input_ids = tokenizer.convert_tokens_to_ids(tokens) # The mask has 1 for real tokens and 0 for padding tokens. Only real # tokens are attended to. input_mask = [1 if mask_padding_with_zero else 0] * len(input_ids) # Zero-pad up to the sequence length. padding_length = max_seq_length - len(input_ids) if pad_on_left: input_ids = ([pad_token] * padding_length) + input_ids input_mask = ( [0 if mask_padding_with_zero else 1] * padding_length ) + input_mask segment_ids = ([pad_token_segment_id] * padding_length) + segment_ids label_ids = ([pad_token_label_id] * padding_length) + label_ids token_boxes = ([pad_token_box] * padding_length) + token_boxes else: input_ids += [pad_token] * padding_length input_mask += [0 if mask_padding_with_zero else 1] * padding_length segment_ids += [pad_token_segment_id] * padding_length label_ids += [pad_token_label_id] * padding_length token_boxes += [pad_token_box] * padding_length assert len(input_ids) == max_seq_length assert len(input_mask) == max_seq_length assert len(segment_ids) == max_seq_length assert len(label_ids) == max_seq_length assert len(token_boxes) == max_seq_length # if ex_index < 5: # print("*** Example ***") # print("guid: {}".format(example.guid)) # print("tokens: {}".format(" ".join([str(x) for x in tokens]))) # print("input_ids: {}".format(" ".join([str(x) for x in input_ids]))) # print("input_mask: {}".format(" ".join([str(x) for x in input_mask]))) # print("segment_ids: {}".format(" ".join([str(x) for x in segment_ids]))) # print("label_ids: {}".format(" ".join([str(x) for x in label_ids]))) # print("boxes: {}".format(" ".join([str(x) for x in token_boxes]))) # print("actual_bboxes: {}".format(" ".join([str(x) for x in actual_bboxes]))) features.append( InputFeatures( input_ids=input_ids, input_mask=input_mask, segment_ids=segment_ids, label_ids=label_ids, boxes=token_boxes, actual_bboxes=actual_bboxes, file_name=file_name, page_size=page_size, ) ) return features if __name__ == "__main__": main()
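The comments above spell out the first-sub-token labeling and -100 padding convention used by convert_examples_to_features. The following minimal, self-contained sketch shows just that idea; toy_tokenize and LABEL_MAP are assumptions standing in for BertTokenizer and the real FUNSD label set.

# Minimal sketch of the labeling convention above: the first sub-token of each
# word keeps the real label id, remaining sub-tokens get pad_token_label_id
# (-100) so the loss ignores them. The toy tokenizer is an assumption.
PAD_TOKEN_LABEL_ID = -100
LABEL_MAP = {"O": 0, "B-HEADER": 1, "I-HEADER": 2}

def toy_tokenize(word):
    # stand-in for tokenizer.tokenize(): pretend every 3 characters is a sub-token
    return [word[i:i + 3] for i in range(0, len(word), 3)] or [word]

def expand_labels(words, labels):
    tokens, label_ids = [], []
    for word, label in zip(words, labels):
        word_tokens = toy_tokenize(word)
        tokens.extend(word_tokens)
        label_ids.extend([LABEL_MAP[label]] + [PAD_TOKEN_LABEL_ID] * (len(word_tokens) - 1))
    return tokens, label_ids

tokens, label_ids = expand_labels(["Invoice", "No."], ["B-HEADER", "I-HEADER"])
# tokens    -> ['Inv', 'oic', 'e', 'No.']
# label_ids -> [1, -100, -100, 2]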
dashboard.component.ts
import {Component, OnInit} from 'angular2/core'; import {Router} from 'angular2/router'; import {Movie} from './movie'; import {MovieService} from './movie.service'; @Component({ selector: 'my-dashboard', templateUrl: 'app/dashboard.component.html', styleUrls: ['app/dashboard.component.css'] }) export class DashboardComponent implements OnInit { public movies: Movie[] = []; public listType : String; constructor(private _movieService: MovieService, private _router: Router) { this.listType = "latest"; }
this._movieService.getMovies().then(movies => this.movies = movies.slice(1,100)); } gotoDetail(movie: Movie) { this._router.navigate(['MovieDetail', { id: movie.id }]); } }
ngOnInit() {
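The dashboard only relies on MovieService exposing a Promise-returning getMovies(); the service itself is not shown here, so the following is a hypothetical sketch (the in-memory data and Injectable wiring are assumptions) of a shape that would satisfy the .then(movies => ...) usage above.

import {Injectable} from 'angular2/core';
import {Movie} from './movie';

// Hypothetical in-memory service: the app's real MovieService is not shown
// above; the only requirement visible there is a Promise-returning getMovies().
@Injectable()
export class MovieService {
  private movies: Movie[] = [];           // would normally be seeded or fetched
  getMovies(): Promise<Movie[]> {
    return Promise.resolve(this.movies);  // matches the .then(...) call in the dashboard
  }
}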
node.go
// Copyright (c) 2019, The Emergent Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package eve import ( "github.com/goki/ki/ki" "github.com/goki/ki/kit" ) // Node is the common interface for all eve nodes type Node interface { ki.Ki // NodeType returns the type of node this is (Body, Group, Joint) NodeType() NodeTypes // AsNodeBase returns a generic NodeBase for our node -- gives generic // access to all the base-level data structures without needing interface methods. AsNodeBase() *NodeBase // AsBody returns a generic Body interface for our node -- nil if not a Body AsBody() Body // IsDynamic returns true if node has Dynamic flag set -- otherwise static // Groups that contain dynamic objects set their dynamic flags. IsDynamic() bool // GroupBBox sets bounding boxes for groups based on groups or bodies. // called in a FuncDownMeLast traversal. GroupBBox() // InitAbs sets current Abs physical state parameters from Initial values // which are local, relative to parent -- is passed the parent (nil = top). // Body nodes should also set their bounding boxes. // Called in a FuncDownMeFirst traversal. InitAbs(par *NodeBase) // RelToAbs updates current world Abs physical state parameters // based on Rel values added to updated Abs values at higher levels. // Abs.LinVel is updated from the resulting change from prior position. // This is useful for manual updating of relative positions (scripted movement). // It is passed the parent (nil = top). // Body nodes should also update their bounding boxes. // Called in a FuncDownMeFirst traversal. RelToAbs(par *NodeBase) // StepPhys computes one update of the world Abs physical state parameters, // using *current* velocities -- add forces prior to calling. // Use this for physics-based state updates. // Body nodes should also update their bounding boxes. StepPhys(step float32) } // NodeBase is the basic eve node, which has position, rotation, velocity // and computed bounding boxes, etc. // There are only three different kinds of Nodes: Group, Body, and Joint type NodeBase struct { ki.Node Initial Phys `view:"inline" desc:"initial position, orientation, velocity in *local* coordinates (relative to parent)"` Rel Phys `view:"inline" desc:"current relative (local) position, orientation, velocity -- only change these values, as abs values are computed therefrom"` Abs Phys `inactive:"+" view:"inline" desc:"current absolute (world) position, orientation, velocity"` BBox BBox `desc:"bounding box in world coordinates (aggregated for groups)"` } var KiT_NodeBase = kit.Types.AddType(&NodeBase{}, NodeBaseProps) var NodeBaseProps = ki.Props{ "EnumType:Flag": KiT_NodeFlags, } func (nb *NodeBase) AsNodeBase() *NodeBase { return nb } func (nb *NodeBase) AsBody() Body { return nil } func (nb *NodeBase) IsDynamic() bool { return nb.HasFlag(int(Dynamic)) } // InitAbsBase is the base-level version of InitAbs -- most nodes call this. // InitAbs sets current Abs physical state parameters from Initial values // which are local, relative to parent -- is passed the parent (nil = top). // Body nodes should also set their bounding boxes. // Called in a FuncDownMeFirst traversal. func (nb *NodeBase) InitAbsBase(par *NodeBase) { if nb.Initial.Quat.IsNil() { nb.Initial.Quat.SetIdentity() } nb.Rel = nb.Initial if par != nil { nb.Abs.FromRel(&nb.Initial, &par.Abs) } else { nb.Abs = nb.Initial } } // RelToAbsBase is the base-level version of RelToAbs -- most nodes call this. 
// note: Group WorldRelToAbs ensures only called on Dynamic nodes. // RelToAbs updates current world Abs physical state parameters // based on Rel values added to updated Abs values at higher levels. // Abs.LinVel is updated from the resulting change from prior position. // This is useful for manual updating of relative positions (scripted movement). // It is passed the parent (nil = top). // Body nodes should also update their bounding boxes. // Called in a FuncDownMeFirst traversal. func (nb *NodeBase) RelToAbsBase(par *NodeBase) { ppos := nb.Abs.Pos if par != nil { nb.Abs.FromRel(&nb.Rel, &par.Abs) } else { nb.Abs = nb.Rel } nb.Abs.LinVel = nb.Abs.Pos.Sub(ppos) // needed for VelBBox prjn } // StepPhysBase is the base-level version of StepPhys -- most nodes call this. // note: Group WorldRelToAbs ensures only called on Dynamic nodes. // Computes one update of the world Abs physical state parameters, // using *current* velocities -- add forces prior to calling. // Use this for physics-based state updates. // Body nodes should also update their bounding boxes. func (nb *NodeBase) StepPhysBase(step float32) { nb.Abs.StepByAngVel(step) nb.Abs.StepByLinVel(step) } // KiToNode converts Ki to a Node interface and a NodeBase obj -- nil if not. func
(k ki.Ki) (Node, *NodeBase) { if k == nil || k.This() == nil { // this also checks for destroyed return nil, nil } nii, ok := k.(Node) if ok { return nii, nii.AsNodeBase() } return nil, nil } ///////////////////////////////////////////////////////////////////// // NodeTypes // NodeTypes is a list of node types type NodeTypes int const ( // note: uppercase required to not conflict with type names BODY NodeTypes = iota GROUP JOINT NodeTypesN ) //go:generate stringer -type=NodeTypes var KiT_NodeTypes = kit.Enums.AddEnum(NodeTypesN, kit.NotBitFlag, nil) ///////////////////////////////////////////////////////////////////// // NodeFlags // NodeFlags define eve node bitflags -- uses ki Flags field (64 bit capacity) type NodeFlags int //go:generate stringer -type=NodeFlags var KiT_NodeFlags = kit.Enums.AddEnumExt(ki.KiT_Flags, NodeFlagsN, kit.BitFlag, nil) const ( // Dynamic means that this node can move -- if not so marked, it is // a Static node. Any top-level group that is not Dynamic is immediately // pruned from further consideration, so top-level groups should be // separated into Dynamic and Static nodes at the start. Dynamic NodeFlags = NodeFlags(ki.FlagsN) + iota NodeFlagsN )
KiToNode
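As a hedged sketch of how the interface above is meant to be used (the stepIfDynamic helper is hypothetical and assumes it lives in the same eve package), a traversal can use KiToNode and IsDynamic to skip non-eve and static nodes before integrating a physics step.

// Illustrative sketch only: stepIfDynamic is a hypothetical helper, not part
// of the eve package. It integrates one step for dynamic eve nodes and skips
// everything else.
func stepIfDynamic(k ki.Ki, step float32) {
	nii, nb := KiToNode(k)
	if nii == nil || !nb.IsDynamic() {
		return // not an eve node, or static: nothing to integrate
	}
	nii.StepPhys(step) // uses current velocities; add forces before calling
}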
unsafe_test.go
// +build !purego,!appengine,!js package atom import ( "testing" "unsafe" ) func
(t *testing.T) { var p Pointer if p.Value() != nil { t.Fatal("Expected initial value to be nil") } var t1, t2 uint64 v1 := unsafe.Pointer(&t1) p.Set(v1) if v := p.Value(); v != v1 { t.Fatal("Value unchanged") } v2 := unsafe.Pointer(&t2) if p.CompareAndSwap(v2, v2) { t.Fatal("CompareAndSwap reported swap when the old value did not match") } if v := p.Value(); v != v1 { t.Fatal("Value changed") } if !p.CompareAndSwap(v1, v2) { t.Fatal("CompareAndSwap did not report a swap") } if v := p.Value(); v != v2 { t.Fatal("Value unchanged") } if p.Swap(v1) != v2 { t.Fatal("Old value does not match") } if v := p.Value(); v != v1 { t.Fatal("Value unchanged") } }
TestPointer
context.rs
//! Type context book-keeping. use crate::arena::Arena; use crate::dep_graph::DepGraph; use crate::hir::exports::ExportMap; use crate::hir::place::Place as HirPlace; use crate::ich::{NodeIdHashingMode, StableHashingContext}; use crate::infer::canonical::{Canonical, CanonicalVarInfo, CanonicalVarInfos}; use crate::lint::{struct_lint_level, LintDiagnosticBuilder, LintLevelSource}; use crate::middle; use crate::middle::cstore::{CrateStoreDyn, EncodedMetadata}; use crate::middle::resolve_lifetime::{self, LifetimeScopeForPath, ObjectLifetimeDefault}; use crate::middle::stability; use crate::mir::interpret::{self, Allocation, ConstValue, Scalar}; use crate::mir::{Body, Field, Local, Place, PlaceElem, ProjectionKind, Promoted}; use crate::traits; use crate::ty::query::{self, OnDiskCache, TyCtxtAt}; use crate::ty::subst::{GenericArg, GenericArgKind, InternalSubsts, Subst, SubstsRef, UserSubsts}; use crate::ty::TyKind::*; use crate::ty::{ self, AdtDef, AdtKind, Binder, BindingMode, BoundVar, CanonicalPolyFnSig, Const, ConstVid, DefIdTree, ExistentialPredicate, FloatTy, FloatVar, FloatVid, GenericParamDefKind, InferConst, InferTy, IntTy, IntVar, IntVid, List, MainDefinition, ParamConst, ParamTy, PolyFnSig, Predicate, PredicateInner, PredicateKind, ProjectionTy, Region, RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, TyVid, TypeAndMut, UintTy, Visibility, }; use rustc_ast as ast; use rustc_ast::expand::allocator::AllocatorKind; use rustc_attr as attr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; use rustc_data_structures::profiling::SelfProfilerRef; use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap}; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableVec}; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{self, Lock, Lrc, WorkerLocal}; use rustc_errors::ErrorReported; use rustc_hir as hir; use rustc_hir::def::{DefKind, Res}; use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, LOCAL_CRATE}; use rustc_hir::definitions::Definitions; use rustc_hir::intravisit::Visitor; use rustc_hir::lang_items::LangItem; use rustc_hir::{ Constness, HirId, ItemKind, ItemLocalId, ItemLocalMap, ItemLocalSet, Node, TraitCandidate, }; use rustc_index::vec::{Idx, IndexVec}; use rustc_macros::HashStable; use rustc_middle::mir::FakeReadCause; use rustc_serialize::opaque::{FileEncodeResult, FileEncoder}; use rustc_session::config::{BorrowckMode, CrateType, OutputFilenames}; use rustc_session::lint::{Level, Lint}; use rustc_session::Session; use rustc_span::source_map::MultiSpan; use rustc_span::symbol::{kw, sym, Ident, Symbol}; use rustc_span::{Span, DUMMY_SP}; use rustc_target::abi::{Layout, TargetDataLayout, VariantIdx}; use rustc_target::spec::abi; use smallvec::SmallVec; use std::any::Any; use std::borrow::Borrow; use std::cmp::Ordering; use std::collections::hash_map::{self, Entry}; use std::fmt; use std::hash::{Hash, Hasher}; use std::iter; use std::mem; use std::ops::{Bound, Deref}; use std::sync::Arc; /// A type that is not publicly constructable. This prevents people from making [`TyKind::Error`]s /// except through the error-reporting functions on a [`tcx`][TyCtxt]. #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] #[derive(TyEncodable, TyDecodable, HashStable)] pub struct DelaySpanBugEmitted(()); type InternedSet<'tcx, T> = ShardedHashMap<Interned<'tcx, T>, ()>; pub struct CtxtInterners<'tcx> { /// The arena that types, regions, etc. are allocated from. 
arena: &'tcx WorkerLocal<Arena<'tcx>>, /// Specifically use a speedy hash algorithm for these hash sets, since /// they're accessed quite often. type_: InternedSet<'tcx, TyS<'tcx>>, type_list: InternedSet<'tcx, List<Ty<'tcx>>>, substs: InternedSet<'tcx, InternalSubsts<'tcx>>, canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>, region: InternedSet<'tcx, RegionKind>, poly_existential_predicates: InternedSet<'tcx, List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>>, predicate: InternedSet<'tcx, PredicateInner<'tcx>>, predicates: InternedSet<'tcx, List<Predicate<'tcx>>>, projs: InternedSet<'tcx, List<ProjectionKind>>, place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>, const_: InternedSet<'tcx, Const<'tcx>>, /// Const allocations. allocation: InternedSet<'tcx, Allocation>, bound_variable_kinds: InternedSet<'tcx, List<ty::BoundVariableKind>>, } impl<'tcx> CtxtInterners<'tcx> { fn new(arena: &'tcx WorkerLocal<Arena<'tcx>>) -> CtxtInterners<'tcx> { CtxtInterners { arena, type_: Default::default(), type_list: Default::default(), substs: Default::default(), region: Default::default(), poly_existential_predicates: Default::default(), canonical_var_infos: Default::default(), predicate: Default::default(), predicates: Default::default(), projs: Default::default(), place_elems: Default::default(), const_: Default::default(), allocation: Default::default(), bound_variable_kinds: Default::default(), } } /// Interns a type. #[allow(rustc::usage_of_ty_tykind)] #[inline(never)] fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> { self.type_ .intern(kind, |kind| { let flags = super::flags::FlagComputation::for_kind(&kind); let ty_struct = TyS { kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; Interned(self.arena.alloc(ty_struct)) }) .0 } #[inline(never)] fn intern_predicate( &self, kind: Binder<'tcx, PredicateKind<'tcx>>, ) -> &'tcx PredicateInner<'tcx> { self.predicate .intern(kind, |kind| { let flags = super::flags::FlagComputation::for_predicate(kind); let predicate_struct = PredicateInner { kind, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, }; Interned(self.arena.alloc(predicate_struct)) }) .0 } } pub struct CommonTypes<'tcx> { pub unit: Ty<'tcx>, pub bool: Ty<'tcx>, pub char: Ty<'tcx>, pub isize: Ty<'tcx>, pub i8: Ty<'tcx>, pub i16: Ty<'tcx>, pub i32: Ty<'tcx>, pub i64: Ty<'tcx>, pub i128: Ty<'tcx>, pub usize: Ty<'tcx>, pub u8: Ty<'tcx>, pub u16: Ty<'tcx>, pub u32: Ty<'tcx>, pub u64: Ty<'tcx>, pub u128: Ty<'tcx>, pub f32: Ty<'tcx>, pub f64: Ty<'tcx>, pub str_: Ty<'tcx>, pub never: Ty<'tcx>, pub self_param: Ty<'tcx>, /// Dummy type used for the `Self` of a `TraitRef` created for converting /// a trait object, and which gets removed in `ExistentialTraitRef`. /// This type must not appear anywhere in other converted types. pub trait_object_dummy_self: Ty<'tcx>, } pub struct CommonLifetimes<'tcx> { /// `ReEmpty` in the root universe. pub re_root_empty: Region<'tcx>, /// `ReStatic` pub re_static: Region<'tcx>, /// Erased region, used after type-checking pub re_erased: Region<'tcx>, } pub struct CommonConsts<'tcx> { pub unit: &'tcx Const<'tcx>, } pub struct LocalTableInContext<'a, V> { hir_owner: LocalDefId, data: &'a ItemLocalMap<V>, } /// Validate that the given HirId (respectively its `local_id` part) can be /// safely used as a key in the maps of a TypeckResults. For that to be /// the case, the HirId must have the same `owner` as all the other IDs in /// this table (signified by `hir_owner`). 
Otherwise the HirId /// would be in a different frame of reference and using its `local_id` /// would result in lookup errors, or worse, in silently wrong data being /// stored/returned. #[inline] fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { if hir_id.owner != hir_owner { invalid_hir_id_for_typeck_results(hir_owner, hir_id); } } #[cold] #[inline(never)] fn invalid_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { ty::tls::with(|tcx| { bug!( "node {} with HirId::owner {:?} cannot be placed in TypeckResults with hir_owner {:?}", tcx.hir().node_to_string(hir_id), hir_id.owner, hir_owner ) }); } impl<'a, V> LocalTableInContext<'a, V> { pub fn contains_key(&self, id: hir::HirId) -> bool { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.contains_key(&id.local_id) } pub fn get(&self, id: hir::HirId) -> Option<&V> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.get(&id.local_id) } pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> { self.data.iter() } } impl<'a, V> ::std::ops::Index<hir::HirId> for LocalTableInContext<'a, V> { type Output = V; fn index(&self, key: hir::HirId) -> &V { self.get(key).expect("LocalTableInContext: key not found") } } pub struct LocalTableInContextMut<'a, V> { hir_owner: LocalDefId, data: &'a mut ItemLocalMap<V>, } impl<'a, V> LocalTableInContextMut<'a, V> { pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.get_mut(&id.local_id) } pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.entry(id.local_id) } pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.insert(id.local_id, val) } pub fn remove(&mut self, id: hir::HirId) -> Option<V> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.data.remove(&id.local_id) } } /// All information necessary to validate and reveal an `impl Trait`. #[derive(TyEncodable, TyDecodable, Debug, HashStable)] pub struct ResolvedOpaqueTy<'tcx> { /// The revealed type as seen by this function. pub concrete_type: Ty<'tcx>, /// Generic parameters on the opaque type as passed by this function. /// For `type Foo<A, B> = impl Bar<A, B>; fn foo<T, U>() -> Foo<T, U> { .. }` /// this is `[T, U]`, not `[A, B]`. pub substs: SubstsRef<'tcx>, } /// Whenever a value may be live across a generator yield, the type of that value winds up in the /// `GeneratorInteriorTypeCause` struct. This struct adds additional information about such /// captured types that can be useful for diagnostics. In particular, it stores the span that /// caused a given type to be recorded, along with the scope that enclosed the value (which can /// be used to find the await that the value is live across). /// /// For example: /// /// ```ignore (pseudo-Rust) /// async move { /// let x: T = expr; /// foo.await /// ... /// } /// ``` /// /// Here, we would store the type `T`, the span of the value `x`, the "scope-span" for /// the scope that contains `x`, the expr `T` evaluated from, and the span of `foo.await`. #[derive(TyEncodable, TyDecodable, Clone, Debug, Eq, Hash, PartialEq, HashStable)] #[derive(TypeFoldable)] pub struct GeneratorInteriorTypeCause<'tcx> { /// Type of the captured binding. pub ty: Ty<'tcx>, /// Span of the binding that was captured. pub span: Span, /// Span of the scope of the captured binding. 
pub scope_span: Option<Span>, /// Span of `.await` or `yield` expression. pub yield_span: Span, /// Expr which the type evaluated from. pub expr: Option<hir::HirId>, } #[derive(TyEncodable, TyDecodable, Debug)] pub struct TypeckResults<'tcx> { /// The `HirId::owner` all `ItemLocalId`s in this table are relative to. pub hir_owner: LocalDefId, /// Resolved definitions for `<T>::X` associated paths and /// method calls, including those of overloaded operators. type_dependent_defs: ItemLocalMap<Result<(DefKind, DefId), ErrorReported>>, /// Resolved field indices for field accesses in expressions (`S { field }`, `obj.field`) /// or patterns (`S { field }`). The index is often useful by itself, but to learn more /// about the field you also need definition of the variant to which the field /// belongs, but it may not exist if it's a tuple field (`tuple.0`). field_indices: ItemLocalMap<usize>, /// Stores the types for various nodes in the AST. Note that this table /// is not guaranteed to be populated until after typeck. See /// typeck::check::fn_ctxt for details. node_types: ItemLocalMap<Ty<'tcx>>, /// Stores the type parameters which were substituted to obtain the type /// of this node. This only applies to nodes that refer to entities /// parameterized by type parameters, such as generic fns, types, or /// other items. node_substs: ItemLocalMap<SubstsRef<'tcx>>, /// This will either store the canonicalized types provided by the user /// or the substitutions that the user explicitly gave (if any) attached /// to `id`. These will not include any inferred values. The canonical form /// is used to capture things like `_` or other unspecified values. /// /// For example, if the user wrote `foo.collect::<Vec<_>>()`, then the /// canonical substitutions would include only `for<X> { Vec<X> }`. /// /// See also `AscribeUserType` statement in MIR. user_provided_types: ItemLocalMap<CanonicalUserType<'tcx>>, /// Stores the canonicalized types provided by the user. See also /// `AscribeUserType` statement in MIR. pub user_provided_sigs: DefIdMap<CanonicalPolyFnSig<'tcx>>, adjustments: ItemLocalMap<Vec<ty::adjustment::Adjustment<'tcx>>>, /// Stores the actual binding mode for all instances of hir::BindingAnnotation. pat_binding_modes: ItemLocalMap<BindingMode>, /// Stores the types which were implicitly dereferenced in pattern binding modes /// for later usage in THIR lowering. For example, /// /// ``` /// match &&Some(5i32) { /// Some(n) => {}, /// _ => {}, /// } /// ``` /// leads to a `vec![&&Option<i32>, &Option<i32>]`. Empty vectors are not stored. /// /// See: /// <https://github.com/rust-lang/rfcs/blob/master/text/2005-match-ergonomics.md#definitions> pat_adjustments: ItemLocalMap<Vec<Ty<'tcx>>>, /// Records the reasons that we picked the kind of each closure; /// not all closures are present in the map. closure_kind_origins: ItemLocalMap<(Span, HirPlace<'tcx>)>, /// For each fn, records the "liberated" types of its arguments /// and return type. Liberated means that all bound regions /// (including late-bound regions) are replaced with free /// equivalents. This table is not used in codegen (since regions /// are erased there) and hence is not serialized to metadata. liberated_fn_sigs: ItemLocalMap<ty::FnSig<'tcx>>, /// For each FRU expression, record the normalized types of the fields /// of the struct - this is needed because it is non-trivial to /// normalize while preserving regions. This table is used only in /// MIR construction and hence is not serialized to metadata. 
fru_field_types: ItemLocalMap<Vec<Ty<'tcx>>>, /// For every coercion cast we add the HIR node ID of the cast /// expression to this set. coercion_casts: ItemLocalSet, /// Set of trait imports actually used in the method resolution. /// This is used for warning unused imports. During type /// checking, this `Lrc` should not be cloned: it must have a ref-count /// of 1 so that we can insert things into the set mutably. pub used_trait_imports: Lrc<FxHashSet<LocalDefId>>, /// If any errors occurred while type-checking this body, /// this field will be set to `Some(ErrorReported)`. pub tainted_by_errors: Option<ErrorReported>, /// All the opaque types that are restricted to concrete types /// by this function. pub concrete_opaque_types: FxHashMap<DefId, ResolvedOpaqueTy<'tcx>>, /// Tracks the minimum captures required for a closure; /// see `MinCaptureInformationMap` for more details. pub closure_min_captures: ty::MinCaptureInformationMap<'tcx>, /// Tracks the fake reads required for a closure and the reason for the fake read. /// When performing pattern matching for closures, there are times we don't end up /// reading places that are mentioned in a closure (because of _ patterns). However, /// to ensure the places are initialized, we introduce fake reads. /// Consider these two examples: /// ``` (discriminant matching with only wildcard arm) /// let x: u8; /// let c = || match x { _ => () }; /// ``` /// In this example, we don't need to actually read/borrow `x` in `c`, and so we don't /// want to capture it. However, we do still want an error here, because `x` should have /// to be initialized at the point where c is created. Therefore, we add a "fake read" /// instead. /// ``` (destructured assignments) /// let c = || { /// let (t1, t2) = t; /// } /// ``` /// In the second example, we capture the disjoint fields of `t` (`t.0` & `t.1`), but /// we never capture `t`. This becomes an issue when we build MIR as we require /// information on `t` in order to create place `t.0` and `t.1`. We can solve this /// issue by fake reading `t`. pub closure_fake_reads: FxHashMap<DefId, Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>>, /// Stores the type, expression, span and optional scope span of all types /// that are live across the yield of this generator (if a generator). pub generator_interior_types: ty::Binder<'tcx, Vec<GeneratorInteriorTypeCause<'tcx>>>, /// We sometimes treat byte string literals (which are of type `&[u8; N]`) /// as `&[u8]`, depending on the pattern in which they are used. /// This hashset records all instances where we behave /// like this to allow `const_to_pat` to reliably handle this situation. 
pub treat_byte_string_as_slice: ItemLocalSet, } impl<'tcx> TypeckResults<'tcx> { pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> { TypeckResults { hir_owner, type_dependent_defs: Default::default(), field_indices: Default::default(), user_provided_types: Default::default(), user_provided_sigs: Default::default(), node_types: Default::default(), node_substs: Default::default(), adjustments: Default::default(), pat_binding_modes: Default::default(), pat_adjustments: Default::default(), closure_kind_origins: Default::default(), liberated_fn_sigs: Default::default(), fru_field_types: Default::default(), coercion_casts: Default::default(), used_trait_imports: Lrc::new(Default::default()), tainted_by_errors: None, concrete_opaque_types: Default::default(), closure_min_captures: Default::default(), closure_fake_reads: Default::default(), generator_interior_types: ty::Binder::dummy(Default::default()), treat_byte_string_as_slice: Default::default(), } } /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node. pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res { match *qpath { hir::QPath::Resolved(_, ref path) => path.res, hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self .type_dependent_def(id) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), } } pub fn type_dependent_defs( &self, ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs } } pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok()) } pub fn type_dependent_def_id(&self, id: HirId) -> Option<DefId> { self.type_dependent_def(id).map(|(_, def_id)| def_id) } pub fn type_dependent_defs_mut( &mut self, ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs } } pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices } } pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices } } pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types } } pub fn user_provided_types_mut( &mut self, ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types } } pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types } } pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types } } pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { self.node_type_opt(id).unwrap_or_else(|| { bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) }) } pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.node_types.get(&id.local_id).cloned() } pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs } } 
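    // Illustrative usage sketch (`tcx`, `expr`, and `body_owner` are assumed
    // bindings, not defined here): a consumer typically obtains the
    // `TypeckResults` for a body owner via `tcx.typeck(..)` and then reads
    // per-node data through these accessors:
    //
    //     let results = tcx.typeck(body_owner);
    //     let ty = results.expr_ty_adjusted(expr);        // post-adjustment type
    //     let substs = results.node_substs(expr.hir_id);  // substitutions, if any
    //
    // `expr_ty_adjusted` folds in any recorded auto-deref/auto-ref adjustments,
    // whereas `expr_ty`/`node_type` return the pre-adjustment type.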
pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty()) } pub fn node_substs_opt(&self, id: hir::HirId) -> Option<SubstsRef<'tcx>> { validate_hir_id_for_typeck_results(self.hir_owner, id); self.node_substs.get(&id.local_id).cloned() } // Returns the type of a pattern as a monotype. Like @expr_ty, this function // doesn't provide type parameter substitutions. pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> { self.node_type(pat.hir_id) } // Returns the type of an expression as a monotype. // // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in // some cases, we insert `Adjustment` annotations such as auto-deref or // auto-ref. The type returned by this function does not consider such // adjustments. See `expr_ty_adjusted()` instead. // // NB (2): This type doesn't provide type parameter substitutions; e.g., if you // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" // instead of "fn(ty) -> T with T = isize". pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { self.node_type(expr.hir_id) } pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> { self.node_type_opt(expr.hir_id) } pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments } } pub fn adjustments_mut( &mut self, ) -> LocalTableInContextMut<'_, Vec<ty::adjustment::Adjustment<'tcx>>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments } } pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] { validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id); self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) } /// Returns the type of `expr`, considering any `Adjustment` /// entry recorded for that expression. pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target) } pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option<Ty<'tcx>> { self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr)) } pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool { // Only paths and method calls/overloaded operators have // entries in type_dependent_defs, ignore the former here. 
if let hir::ExprKind::Path(_) = expr.kind { return false; } matches!(self.type_dependent_defs().get(expr.hir_id), Some(Ok((DefKind::AssocFn, _)))) } pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option<BindingMode> { self.pat_binding_modes().get(id).copied().or_else(|| { s.delay_span_bug(sp, "missing binding mode"); None }) } pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes } } pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes } } pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments } } pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } } /// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured /// by the closure. pub fn closure_min_captures_flattened( &self, closure_def_id: DefId, ) -> impl Iterator<Item = &ty::CapturedPlace<'tcx>> { self.closure_min_captures .get(&closure_def_id) .map(|closure_min_captures| closure_min_captures.values().flat_map(|v| v.iter())) .into_iter() .flatten() } pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, HirPlace<'tcx>)> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins } } pub fn closure_kind_origins_mut( &mut self, ) -> LocalTableInContextMut<'_, (Span, HirPlace<'tcx>)> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins } } pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs } } pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs } } pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec<Ty<'tcx>>> { LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types } } pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> { LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types } } pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool { validate_hir_id_for_typeck_results(self.hir_owner, hir_id); self.coercion_casts.contains(&hir_id.local_id) } pub fn set_coercion_cast(&mut self, id: ItemLocalId) { self.coercion_casts.insert(id); } pub fn coercion_casts(&self) -> &ItemLocalSet { &self.coercion_casts } } impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckResults<'tcx> { fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { let ty::TypeckResults { hir_owner, ref type_dependent_defs, ref field_indices, ref user_provided_types, ref user_provided_sigs, ref node_types, ref node_substs, ref adjustments, ref pat_binding_modes, ref pat_adjustments, ref closure_kind_origins, ref liberated_fn_sigs, ref fru_field_types, ref coercion_casts, ref used_trait_imports, tainted_by_errors, ref concrete_opaque_types, ref closure_min_captures, ref closure_fake_reads, ref generator_interior_types, ref treat_byte_string_as_slice, } = *self; hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { hcx.local_def_path_hash(hir_owner); 
type_dependent_defs.hash_stable(hcx, hasher); field_indices.hash_stable(hcx, hasher); user_provided_types.hash_stable(hcx, hasher); user_provided_sigs.hash_stable(hcx, hasher); node_types.hash_stable(hcx, hasher); node_substs.hash_stable(hcx, hasher); adjustments.hash_stable(hcx, hasher); pat_binding_modes.hash_stable(hcx, hasher); pat_adjustments.hash_stable(hcx, hasher); closure_kind_origins.hash_stable(hcx, hasher); liberated_fn_sigs.hash_stable(hcx, hasher); fru_field_types.hash_stable(hcx, hasher); coercion_casts.hash_stable(hcx, hasher); used_trait_imports.hash_stable(hcx, hasher); tainted_by_errors.hash_stable(hcx, hasher); concrete_opaque_types.hash_stable(hcx, hasher); closure_min_captures.hash_stable(hcx, hasher); closure_fake_reads.hash_stable(hcx, hasher); generator_interior_types.hash_stable(hcx, hasher); treat_byte_string_as_slice.hash_stable(hcx, hasher); }) } } rustc_index::newtype_index! { pub struct UserTypeAnnotationIndex { derive [HashStable] DEBUG_FORMAT = "UserType({})", const START_INDEX = 0, } } /// Mapping of type annotation indices to canonical user type annotations. pub type CanonicalUserTypeAnnotations<'tcx> = IndexVec<UserTypeAnnotationIndex, CanonicalUserTypeAnnotation<'tcx>>; #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)] pub struct CanonicalUserTypeAnnotation<'tcx> { pub user_ty: CanonicalUserType<'tcx>, pub span: Span, pub inferred_ty: Ty<'tcx>, } /// Canonicalized user type annotation. pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; impl CanonicalUserType<'tcx> { /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, /// i.e., each thing is mapped to a canonical variable with the same index. pub fn is_identity(&self) -> bool { match self.value { UserType::Ty(_) => false, UserType::TypeOf(_, user_substs) => { if user_substs.user_self_ty.is_some() { return false; } iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| { match kind.unpack() { GenericArgKind::Type(ty) => match ty.kind() { ty::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(*debruijn, ty::INNERMOST); cvar == b.var } _ => false, }, GenericArgKind::Lifetime(r) => match r { ty::ReLateBound(debruijn, br) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(*debruijn, ty::INNERMOST); cvar == br.var } _ => false, }, GenericArgKind::Const(ct) => match ct.val { ty::ConstKind::Bound(debruijn, b) => { // We only allow a `ty::INNERMOST` index in substitutions. assert_eq!(debruijn, ty::INNERMOST); cvar == b } _ => false, }, } }) } } } } /// A user-given type annotation attached to a constant. These arise /// from constants that are named via paths, like `Foo::<A>::new` and /// so forth. #[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] #[derive(HashStable, TypeFoldable, Lift)] pub enum UserType<'tcx> { Ty(Ty<'tcx>), /// The canonical type is the result of `type_of(def_id)` with the /// given substitutions applied. 
TypeOf(DefId, UserSubsts<'tcx>), } impl<'tcx> CommonTypes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { let mk = |ty| interners.intern_ty(ty); CommonTypes { unit: mk(Tuple(List::empty())), bool: mk(Bool), char: mk(Char), never: mk(Never), isize: mk(Int(ty::IntTy::Isize)), i8: mk(Int(ty::IntTy::I8)), i16: mk(Int(ty::IntTy::I16)), i32: mk(Int(ty::IntTy::I32)), i64: mk(Int(ty::IntTy::I64)), i128: mk(Int(ty::IntTy::I128)), usize: mk(Uint(ty::UintTy::Usize)), u8: mk(Uint(ty::UintTy::U8)), u16: mk(Uint(ty::UintTy::U16)), u32: mk(Uint(ty::UintTy::U32)), u64: mk(Uint(ty::UintTy::U64)), u128: mk(Uint(ty::UintTy::U128)), f32: mk(Float(ty::FloatTy::F32)), f64: mk(Float(ty::FloatTy::F64)), str_: mk(Str), self_param: mk(ty::Param(ty::ParamTy { index: 0, name: kw::SelfUpper })), trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), } } } impl<'tcx> CommonLifetimes<'tcx> { fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0; CommonLifetimes { re_root_empty: mk(RegionKind::ReEmpty(ty::UniverseIndex::ROOT)), re_static: mk(RegionKind::ReStatic), re_erased: mk(RegionKind::ReErased), } } } impl<'tcx> CommonConsts<'tcx> { fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0; CommonConsts { unit: mk_const(ty::Const { val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::ZST)), ty: types.unit, }), } } } // This struct contains information regarding the `ReFree(FreeRegion)` corresponding to a lifetime // conflict. #[derive(Debug)] pub struct FreeRegionInfo { // `LocalDefId` corresponding to FreeRegion pub def_id: LocalDefId, // the bound region corresponding to FreeRegion pub boundregion: ty::BoundRegionKind, // checks if bound region is in Impl Item pub is_impl_item: bool, } /// The central data structure of the compiler. It stores references /// to the various **arenas** and also houses the results of the /// various **compiler queries** that have been performed. See the /// [rustc dev guide] for more details. /// /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/ty.html #[derive(Copy, Clone)] #[rustc_diagnostic_item = "TyCtxt"] pub struct TyCtxt<'tcx> { gcx: &'tcx GlobalCtxt<'tcx>, } impl<'tcx> Deref for TyCtxt<'tcx> { type Target = &'tcx GlobalCtxt<'tcx>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.gcx } } pub struct GlobalCtxt<'tcx> { pub arena: &'tcx WorkerLocal<Arena<'tcx>>, interners: CtxtInterners<'tcx>, pub(crate) cstore: Box<CrateStoreDyn>, pub sess: &'tcx Session, /// This only ever stores a `LintStore` but we don't want a dependency on that type here. /// /// FIXME(Centril): consider `dyn LintStoreMarker` once /// we can upcast to `Any` for some additional type safety. pub lint_store: Lrc<dyn Any + sync::Sync + sync::Send>, pub dep_graph: DepGraph, pub prof: SelfProfilerRef, /// Common types, pre-interned for your convenience. pub types: CommonTypes<'tcx>, /// Common lifetimes, pre-interned for your convenience. pub lifetimes: CommonLifetimes<'tcx>, /// Common consts, pre-interned for your convenience. pub consts: CommonConsts<'tcx>, /// Visibilities produced by resolver. pub visibilities: FxHashMap<LocalDefId, Visibility>, /// Resolutions of `extern crate` items produced by resolver. 
extern_crate_map: FxHashMap<LocalDefId, CrateNum>, /// Map indicating what traits are in scope for places where this /// is relevant; generated by resolve. trait_map: FxHashMap<LocalDefId, FxHashMap<ItemLocalId, StableVec<TraitCandidate>>>, /// Export map produced by name resolution. export_map: ExportMap<LocalDefId>, pub(crate) untracked_crate: &'tcx hir::Crate<'tcx>, pub(crate) definitions: Definitions, /// This provides access to the incremental compilation on-disk cache for query results. /// Do not access this directly. It is only meant to be used by /// `DepGraph::try_mark_green()` and the query infrastructure. /// This is `None` if we are not incremental compilation mode pub on_disk_cache: Option<OnDiskCache<'tcx>>, pub queries: &'tcx dyn query::QueryEngine<'tcx>, pub query_caches: query::QueryCaches<'tcx>, maybe_unused_trait_imports: FxHashSet<LocalDefId>, maybe_unused_extern_crates: Vec<(LocalDefId, Span)>, /// A map of glob use to a set of names it actually imports. Currently only /// used in save-analysis. pub(crate) glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>, /// Extern prelude entries. The value is `true` if the entry was introduced /// via `extern crate` item and not `--extern` option or compiler built-in. pub extern_prelude: FxHashMap<Symbol, bool>, // Internal caches for metadata decoding. No need to track deps on this. pub ty_rcache: Lock<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>, pub pred_rcache: Lock<FxHashMap<ty::CReaderCacheKey, Predicate<'tcx>>>, /// Caches the results of trait selection. This cache is used /// for things that do not have to do with the parameters in scope. pub selection_cache: traits::SelectionCache<'tcx>, /// Caches the results of trait evaluation. This cache is used /// for things that do not have to do with the parameters in scope. /// Merge this with `selection_cache`? pub evaluation_cache: traits::EvaluationCache<'tcx>, /// The definite name of the current crate after taking into account /// attributes, commandline parameters, etc. pub crate_name: Symbol, /// Data layout specification for the current target. pub data_layout: TargetDataLayout, /// `#[stable]` and `#[unstable]` attributes stability_interner: ShardedHashMap<&'tcx attr::Stability, ()>, /// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes const_stability_interner: ShardedHashMap<&'tcx attr::ConstStability, ()>, /// Stores memory for globals (statics/consts). 
pub(crate) alloc_map: Lock<interpret::AllocMap<'tcx>>, layout_interner: ShardedHashMap<&'tcx Layout, ()>, output_filenames: Arc<OutputFilenames>, pub main_def: Option<MainDefinition>, } impl<'tcx> TyCtxt<'tcx> { pub fn typeck_opt_const_arg( self, def: ty::WithOptConstParam<LocalDefId>, ) -> &'tcx TypeckResults<'tcx> { if let Some(param_did) = def.const_param_did { self.typeck_const_arg((def.did, param_did)) } else { self.typeck(def.did) } } pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal<Body<'tcx>> { self.arena.alloc(Steal::new(mir)) } pub fn alloc_steal_promoted( self, promoted: IndexVec<Promoted, Body<'tcx>>, ) -> &'tcx Steal<IndexVec<Promoted, Body<'tcx>>> { self.arena.alloc(Steal::new(promoted)) } pub fn alloc_adt_def( self, did: DefId, kind: AdtKind, variants: IndexVec<VariantIdx, ty::VariantDef>, repr: ReprOptions, ) -> &'tcx ty::AdtDef { self.arena.alloc(ty::AdtDef::new(self, did, kind, variants, repr)) } pub fn intern_const_alloc(self, alloc: Allocation) -> &'tcx Allocation { self.interners .allocation .intern(alloc, |alloc| Interned(self.interners.arena.alloc(alloc))) .0 } /// Allocates a read-only byte or string literal for `mir::interpret`. pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId { // Create an allocation that just contains these bytes. let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); let alloc = self.intern_const_alloc(alloc); self.create_memory_alloc(alloc) } pub fn intern_stability(self, stab: attr::Stability) -> &'tcx attr::Stability { self.stability_interner.intern(stab, |stab| self.arena.alloc(stab)) } pub fn intern_const_stability(self, stab: attr::ConstStability) -> &'tcx attr::ConstStability { self.const_stability_interner.intern(stab, |stab| self.arena.alloc(stab)) } pub fn intern_layout(self, layout: Layout) -> &'tcx Layout { self.layout_interner.intern(layout, |layout| self.arena.alloc(layout)) } /// Returns a range of the start/end indices specified with the /// `rustc_layout_scalar_valid_range` attribute. pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) { let attrs = self.get_attrs(def_id); let get = |name| { let attr = match attrs.iter().find(|a| self.sess.check_name(a, name)) { Some(attr) => attr, None => return Bound::Unbounded, }; debug!("layout_scalar_valid_range: attr={:?}", attr); if let Some( &[ast::NestedMetaItem::Literal(ast::Lit { kind: ast::LitKind::Int(a, _), .. })], ) = attr.meta_item_list().as_deref() { Bound::Included(a) } else { self.sess .delay_span_bug(attr.span, "invalid rustc_layout_scalar_valid_range attribute"); Bound::Unbounded } }; ( get(sym::rustc_layout_scalar_valid_range_start), get(sym::rustc_layout_scalar_valid_range_end), ) } pub fn lift<T: Lift<'tcx>>(self, value: T) -> Option<T::Lifted> { value.lift_to_tcx(self) } /// Creates a type context and call the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. 
pub fn create_global_ctxt( s: &'tcx Session, lint_store: Lrc<dyn Any + sync::Send + sync::Sync>, arena: &'tcx WorkerLocal<Arena<'tcx>>, resolutions: ty::ResolverOutputs, krate: &'tcx hir::Crate<'tcx>, dep_graph: DepGraph, on_disk_cache: Option<query::OnDiskCache<'tcx>>, queries: &'tcx dyn query::QueryEngine<'tcx>, crate_name: &str, output_filenames: &OutputFilenames, ) -> GlobalCtxt<'tcx> { let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| { s.fatal(&err); }); let interners = CtxtInterners::new(arena); let common_types = CommonTypes::new(&interners); let common_lifetimes = CommonLifetimes::new(&interners); let common_consts = CommonConsts::new(&interners, &common_types); let cstore = resolutions.cstore; let mut trait_map: FxHashMap<_, FxHashMap<_, _>> = FxHashMap::default(); for (hir_id, v) in krate.trait_map.iter() { let map = trait_map.entry(hir_id.owner).or_default(); map.insert(hir_id.local_id, StableVec::new(v.to_vec())); } GlobalCtxt { sess: s, lint_store, cstore, arena, interners, dep_graph, prof: s.prof.clone(), types: common_types, lifetimes: common_lifetimes, consts: common_consts, visibilities: resolutions.visibilities, extern_crate_map: resolutions.extern_crate_map, trait_map, export_map: resolutions.export_map, maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports, maybe_unused_extern_crates: resolutions.maybe_unused_extern_crates, glob_map: resolutions.glob_map, extern_prelude: resolutions.extern_prelude, untracked_crate: krate, definitions: resolutions.definitions, on_disk_cache, queries, query_caches: query::QueryCaches::default(), ty_rcache: Default::default(), pred_rcache: Default::default(), selection_cache: Default::default(), evaluation_cache: Default::default(), crate_name: Symbol::intern(crate_name), data_layout, layout_interner: Default::default(), stability_interner: Default::default(), const_stability_interner: Default::default(), alloc_map: Lock::new(interpret::AllocMap::new()), output_filenames: Arc::new(output_filenames.clone()), main_def: resolutions.main_def, } } /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. #[track_caller] pub fn ty_error(self) -> Ty<'tcx> { self.ty_error_with_message(DUMMY_SP, "TyKind::Error constructed but no error reported") } /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg` to /// ensure it gets used. #[track_caller] pub fn ty_error_with_message<S: Into<MultiSpan>>(self, span: S, msg: &str) -> Ty<'tcx> { self.sess.delay_span_bug(span, msg); self.mk_ty(Error(DelaySpanBugEmitted(()))) } /// Like `err` but for constants. #[track_caller] pub fn const_error(self, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { self.sess .delay_span_bug(DUMMY_SP, "ty::ConstKind::Error constructed but no error reported."); self.mk_const(ty::Const { val: ty::ConstKind::Error(DelaySpanBugEmitted(())), ty }) } pub fn consider_optimizing<T: Fn() -> String>(self, msg: T) -> bool { let cname = self.crate_name(LOCAL_CRATE).as_str(); self.sess.consider_optimizing(&cname, msg) } pub fn lib_features(self) -> &'tcx middle::lib_features::LibFeatures { self.get_lib_features(()) } /// Obtain all lang items of this crate and all dependencies (recursively) pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems { self.get_lang_items(()) } /// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to /// compare against another `DefId`, since `is_diagnostic_item` is cheaper. 
pub fn get_diagnostic_item(self, name: Symbol) -> Option<DefId> { self.all_diagnostic_items(()).get(&name).copied() } /// Check whether the diagnostic item with the given `name` has the given `DefId`. pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool { self.diagnostic_items(did.krate).get(&name) == Some(&did) } pub fn stability(self) -> &'tcx stability::Index<'tcx> { self.stability_index(()) } pub fn crates(self) -> &'tcx [CrateNum] { self.all_crate_nums(()) } pub fn allocator_kind(self) -> Option<AllocatorKind> { self.cstore.allocator_kind() } pub fn features(self) -> &'tcx rustc_feature::Features { self.features_query(()) } pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey { if let Some(id) = id.as_local() { self.hir().def_key(id) } else { self.cstore.def_key(id) } } /// Converts a `DefId` into its fully expanded `DefPath` (every /// `DefId` is really just an interned `DefPath`). /// /// Note that if `id` is not local to this crate, the result will /// be a non-local `DefPath`. pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath { if let Some(id) = id.as_local() { self.hir().def_path(id) } else { self.cstore.def_path(id) } } /// Returns whether or not the crate with CrateNum 'cnum' /// is marked as a private dependency pub fn is_private_dep(self, cnum: CrateNum) -> bool { if cnum == LOCAL_CRATE { false } else { self.cstore.crate_is_private_dep_untracked(cnum) } } #[inline] pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash { if let Some(def_id) = def_id.as_local() { self.definitions.def_path_hash(def_id) } else { self.cstore.def_path_hash(def_id) } } pub fn def_path_debug_str(self, def_id: DefId) -> String { // We are explicitly not going through queries here in order to get // crate name and disambiguator since this code is called from debug!() // statements within the query system and we'd run into endless // recursion otherwise. let (crate_name, crate_disambiguator) = if def_id.is_local() { (self.crate_name, self.sess.local_crate_disambiguator()) } else { ( self.cstore.crate_name_untracked(def_id.krate), self.cstore.crate_disambiguator_untracked(def_id.krate), ) }; format!( "{}[{}]{}", crate_name, // Don't print the whole crate disambiguator. That's just // annoying in debug output. 
&(crate_disambiguator.to_fingerprint().to_hex())[..4], self.def_path(def_id).to_string_no_crate_verbose() ) } pub fn metadata_encoding_version(self) -> Vec<u8> { self.cstore.metadata_encoding_version().to_vec() } pub fn encode_metadata(self) -> EncodedMetadata { let _prof_timer = self.prof.verbose_generic_activity("generate_crate_metadata"); self.cstore.encode_metadata(self) } // Note that this is *untracked* and should only be used within the query // system if the result is otherwise tracked through queries pub fn cstore_as_any(self) -> &'tcx dyn Any { self.cstore.as_any() } #[inline(always)] pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { let krate = self.gcx.untracked_crate; StableHashingContext::new(self.sess, krate, &self.definitions, &*self.cstore) } #[inline(always)] pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> { let krate = self.gcx.untracked_crate; StableHashingContext::ignore_spans(self.sess, krate, &self.definitions, &*self.cstore) } pub fn serialize_query_result_cache(self, encoder: &mut FileEncoder) -> FileEncodeResult { self.on_disk_cache.as_ref().map_or(Ok(()), |c| c.serialize(self, encoder)) } /// If `true`, we should use the MIR-based borrowck, but also /// fall back on the AST borrowck if the MIR-based one errors. pub fn migrate_borrowck(self) -> bool { self.borrowck_mode().migrate() } /// What mode(s) of borrowck should we run? AST? MIR? both? /// (Also considers the `#![feature(nll)]` setting.) pub fn borrowck_mode(self) -> BorrowckMode { // Here are the main constraints we need to deal with: // // 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is // synonymous with no `-Z borrowck=...` flag at all. // // 2. We want to allow developers on the Nightly channel // to opt back into the "hard error" mode for NLL, // (which they can do via specifying `#![feature(nll)]` // explicitly in their crate). // // So, this precedence list is how pnkfelix chose to work with // the above constraints: // // * `#![feature(nll)]` *always* means use NLL with hard // errors. (To simplify the code here, it now even overrides // a user's attempt to specify `-Z borrowck=compare`, which // we arguably do not need anymore and should remove.) // // * Otherwise, if no `-Z borrowck=...` then use migrate mode // // * Otherwise, use the behavior requested via `-Z borrowck=...` if self.features().nll { return BorrowckMode::Mir; } self.sess.opts.borrowck_mode } /// If `true`, we should use lazy normalization for constants, otherwise /// we still evaluate them eagerly. #[inline] pub fn lazy_normalization(self) -> bool { let features = self.features(); // Note: We do not enable lazy normalization for `min_const_generics`. features.const_generics || features.lazy_normalization_consts } #[inline] pub fn local_crate_exports_generics(self) -> bool { debug_assert!(self.sess.opts.share_generics()); self.sess.crate_types().iter().any(|crate_type| { match crate_type { CrateType::Executable | CrateType::Staticlib | CrateType::ProcMacro | CrateType::Cdylib => false, // FIXME rust-lang/rust#64319, rust-lang/rust#64872: // We want to block export of generics from dylibs, // but we must fix rust-lang/rust#65890 before we can // do that robustly. CrateType::Dylib => true, CrateType::Rlib => true, } }) } // Returns the `DefId` and the `BoundRegionKind` corresponding to the given region. 
pub fn is_suitable_region(self, region: Region<'tcx>) -> Option<FreeRegionInfo> { let (suitable_region_binding_scope, bound_region) = match *region { ty::ReFree(ref free_region) => { (free_region.scope.expect_local(), free_region.bound_region) } ty::ReEarlyBound(ref ebr) => ( self.parent(ebr.def_id).unwrap().expect_local(), ty::BoundRegionKind::BrNamed(ebr.def_id, ebr.name), ), _ => return None, // not a free region }; let hir_id = self.hir().local_def_id_to_hir_id(suitable_region_binding_scope); let is_impl_item = match self.hir().find(hir_id) { Some(Node::Item(..) | Node::TraitItem(..)) => false, Some(Node::ImplItem(..)) => { self.is_bound_region_in_impl_item(suitable_region_binding_scope) } _ => return None, }; Some(FreeRegionInfo { def_id: suitable_region_binding_scope, boundregion: bound_region, is_impl_item, }) } /// Given a `DefId` for an `fn`, return all the `dyn` and `impl` traits in its return type. pub fn return_type_impl_or_dyn_traits( self, scope_def_id: LocalDefId, ) -> Vec<&'tcx hir::Ty<'tcx>> { let hir_id = self.hir().local_def_id_to_hir_id(scope_def_id); let hir_output = match self.hir().get(hir_id) { Node::Item(hir::Item { kind: ItemKind::Fn( hir::FnSig { decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. }, .. }, .., ), .. }) | Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Fn( hir::FnSig { decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. }, .. }, _, ), .. }) | Node::TraitItem(hir::TraitItem { kind: hir::TraitItemKind::Fn( hir::FnSig { decl: hir::FnDecl { output: hir::FnRetTy::Return(ty), .. }, .. }, _, ), .. }) => ty, _ => return vec![], }; let mut v = TraitObjectVisitor(vec![], self.hir()); v.visit_ty(hir_output); v.0 } pub fn return_type_impl_trait(self, scope_def_id: LocalDefId) -> Option<(Ty<'tcx>, Span)> { // HACK: `type_of_def_id()` will fail on these (#55796), so return `None`. let hir_id = self.hir().local_def_id_to_hir_id(scope_def_id); match self.hir().get(hir_id) { Node::Item(item) => { match item.kind { ItemKind::Fn(..) => { /* `type_of_def_id()` will work */ } _ => { return None; } } } _ => { /* `type_of_def_id()` will work or panic */ } } let ret_ty = self.type_of(scope_def_id); match ret_ty.kind() { ty::FnDef(_, _) => { let sig = ret_ty.fn_sig(self); let output = self.erase_late_bound_regions(sig.output()); if output.is_impl_trait() { let fn_decl = self.hir().fn_decl_by_hir_id(hir_id).unwrap(); Some((output, fn_decl.output.span())) } else { None } } _ => None, } } // Checks if the bound region is in Impl Item. pub fn is_bound_region_in_impl_item(self, suitable_region_binding_scope: LocalDefId) -> bool { let container_id = self.associated_item(suitable_region_binding_scope.to_def_id()).container.id(); if self.impl_trait_ref(container_id).is_some() { // For now, we do not try to target impls of traits. This is // because this message is going to suggest that the user // change the fn signature, but they may not be free to do so, // since the signature must match the trait. // // FIXME(#42706) -- in some cases, we could do better here. return true; } false } /// Determines whether identifiers in the assembly have strict naming rules. /// Currently, only NVPTX* targets need it. pub fn has_strict_asm_symbol_naming(self) -> bool { self.sess.target.arch.contains("nvptx") } /// Returns `&'static core::panic::Location<'static>`. 
pub fn caller_location_ty(self) -> Ty<'tcx> { self.mk_imm_ref( self.lifetimes.re_static, self.type_of(self.require_lang_item(LangItem::PanicLocation, None)) .subst(self, self.mk_substs([self.lifetimes.re_static.into()].iter())), ) } /// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`). pub fn article_and_description(self, def_id: DefId) -> (&'static str, &'static str) { match self.def_kind(def_id) { DefKind::Generator => match self.generator_kind(def_id).unwrap() { rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"), rustc_hir::GeneratorKind::Gen => ("a", "generator"), }, def_kind => (def_kind.article(), def_kind.descr(def_id)), } } } /// A trait implemented for all `X<'a>` types that can be safely and /// efficiently converted to `X<'tcx>` as long as they are part of the /// provided `TyCtxt<'tcx>`. /// This can be done, for example, for `Ty<'tcx>` or `SubstsRef<'tcx>` /// by looking them up in their respective interners. /// /// However, this is still not the best implementation as it does /// need to compare the components, even for interned values. /// It would be more efficient if `TypedArena` provided a way to /// determine whether the address is in the allocated range. /// /// `None` is returned if the value or one of the components is not part /// of the provided context. /// For `Ty`, `None` can be returned if either the type interner doesn't /// contain the `TyKind` key or if the address of the interned /// pointer differs. The latter case is possible if a primitive type, /// e.g., `()` or `u8`, was interned in a different context. pub trait Lift<'tcx>: fmt::Debug { type Lifted: fmt::Debug + 'tcx; fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted>; } macro_rules! nop_lift { ($set:ident; $ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for $ty { type Lifted = $lifted; fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { if tcx.interners.$set.contains_pointer_to(&Interned(self)) { Some(unsafe { mem::transmute(self) }) } else { None } } } }; } macro_rules! nop_list_lift { ($set:ident; $ty:ty => $lifted:ty) => { impl<'a, 'tcx> Lift<'tcx> for &'a List<$ty> { type Lifted = &'tcx List<$lifted>; fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option<Self::Lifted> { if self.is_empty() { return Some(List::empty()); } if tcx.interners.$set.contains_pointer_to(&Interned(self)) { Some(unsafe { mem::transmute(self) }) } else { None } } } }; } nop_lift! {type_; Ty<'a> => Ty<'tcx>} nop_lift! {region; Region<'a> => Region<'tcx>} nop_lift! {const_; &'a Const<'a> => &'tcx Const<'tcx>} nop_lift! {allocation; &'a Allocation => &'tcx Allocation} nop_lift! {predicate; &'a PredicateInner<'a> => &'tcx PredicateInner<'tcx>} nop_list_lift! {type_list; Ty<'a> => Ty<'tcx>} nop_list_lift! {poly_existential_predicates; ty::Binder<'a, ExistentialPredicate<'a>> => ty::Binder<'tcx, ExistentialPredicate<'tcx>>} nop_list_lift! {predicates; Predicate<'a> => Predicate<'tcx>} nop_list_lift! {canonical_var_infos; CanonicalVarInfo<'a> => CanonicalVarInfo<'tcx>} nop_list_lift! {projs; ProjectionKind => ProjectionKind} nop_list_lift! {bound_variable_kinds; ty::BoundVariableKind => ty::BoundVariableKind} // This is the impl for `&'a InternalSubsts<'a>`. nop_list_lift! {substs; GenericArg<'a> => GenericArg<'tcx>} CloneLiftImpls! 
{ for<'tcx> { Constness, } } pub mod tls { use super::{ptr_eq, GlobalCtxt, TyCtxt}; use crate::dep_graph::{DepKind, TaskDeps}; use crate::ty::query; use rustc_data_structures::sync::{self, Lock}; use rustc_data_structures::thin_vec::ThinVec; use rustc_errors::Diagnostic; use std::mem; #[cfg(not(parallel_compiler))] use std::cell::Cell; #[cfg(parallel_compiler)] use rustc_rayon_core as rayon_core; /// This is the implicit state of rustc. It contains the current /// `TyCtxt` and query. It is updated when creating a local interner or /// executing a new query. Whenever there's a `TyCtxt` value available /// you should also have access to an `ImplicitCtxt` through the functions /// in this module. #[derive(Clone)] pub struct ImplicitCtxt<'a, 'tcx> { /// The current `TyCtxt`. pub tcx: TyCtxt<'tcx>, /// The current query job, if any. This is updated by `JobOwner::start` in /// `ty::query::plumbing` when executing a query. pub query: Option<query::QueryJobId<DepKind>>, /// Where to store diagnostics for the current query job, if any. /// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query. pub diagnostics: Option<&'a Lock<ThinVec<Diagnostic>>>, /// Used to prevent layout from recursing too deeply. pub layout_depth: usize, /// The current dep graph task. This is used to add dependencies to queries /// when executing them. pub task_deps: Option<&'a Lock<TaskDeps>>, } impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> { pub fn new(gcx: &'tcx GlobalCtxt<'tcx>) -> Self { let tcx = TyCtxt { gcx }; ImplicitCtxt { tcx, query: None, diagnostics: None, layout_depth: 0, task_deps: None } } } /// Sets Rayon's thread-local variable, which is preserved for Rayon jobs /// to `value` during the call to `f`. It is restored to its previous value after. /// This is used to set the pointer to the new `ImplicitCtxt`. #[cfg(parallel_compiler)] #[inline] fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R { rayon_core::tlv::with(value, f) } /// Gets Rayon's thread-local variable, which is preserved for Rayon jobs. /// This is used to get the pointer to the current `ImplicitCtxt`. #[cfg(parallel_compiler)] #[inline] pub fn get_tlv() -> usize { rayon_core::tlv::get() } #[cfg(not(parallel_compiler))] thread_local! { /// A thread local variable that stores a pointer to the current `ImplicitCtxt`. static TLV: Cell<usize> = const { Cell::new(0) }; } /// Sets TLV to `value` during the call to `f`. /// It is restored to its previous value after. /// This is used to set the pointer to the new `ImplicitCtxt`. #[cfg(not(parallel_compiler))] #[inline] fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R { let old = get_tlv(); let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old))); TLV.with(|tlv| tlv.set(value)); f() } /// Gets the pointer to the current `ImplicitCtxt`. #[cfg(not(parallel_compiler))] #[inline] fn get_tlv() -> usize { TLV.with(|tlv| tlv.get()) } /// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`. #[inline] pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R where F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, { set_tlv(context as *const _ as usize, || f(&context)) } /// Allows access to the current `ImplicitCtxt` in a closure if one is available. #[inline] pub fn with_context_opt<F, R>(f: F) -> R where F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R, { let context = get_tlv(); if context == 0 { f(None) } else { // We could get a `ImplicitCtxt` pointer from another thread. 
// Ensure that `ImplicitCtxt` is `Sync`. sync::assert_sync::<ImplicitCtxt<'_, '_>>(); unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) } } } /// Allows access to the current `ImplicitCtxt`. /// Panics if there is no `ImplicitCtxt` available. #[inline] pub fn with_context<F, R>(f: F) -> R where F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, { with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls"))) } /// Allows access to the current `ImplicitCtxt` whose tcx field is the same as the tcx argument /// passed in. This means the closure is given an `ImplicitCtxt` with the same `'tcx` lifetime /// as the `TyCtxt` passed in. /// This will panic if you pass it a `TyCtxt` which is different from the current /// `ImplicitCtxt`'s `tcx` field. #[inline] pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R where F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, { with_context(|context| unsafe { assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); f(context) }) } /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. /// Panics if there is no `ImplicitCtxt` available. #[inline] pub fn with<F, R>(f: F) -> R where F: for<'tcx> FnOnce(TyCtxt<'tcx>) -> R, { with_context(|context| f(context.tcx)) } /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. /// The closure is passed None if there is no `ImplicitCtxt` available. #[inline] pub fn with_opt<F, R>(f: F) -> R where F: for<'tcx> FnOnce(Option<TyCtxt<'tcx>>) -> R, { with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx))) } } macro_rules! sty_debug_print { ($fmt: expr, $ctxt: expr, $($variant: ident),*) => {{ // Curious inner module to allow variant names to be used as // variable names. #[allow(non_snake_case)] mod inner { use crate::ty::{self, TyCtxt}; use crate::ty::context::Interned; #[derive(Copy, Clone)] struct DebugStat { total: usize, lt_infer: usize, ty_infer: usize, ct_infer: usize, all_infer: usize, } pub fn go(fmt: &mut std::fmt::Formatter<'_>, tcx: TyCtxt<'_>) -> std::fmt::Result { let mut total = DebugStat { total: 0, lt_infer: 0, ty_infer: 0, ct_infer: 0, all_infer: 0, }; $(let mut $variant = total;)* let shards = tcx.interners.type_.lock_shards(); let types = shards.iter().flat_map(|shard| shard.keys()); for &Interned(t) in types { let variant = match t.kind() { ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Str | ty::Never => continue, ty::Error(_) => /* unimportant */ continue, $(ty::$variant(..) 
=> &mut $variant,)* }; let lt = t.flags().intersects(ty::TypeFlags::HAS_RE_INFER); let ty = t.flags().intersects(ty::TypeFlags::HAS_TY_INFER); let ct = t.flags().intersects(ty::TypeFlags::HAS_CT_INFER); variant.total += 1; total.total += 1; if lt { total.lt_infer += 1; variant.lt_infer += 1 } if ty { total.ty_infer += 1; variant.ty_infer += 1 } if ct { total.ct_infer += 1; variant.ct_infer += 1 } if lt && ty && ct { total.all_infer += 1; variant.all_infer += 1 } } writeln!(fmt, "Ty interner total ty lt ct all")?; $(writeln!(fmt, " {:18}: {uses:6} {usespc:4.1}%, \ {ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%", stringify!($variant), uses = $variant.total, usespc = $variant.total as f64 * 100.0 / total.total as f64, ty = $variant.ty_infer as f64 * 100.0 / total.total as f64, lt = $variant.lt_infer as f64 * 100.0 / total.total as f64, ct = $variant.ct_infer as f64 * 100.0 / total.total as f64, all = $variant.all_infer as f64 * 100.0 / total.total as f64)?; )* writeln!(fmt, " total {uses:6} \ {ty:4.1}% {lt:5.1}% {ct:4.1}% {all:4.1}%", uses = total.total, ty = total.ty_infer as f64 * 100.0 / total.total as f64, lt = total.lt_infer as f64 * 100.0 / total.total as f64, ct = total.ct_infer as f64 * 100.0 / total.total as f64, all = total.all_infer as f64 * 100.0 / total.total as f64) } } inner::go($fmt, $ctxt) }} } impl<'tcx> TyCtxt<'tcx> { pub fn debug_stats(self) -> impl std::fmt::Debug + 'tcx { struct DebugStats<'tcx>(TyCtxt<'tcx>); impl std::fmt::Debug for DebugStats<'tcx> { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { sty_debug_print!( fmt, self.0, Adt, Array, Slice, RawPtr, Ref, FnDef, FnPtr, Placeholder, Generator, GeneratorWitness, Dynamic, Closure, Tuple, Bound, Param, Infer, Projection, Opaque, Foreign )?; writeln!(fmt, "InternalSubsts interner: #{}", self.0.interners.substs.len())?; writeln!(fmt, "Region interner: #{}", self.0.interners.region.len())?; writeln!(fmt, "Stability interner: #{}", self.0.stability_interner.len())?; writeln!( fmt, "Const Stability interner: #{}", self.0.const_stability_interner.len() )?; writeln!(fmt, "Allocation interner: #{}", self.0.interners.allocation.len())?; writeln!(fmt, "Layout interner: #{}", self.0.layout_interner.len())?; Ok(()) } } DebugStats(self) } } /// An entry in an interner. struct Interned<'tcx, T: ?Sized>(&'tcx T); impl<'tcx, T: 'tcx + ?Sized> Clone for Interned<'tcx, T> { fn clone(&self) -> Self { Interned(self.0) } } impl<'tcx, T: 'tcx + ?Sized> Copy for Interned<'tcx, T> {} impl<'tcx, T: 'tcx + ?Sized> IntoPointer for Interned<'tcx, T> { fn into_pointer(&self) -> *const () { self.0 as *const _ as *const () } } // N.B., an `Interned<Ty>` compares and hashes as a `TyKind`. impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { self.0.kind() == other.0.kind() } } impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.kind().hash(s) } } #[allow(rustc::usage_of_ty_tykind)] impl<'tcx> Borrow<TyKind<'tcx>> for Interned<'tcx, TyS<'tcx>> { fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { &self.0.kind() } } // N.B., an `Interned<PredicateInner>` compares and hashes as a `PredicateKind`. 
impl<'tcx> PartialEq for Interned<'tcx, PredicateInner<'tcx>> { fn eq(&self, other: &Interned<'tcx, PredicateInner<'tcx>>) -> bool { self.0.kind == other.0.kind } } impl<'tcx> Eq for Interned<'tcx, PredicateInner<'tcx>> {} impl<'tcx> Hash for Interned<'tcx, PredicateInner<'tcx>> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.kind.hash(s) } } impl<'tcx> Borrow<Binder<'tcx, PredicateKind<'tcx>>> for Interned<'tcx, PredicateInner<'tcx>> { fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> { &self.0.kind } } // N.B., an `Interned<List<T>>` compares and hashes as its elements. impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List<T>> { fn eq(&self, other: &Interned<'tcx, List<T>>) -> bool { self.0[..] == other.0[..] } } impl<'tcx, T: Eq> Eq for Interned<'tcx, List<T>> {} impl<'tcx, T: Hash> Hash for Interned<'tcx, List<T>> { fn hash<H: Hasher>(&self, s: &mut H) { self.0[..].hash(s) } } impl<'tcx, T> Borrow<[T]> for Interned<'tcx, List<T>> { fn borrow<'a>(&'a self) -> &'a [T] { &self.0[..] } } impl<'tcx> Borrow<RegionKind> for Interned<'tcx, RegionKind> { fn borrow(&self) -> &RegionKind { &self.0 } } impl<'tcx> Borrow<Const<'tcx>> for Interned<'tcx, Const<'tcx>> { fn borrow<'a>(&'a self) -> &'a Const<'tcx> { &self.0 } } impl<'tcx> Borrow<Allocation> for Interned<'tcx, Allocation> { fn borrow<'a>(&'a self) -> &'a Allocation { &self.0 } } impl<'tcx> PartialEq for Interned<'tcx, Allocation> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl<'tcx> Eq for Interned<'tcx, Allocation> {} impl<'tcx> Hash for Interned<'tcx, Allocation> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.hash(s) } } macro_rules! direct_interners { ($($name:ident: $method:ident($ty:ty),)+) => { $(impl<'tcx> PartialEq for Interned<'tcx, $ty> { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl<'tcx> Eq for Interned<'tcx, $ty> {} impl<'tcx> Hash for Interned<'tcx, $ty> { fn hash<H: Hasher>(&self, s: &mut H) { self.0.hash(s) } } impl<'tcx> TyCtxt<'tcx> { pub fn $method(self, v: $ty) -> &'tcx $ty { self.interners.$name.intern_ref(&v, || { Interned(self.interners.arena.alloc(v)) }).0 } })+ } } direct_interners! { region: mk_region(RegionKind), const_: mk_const(Const<'tcx>), } macro_rules! slice_interners { ($($field:ident: $method:ident($ty:ty)),+ $(,)?) => ( impl<'tcx> TyCtxt<'tcx> { $(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { self.interners.$field.intern_ref(v, || { Interned(List::from_arena(&*self.arena, v)) }).0 })+ } ); } slice_interners!( type_list: _intern_type_list(Ty<'tcx>), substs: _intern_substs(GenericArg<'tcx>), canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>), poly_existential_predicates: _intern_poly_existential_predicates(ty::Binder<'tcx, ExistentialPredicate<'tcx>>), predicates: _intern_predicates(Predicate<'tcx>), projs: _intern_projs(ProjectionKind), place_elems: _intern_place_elems(PlaceElem<'tcx>), bound_variable_kinds: _intern_bound_variable_kinds(ty::BoundVariableKind), ); impl<'tcx> TyCtxt<'tcx> { /// Given a `fn` type, returns an equivalent `unsafe fn` type; /// that is, a `fn` type that is equivalent in every way for being /// unsafe. pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { assert_eq!(sig.unsafety(), hir::Unsafety::Normal); self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) } /// Given the def_id of a Trait `trait_def_id` and the name of an associated item `assoc_name` /// returns true if the `trait_def_id` defines an associated item of name `assoc_name`. 
pub fn trait_may_define_assoc_type(self, trait_def_id: DefId, assoc_name: Ident) -> bool { self.super_traits_of(trait_def_id).any(|trait_did| { self.associated_items(trait_did) .find_by_name_and_kind(self, assoc_name, ty::AssocKind::Type, trait_did) .is_some() }) } /// Computes the def-ids of the transitive super-traits of `trait_def_id`. This (intentionally) /// does not compute the full elaborated super-predicates but just the set of def-ids. It is used /// to identify which traits may define a given associated type to help avoid cycle errors. /// Returns a `DefId` iterator. fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator<Item = DefId> + 'tcx { let mut set = FxHashSet::default(); let mut stack = vec![trait_def_id]; set.insert(trait_def_id); iter::from_fn(move || -> Option<DefId> { let trait_did = stack.pop()?; let generic_predicates = self.super_predicates_of(trait_did); for (predicate, _) in generic_predicates.predicates { if let ty::PredicateKind::Trait(data, _) = predicate.kind().skip_binder() { if set.insert(data.def_id()) { stack.push(data.def_id()); } } } Some(trait_did) }) } /// Given a closure signature, returns an equivalent fn signature. Detuples /// and so forth -- so e.g., if we have a sig with `Fn<(u32, i32)>` then /// you would get a `fn(u32, i32)`. /// `unsafety` determines the unsafety of the fn signature. If you pass /// `hir::Unsafety::Unsafe` in the previous example, then you would get /// an `unsafe fn (u32, i32)`. /// It cannot convert a closure that requires unsafe. pub fn signature_unclosure( self, sig: PolyFnSig<'tcx>, unsafety: hir::Unsafety, ) -> PolyFnSig<'tcx> { sig.map_bound(|s| { let params_iter = match s.inputs()[0].kind() { ty::Tuple(params) => params.into_iter().map(|k| k.expect_ty()), _ => bug!(), }; self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust) }) } /// Same a `self.mk_region(kind)`, but avoids accessing the interners if /// `*r == kind`. #[inline] pub fn reuse_or_mk_region(self, r: Region<'tcx>, kind: RegionKind) -> Region<'tcx> { if *r == kind { r } else { self.mk_region(kind) } } #[allow(rustc::usage_of_ty_tykind)] #[inline] pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> { self.interners.intern_ty(st) } #[inline] pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> { let inner = self.interners.intern_predicate(binder); Predicate { inner } } #[inline] pub fn reuse_or_mk_predicate( self, pred: Predicate<'tcx>, binder: Binder<'tcx, PredicateKind<'tcx>>, ) -> Predicate<'tcx> { if pred.kind() != binder { self.mk_predicate(binder) } else { pred } } pub fn mk_mach_int(self, tm: IntTy) -> Ty<'tcx> { match tm { IntTy::Isize => self.types.isize, IntTy::I8 => self.types.i8, IntTy::I16 => self.types.i16, IntTy::I32 => self.types.i32, IntTy::I64 => self.types.i64, IntTy::I128 => self.types.i128, } } pub fn mk_mach_uint(self, tm: UintTy) -> Ty<'tcx> { match tm { UintTy::Usize => self.types.usize, UintTy::U8 => self.types.u8, UintTy::U16 => self.types.u16, UintTy::U32 => self.types.u32, UintTy::U64 => self.types.u64, UintTy::U128 => self.types.u128, } } pub fn mk_mach_float(self, tm: FloatTy) -> Ty<'tcx> { match tm { FloatTy::F32 => self.types.f32, FloatTy::F64 => self.types.f64, } } #[inline] pub fn mk_static_str(self) -> Ty<'tcx> { self.mk_imm_ref(self.lifetimes.re_static, self.types.str_) } #[inline] pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> { // Take a copy of substs so that we own the vectors inside. 
self.mk_ty(Adt(def, substs)) } #[inline] pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> { self.mk_ty(Foreign(def_id)) } fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> { let adt_def = self.adt_def(wrapper_def_id); let substs = InternalSubsts::for_item(self, wrapper_def_id, |param, substs| match param.kind { GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => bug!(), GenericParamDefKind::Type { has_default, .. } => { if param.index == 0 { ty_param.into() } else { assert!(has_default); self.type_of(param.def_id).subst(self, substs).into() } } }); self.mk_ty(Adt(adt_def, substs)) } #[inline] pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> { let def_id = self.require_lang_item(LangItem::OwnedBox, None); self.mk_generic_adt(def_id, ty) } #[inline] pub fn mk_lang_item(self, ty: Ty<'tcx>, item: LangItem) -> Option<Ty<'tcx>> { let def_id = self.lang_items().require(item).ok()?; Some(self.mk_generic_adt(def_id, ty)) } #[inline] pub fn mk_diagnostic_item(self, ty: Ty<'tcx>, name: Symbol) -> Option<Ty<'tcx>> { let def_id = self.get_diagnostic_item(name)?; Some(self.mk_generic_adt(def_id, ty)) } #[inline] pub fn mk_maybe_uninit(self, ty: Ty<'tcx>) -> Ty<'tcx> { let def_id = self.require_lang_item(LangItem::MaybeUninit, None); self.mk_generic_adt(def_id, ty) } #[inline] pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { self.mk_ty(RawPtr(tm)) } #[inline] pub fn mk_ref(self, r: Region<'tcx>, tm: TypeAndMut<'tcx>) -> Ty<'tcx>
#[inline] pub fn mk_mut_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Mut }) } #[inline] pub fn mk_imm_ref(self, r: Region<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut { ty, mutbl: hir::Mutability::Not }) } #[inline] pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Mut }) } #[inline] pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not }) } #[inline] pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) } #[inline] pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ty(Slice(ty)) } #[inline] pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) } pub fn mk_tup<I: InternAs<[Ty<'tcx>], Ty<'tcx>>>(self, iter: I) -> I::Output { iter.intern_with(|ts| { let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); self.mk_ty(Tuple(self.intern_substs(&kinds))) }) } #[inline] pub fn mk_unit(self) -> Ty<'tcx> { self.types.unit } #[inline] pub fn mk_diverging_default(self) -> Ty<'tcx> { if self.features().never_type_fallback { self.types.never } else { self.types.unit } } #[inline] pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(FnDef(def_id, substs)) } #[inline] pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { self.mk_ty(FnPtr(fty)) } #[inline] pub fn mk_dynamic( self, obj: &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>, reg: ty::Region<'tcx>, ) -> Ty<'tcx> { self.mk_ty(Dynamic(obj, reg)) } #[inline] pub fn mk_projection(self, item_def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Projection(ProjectionTy { item_def_id, substs })) } #[inline] pub fn mk_closure(self, closure_id: DefId, closure_substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Closure(closure_id, closure_substs)) } #[inline] pub fn mk_generator( self, id: DefId, generator_substs: SubstsRef<'tcx>, movability: hir::Movability, ) -> Ty<'tcx> { self.mk_ty(Generator(id, generator_substs, movability)) } #[inline] pub fn mk_generator_witness(self, types: ty::Binder<'tcx, &'tcx List<Ty<'tcx>>>) -> Ty<'tcx> { self.mk_ty(GeneratorWitness(types)) } #[inline] pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> { self.mk_ty_infer(TyVar(v)) } #[inline] pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty }) } #[inline] pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> { self.mk_ty_infer(IntVar(v)) } #[inline] pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> { self.mk_ty_infer(FloatVar(v)) } #[inline] pub fn mk_ty_infer(self, it: InferTy) -> Ty<'tcx> { self.mk_ty(Infer(it)) } #[inline] pub fn mk_const_infer(self, ic: InferConst<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Infer(ic), ty }) } #[inline] pub fn mk_ty_param(self, index: u32, name: Symbol) -> Ty<'tcx> { self.mk_ty(Param(ParamTy { index, name })) } #[inline] pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty }) } pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { match param.kind { GenericParamDefKind::Lifetime => { 
self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() } GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(), GenericParamDefKind::Const { .. } => { self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into() } } } #[inline] pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { self.mk_ty(Opaque(def_id, substs)) } pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Field(f, ty)) } pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Deref) } pub fn mk_place_downcast( self, place: Place<'tcx>, adt_def: &'tcx AdtDef, variant_index: VariantIdx, ) -> Place<'tcx> { self.mk_place_elem( place, PlaceElem::Downcast(Some(adt_def.variants[variant_index].ident.name), variant_index), ) } pub fn mk_place_downcast_unnamed( self, place: Place<'tcx>, variant_index: VariantIdx, ) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index)) } pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> { self.mk_place_elem(place, PlaceElem::Index(index)) } /// This method copies `Place`'s projection, add an element and reintern it. Should not be used /// to build a full `Place` it's just a convenient way to grab a projection and modify it in /// flight. pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> { let mut projection = place.projection.to_vec(); projection.push(elem); Place { local: place.local, projection: self.intern_place_elems(&projection) } } pub fn intern_poly_existential_predicates( self, eps: &[ty::Binder<'tcx, ExistentialPredicate<'tcx>>], ) -> &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>> { assert!(!eps.is_empty()); assert!( eps.array_windows() .all(|[a, b]| a.skip_binder().stable_cmp(self, &b.skip_binder()) != Ordering::Greater) ); self._intern_poly_existential_predicates(eps) } pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List<Predicate<'tcx>> { // FIXME consider asking the input slice to be sorted to avoid // re-interning permutations, in which case that would be asserted // here. if preds.is_empty() { // The macro-generated method below asserts we don't intern an empty slice. 
List::empty() } else { self._intern_predicates(preds) } } pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> { if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) } } pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> { if ts.is_empty() { List::empty() } else { self._intern_substs(ts) } } pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> { if ps.is_empty() { List::empty() } else { self._intern_projs(ps) } } pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> { if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) } } pub fn intern_canonical_var_infos( self, ts: &[CanonicalVarInfo<'tcx>], ) -> CanonicalVarInfos<'tcx> { if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) } } pub fn intern_bound_variable_kinds( self, ts: &[ty::BoundVariableKind], ) -> &'tcx List<ty::BoundVariableKind> { if ts.is_empty() { List::empty() } else { self._intern_bound_variable_kinds(ts) } } pub fn mk_fn_sig<I>( self, inputs: I, output: I::Item, c_variadic: bool, unsafety: hir::Unsafety, abi: abi::Abi, ) -> <I::Item as InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>::Output where I: Iterator<Item: InternIteratorElement<Ty<'tcx>, ty::FnSig<'tcx>>>, { inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { inputs_and_output: self.intern_type_list(xs), c_variadic, unsafety, abi, }) } pub fn mk_poly_existential_predicates< I: InternAs< [ty::Binder<'tcx, ExistentialPredicate<'tcx>>], &'tcx List<ty::Binder<'tcx, ExistentialPredicate<'tcx>>>, >, >( self, iter: I, ) -> I::Output { iter.intern_with(|xs| self.intern_poly_existential_predicates(xs)) } pub fn mk_predicates<I: InternAs<[Predicate<'tcx>], &'tcx List<Predicate<'tcx>>>>( self, iter: I, ) -> I::Output { iter.intern_with(|xs| self.intern_predicates(xs)) } pub fn mk_type_list<I: InternAs<[Ty<'tcx>], &'tcx List<Ty<'tcx>>>>(self, iter: I) -> I::Output { iter.intern_with(|xs| self.intern_type_list(xs)) } pub fn mk_substs<I: InternAs<[GenericArg<'tcx>], &'tcx List<GenericArg<'tcx>>>>( self, iter: I, ) -> I::Output { iter.intern_with(|xs| self.intern_substs(xs)) } pub fn mk_place_elems<I: InternAs<[PlaceElem<'tcx>], &'tcx List<PlaceElem<'tcx>>>>( self, iter: I, ) -> I::Output { iter.intern_with(|xs| self.intern_place_elems(xs)) } pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) } pub fn mk_bound_variable_kinds< I: InternAs<[ty::BoundVariableKind], &'tcx List<ty::BoundVariableKind>>, >( self, iter: I, ) -> I::Output { iter.intern_with(|xs| self.intern_bound_variable_kinds(xs)) } /// Walks upwards from `id` to find a node which might change lint levels with attributes. /// It stops at `bound` and just returns it if reached. 
pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId { let hir = self.hir(); loop { if id == bound { return bound; } if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) { return id; } let next = hir.get_parent_node(id); if next == id { bug!("lint traversal reached the root of the crate"); } id = next; } } pub fn lint_level_at_node( self, lint: &'static Lint, mut id: hir::HirId, ) -> (Level, LintLevelSource) { let sets = self.lint_levels(()); loop { if let Some(pair) = sets.level_and_source(lint, id, self.sess) { return pair; } let next = self.hir().get_parent_node(id); if next == id { bug!("lint traversal reached the root of the crate"); } id = next; } } pub fn struct_span_lint_hir( self, lint: &'static Lint, hir_id: HirId, span: impl Into<MultiSpan>, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ) { let (level, src) = self.lint_level_at_node(lint, hir_id); struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate); } pub fn struct_lint_node( self, lint: &'static Lint, id: HirId, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ) { let (level, src) = self.lint_level_at_node(lint, id); struct_lint_level(self.sess, lint, level, src, None, decorate); } pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx StableVec<TraitCandidate>> { self.in_scope_traits_map(id.owner).and_then(|map| map.get(&id.local_id)) } pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> { debug!(?id, "named_region"); self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned()) } pub fn is_late_bound(self, id: HirId) -> bool { self.is_late_bound_map(id.owner) .map_or(false, |(owner, set)| owner == id.owner && set.contains(&id.local_id)) } pub fn object_lifetime_defaults(self, id: HirId) -> Option<Vec<ObjectLifetimeDefault>> { self.object_lifetime_defaults_map(id.owner) } pub fn late_bound_vars(self, id: HirId) -> &'tcx List<ty::BoundVariableKind> { self.mk_bound_variable_kinds( self.late_bound_vars_map(id.owner) .and_then(|map| map.get(&id.local_id).cloned()) .unwrap_or_else(|| { bug!("No bound vars found for {:?} ({:?})", self.hir().node_to_string(id), id) }) .iter(), ) } pub fn lifetime_scope(self, id: HirId) -> Option<LifetimeScopeForPath> { self.lifetime_scope_map(id.owner).and_then(|mut map| map.remove(&id.local_id)) } } impl TyCtxtAt<'tcx> { /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. #[track_caller] pub fn ty_error(self) -> Ty<'tcx> { self.tcx.ty_error_with_message(self.span, "TyKind::Error constructed but no error reported") } /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` with the given `msg to /// ensure it gets used. 
#[track_caller] pub fn ty_error_with_message(self, msg: &str) -> Ty<'tcx> { self.tcx.ty_error_with_message(self.span, msg) } } pub trait InternAs<T: ?Sized, R> { type Output; fn intern_with<F>(self, f: F) -> Self::Output where F: FnOnce(&T) -> R; } impl<I, T, R, E> InternAs<[T], R> for I where E: InternIteratorElement<T, R>, I: Iterator<Item = E>, { type Output = E::Output; fn intern_with<F>(self, f: F) -> Self::Output where F: FnOnce(&[T]) -> R, { E::intern_with(self, f) } } pub trait InternIteratorElement<T, R>: Sized { type Output; fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; } impl<T, R> InternIteratorElement<T, R> for T { type Output = R; fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.collect::<SmallVec<[_; 8]>>()) } } impl<'a, T, R> InternIteratorElement<T, R> for &'a T where T: Clone + 'a, { type Output = R; fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { f(&iter.cloned().collect::<SmallVec<[_; 8]>>()) } } impl<T, R, E> InternIteratorElement<T, R> for Result<T, E> { type Output = Result<R, E>; fn intern_with<I: Iterator<Item = Self>, F: FnOnce(&[T]) -> R>( mut iter: I, f: F, ) -> Self::Output { // This code is hot enough that it's worth specializing for the most // common length lists, to avoid the overhead of `SmallVec` creation. // The match arms are in order of frequency. The 1, 2, and 0 cases are // typically hit in ~95% of cases. We assume that if the upper and // lower bounds from `size_hint` agree they are correct. Ok(match iter.size_hint() { (1, Some(1)) => { let t0 = iter.next().unwrap()?; assert!(iter.next().is_none()); f(&[t0]) } (2, Some(2)) => { let t0 = iter.next().unwrap()?; let t1 = iter.next().unwrap()?; assert!(iter.next().is_none()); f(&[t0, t1]) } (0, Some(0)) => { assert!(iter.next().is_none()); f(&[]) } _ => f(&iter.collect::<Result<SmallVec<[_; 8]>, _>>()?), }) } } // We are comparing types with different invariant lifetimes, so `ptr::eq` // won't work for us. 
fn ptr_eq<T, U>(t: *const T, u: *const U) -> bool { t as *const () == u as *const () } pub fn provide(providers: &mut ty::query::Providers) { providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id); providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).map(|v| &v[..]); providers.crate_name = |tcx, id| { assert_eq!(id, LOCAL_CRATE); tcx.crate_name }; providers.maybe_unused_trait_import = |tcx, id| tcx.maybe_unused_trait_imports.contains(&id); providers.maybe_unused_extern_crates = |tcx, ()| &tcx.maybe_unused_extern_crates[..]; providers.names_imported_by_glob_use = |tcx, id| tcx.arena.alloc(tcx.glob_map.get(&id).cloned().unwrap_or_default()); providers.lookup_stability = |tcx, id| { let id = tcx.hir().local_def_id_to_hir_id(id.expect_local()); tcx.stability().local_stability(id) }; providers.lookup_const_stability = |tcx, id| { let id = tcx.hir().local_def_id_to_hir_id(id.expect_local()); tcx.stability().local_const_stability(id) }; providers.lookup_deprecation_entry = |tcx, id| { let id = tcx.hir().local_def_id_to_hir_id(id.expect_local()); tcx.stability().local_deprecation_entry(id) }; providers.extern_mod_stmt_cnum = |tcx, id| tcx.extern_crate_map.get(&id).cloned(); providers.all_crate_nums = |tcx, ()| tcx.arena.alloc_slice(&tcx.cstore.crates_untracked()); providers.output_filenames = |tcx, ()| tcx.output_filenames.clone(); providers.features_query = |tcx, ()| tcx.sess.features_untracked(); providers.is_panic_runtime = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::panic_runtime) }; providers.is_compiler_builtins = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins) }; providers.has_panic_handler = |tcx, cnum| { assert_eq!(cnum, LOCAL_CRATE); // We want to check if the panic handler was defined in this crate tcx.lang_items().panic_impl().map_or(false, |did| did.is_local()) }; }
{ self.mk_ty(Ref(r, tm.ty, tm.mutbl)) }
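A note on the `super_traits_of` helper above: it walks the super-trait graph iteratively with an explicit stack plus a seen-set and yields each `DefId` lazily through `iter::from_fn`, which is what keeps cyclic super-trait declarations from looping forever. A minimal, self-contained sketch of that traversal pattern (toy `u32` node ids and a plain adjacency map standing in for the `super_predicates_of` query; none of these names exist in rustc) might look like:

use std::collections::{HashMap, HashSet};

// Sketch of the worklist pattern used by `super_traits_of`: visit every node
// reachable from `root`, yielding each one exactly once, without recursion.
// `graph` is a toy adjacency map standing in for the super-predicates query.
fn reachable_from(
    graph: &HashMap<u32, Vec<u32>>,
    root: u32,
) -> impl Iterator<Item = u32> + '_ {
    let mut seen: HashSet<u32> = HashSet::new();
    let mut stack = vec![root];
    seen.insert(root);
    std::iter::from_fn(move || {
        let node = stack.pop()?;
        for &next in graph.get(&node).into_iter().flatten() {
            // `insert` returns true only the first time we see `next`,
            // so every node is pushed (and yielded) at most once.
            if seen.insert(next) {
                stack.push(next);
            }
        }
        Some(node)
    })
}

fn main() {
    let graph: HashMap<u32, Vec<u32>> =
        [(1, vec![2, 3]), (2, vec![3]), (3, vec![1])].into_iter().collect();
    // The cycle 1 -> 3 -> 1 is harmless: each node is yielded exactly once.
    let visited: Vec<u32> = reachable_from(&graph, 1).collect();
    assert_eq!(visited.len(), 3);
}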
gpio_int_mask1.rs
#[doc = "Register `GPIO_INT_MASK1` reader"] pub struct R(crate::R<GPIO_INT_MASK1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<GPIO_INT_MASK1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<GPIO_INT_MASK1_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<GPIO_INT_MASK1_SPEC>) -> Self { R(reader) } } #[doc = "Register `GPIO_INT_MASK1` writer"] pub struct W(crate::W<GPIO_INT_MASK1_SPEC>); impl core::ops::Deref for W { type Target = crate::W<GPIO_INT_MASK1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<GPIO_INT_MASK1_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<GPIO_INT_MASK1_SPEC>) -> Self { W(writer) } } #[doc = "Field `reg_gpio_int_mask1` reader - "] pub struct REG_GPIO_INT_MASK1_R(crate::FieldReader<u32, u32>); impl REG_GPIO_INT_MASK1_R { pub(crate) fn new(bits: u32) -> Self { REG_GPIO_INT_MASK1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for REG_GPIO_INT_MASK1_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `reg_gpio_int_mask1` writer - "] pub struct REG_GPIO_INT_MASK1_W<'a> { w: &'a mut W, } impl<'a> REG_GPIO_INT_MASK1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff); self.w } }
#[inline(always)] pub fn reg_gpio_int_mask1(&self) -> REG_GPIO_INT_MASK1_R { REG_GPIO_INT_MASK1_R::new((self.bits & 0xffff_ffff) as u32) } } impl W { #[doc = "Bits 0:31"] #[inline(always)] pub fn reg_gpio_int_mask1(&mut self) -> REG_GPIO_INT_MASK1_W { REG_GPIO_INT_MASK1_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "GPIO_INT_MASK1.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [gpio_int_mask1](index.html) module"] pub struct GPIO_INT_MASK1_SPEC; impl crate::RegisterSpec for GPIO_INT_MASK1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [gpio_int_mask1::R](R) reader structure"] impl crate::Readable for GPIO_INT_MASK1_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [gpio_int_mask1::W](W) writer structure"] impl crate::Writable for GPIO_INT_MASK1_SPEC { type Writer = W; } #[doc = "`reset()` method sets GPIO_INT_MASK1 to value 0"] impl crate::Resettable for GPIO_INT_MASK1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
impl R { #[doc = "Bits 0:31"]
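The generated field writer above follows the usual clear-then-OR pattern, bits = (bits & !MASK) | (value & MASK); because `reg_gpio_int_mask1` spans all 32 bits, the mask covers the whole register and the write degenerates to a plain store. A small stand-alone sketch (a hypothetical 4-bit field at bit offset 8, not part of this PAC's API) shows what the same pattern does for a narrower field:

// Read-modify-write of a hypothetical 4-bit field located at bits 8..=11.
const FIELD_OFFSET: u32 = 8;
const FIELD_MASK: u32 = 0xf << FIELD_OFFSET;

fn write_field(reg: u32, value: u32) -> u32 {
    // Clear the field bits, then OR in the new value; other bits stay intact.
    (reg & !FIELD_MASK) | ((value & 0xf) << FIELD_OFFSET)
}

fn read_field(reg: u32) -> u32 {
    (reg >> FIELD_OFFSET) & 0xf
}

fn main() {
    let reg = 0xdead_0000_u32;
    let reg = write_field(reg, 0xa);
    assert_eq!(read_field(reg), 0xa);
    assert_eq!(reg & !FIELD_MASK, 0xdead_0000); // untouched bits preserved
}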
manage.py
#!/usr/bin/env python import os import sys if __name__ == '__main__': os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'socialrating.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc
execute_from_command_line(sys.argv)
part_c.py
import misc_tools import random def create_routing(env, first_step='op1'): tasks = { 'op1': misc_tools.make_assembly_step( env=env, run_time=random.gauss(mu=12, sigma=0.5), route_to='op2'), 'op2': { 'location': env['machine_3'], 'worker': env['technician'], 'manned': False, 'setup_time': random.uniform(a=2, b=5), 'run_time': random.gauss(mu=15, sigma=0.25), 'teardown_time': 0, 'transit_time': 1, 'yield': 0.85, 'route_to_pass': 'op3', 'route_to_fail': 'rework' }, 'op3': { 'location': env['common_process'], 'worker': env['technician'], 'manned': True, 'setup_time': random.triangular(low=1, high=4, mode=2), 'run_time': random.gauss(mu=2, sigma=0.5), 'teardown_time': random.uniform(a=1, b=2), 'transit_time': 1, 'route_to': env['part_c_storage'] }, 'rework': { 'location': env['assembly_bench'], 'worker': env['assembler'], 'manned': True, 'setup_time': 0, 'run_time': random.expovariate(lambd=0.5)*15, 'teardown_time': 0, 'transit_time': 1, 'fail_count': 2, 'route_to_pass': 'op2', 'route_to_fail': env['scrap_storage'] } } return misc_tools.make_steps(first_step=first_step, tasks=tasks) def
(env): return { 'part_a': { 'location': env['part_a_kanban'], 'qty': 1 }, 'part_b': { 'location': env['part_b_kanban'], 'qty': 2 } }
get_bom
suite_test.go
package fluentd import ( "testing" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega"
RunSpecs(t, "Fluend Conf Generation") }
) func TestFluentdConfGenerator(t *testing.T) { RegisterFailHandler(Fail)
analysis.go
// Copyright 2020 ZUP IT SERVICOS EM TECNOLOGIA E INOVACAO SA // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at
// distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package name const ( AnalysisFinish = "analysis-finish" )
// // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software
healthmonitor.py
# Copyright 2017 Rackspace, US Inc. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from octavia.common import constants rules = [ policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_GET_ALL), constants.RULE_API_READ, "List Health Monitors of a Pool", [{'method': 'GET', 'path': '/v2/lbaas/healthmonitors'}] ), policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_GET_ALL_GLOBAL), constants.RULE_API_READ_GLOBAL,
"List Health Monitors including resources owned by others", [{'method': 'GET', 'path': '/v2/lbaas/healthmonitors'}] ), policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_POST), constants.RULE_API_WRITE, "Create a Health Monitor", [{'method': 'POST', 'path': '/v2/lbaas/healthmonitors'}] ), policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_GET_ONE), constants.RULE_API_READ, "Show Health Monitor details", [{'method': 'GET', 'path': '/v2/lbaas/healthmonitors/{healthmonitor_id}'}] ), policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_PUT), constants.RULE_API_WRITE, "Update a Health Monitor", [{'method': 'PUT', 'path': '/v2/lbaas/healthmonitors/{healthmonitor_id}'}] ), policy.DocumentedRuleDefault( '{rbac_obj}{action}'.format(rbac_obj=constants.RBAC_HEALTHMONITOR, action=constants.RBAC_DELETE), constants.RULE_API_WRITE, "Remove a Health Monitor", [{'method': 'DELETE', 'path': '/v2/lbaas/healthmonitors/{healthmonitor_id}'}] ), ] def list_rules(): return rules
gitlab_modwrap.py
from __future__ import (absolute_import, division, print_function) __metaclass__ = type import collections ##from ansible.errors import AnsibleOptionsError, AnsibleModuleError##, AnsibleError ####from ansible.module_utils._text import to_native from ansible.module_utils.six import iteritems, string_types from ansible_collections.smabot.git.plugins.module_utils.plugins.gitlab_action import GitlabBase from ansible_collections.smabot.base.plugins.module_utils.utils.utils import ansible_assert class ActionModule(GitlabBase): def __init__(self, *args, **kwargs): super(ActionModule, self).__init__(*args, **kwargs) self._supports_check_mode = False self._supports_async = False @property def argspec(self):
def run_specific(self, result): cmdret = self.exec_gitlab_module( self.get_taskparam('modname'), modargs=self.get_taskparam('modargs') ) result.update(cmdret) return result
tmp = super(ActionModule, self).argspec tmp.update({ 'modname': (list(string_types)), 'modargs': ([collections.abc.Mapping], {}), }) return tmp
geo-feature-set.serializer.ts
import { Injectable } from '@nestjs/common'; import { PaginationMeta } from '@marxan-api/utils/app-base.service'; import { GeoFeatureSetService } from './geo-feature-set.service'; import { GeoFeatureSetSpecification } from './dto/geo-feature-set-specification.dto'; import { SimpleJobStatus } from '../scenarios/scenario.api.entity'; import { GeoFeatureSetResult } from '@marxan-api/modules/geo-features/geo-feature-set.api.entity'; import { plainToClass } from 'class-transformer'; import { AsyncJobDto } from '@marxan-api/dto/async-job.dto';
async serialize( entities: | Partial<GeoFeatureSetSpecification> | undefined | (Partial<GeoFeatureSetSpecification> | undefined)[], paginationMeta?: PaginationMeta, asyncJobTriggered?: boolean, ): Promise<GeoFeatureSetResult> { return plainToClass(GeoFeatureSetResult, { ...(await this.geoFeatureSetsService.serialize(entities, paginationMeta)), meta: asyncJobTriggered ? AsyncJobDto.forScenario() : undefined, }); } /** * @deprecated */ emptySpecification() { return this.geoFeatureSetsService.serialize({ status: SimpleJobStatus.draft, features: [], }); } }
@Injectable() export class GeoFeatureSetSerializer { constructor(private readonly geoFeatureSetsService: GeoFeatureSetService) {}
discount.ts
import * as ActionTypes from '../../constant/actionTypes' import { fetchDiscountList } from '../../dao' declare var global export function getDiscountList(page = 1, { ddstatus = 'all', pf = 'all', region = 'all', title = '' }) { return dispatch => { return fetchDiscountList({ page, title, ddstatus, region, pf }) .then(response => { dispatch(gotList(response, page)) }).catch(err => { console.error('communityError', err) dispatch(gotListError()) global.toast && global.toast('网络错误', 2000)
} function gotList(argument, page) { return { type: ActionTypes.GET_DISCOUNT_SUCCESS, value: argument, page: page } } function gotListError() { return { type: ActionTypes.GET_DISCOUNT_ERROR } }
}) }
short_weierstrass_jacobian.rs
use crate::curves::models::SWModelParameters as Parameters; use rand::{Rng, distributions::{Standard, Distribution}}; use crate::UniformRand; use std::{ fmt::{Display, Formatter, Result as FmtResult}, io::{Read, Result as IoResult, Write}, marker::PhantomData, }; use crate::{ bytes::{FromBytes, ToBytes}, curves::{AffineCurve, ProjectiveCurve}, fields::{BitIterator, Field, PrimeField, SquareRootField}, }; use std::ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign}; #[derive(Derivative)] #[derivative( Copy(bound = "P: Parameters"), Clone(bound = "P: Parameters"), PartialEq(bound = "P: Parameters"), Eq(bound = "P: Parameters"), Debug(bound = "P: Parameters"), Hash(bound = "P: Parameters") )] pub struct GroupAffine<P: Parameters> { pub x: P::BaseField, pub y: P::BaseField, pub infinity: bool, #[derivative(Debug = "ignore")] _params: PhantomData<P>, } impl<P: Parameters> Display for GroupAffine<P> { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { if self.infinity { write!(f, "GroupAffine(Infinity)") } else { write!(f, "GroupAffine(x={}, y={})", self.x, self.y) } } } impl<P: Parameters> GroupAffine<P> { pub fn new(x: P::BaseField, y: P::BaseField, infinity: bool) -> Self { Self { x, y, infinity, _params: PhantomData, } } pub fn scale_by_cofactor(&self) -> GroupProjective<P> { let cofactor = BitIterator::new(P::COFACTOR); self.mul_bits(cofactor) } pub(crate) fn mul_bits<S: AsRef<[u64]>>(&self, bits: BitIterator<S>) -> GroupProjective<P> { let mut res = GroupProjective::zero(); for i in bits { res.double_in_place(); if i { res.add_assign_mixed(self) } } res } /// Attempts to construct an affine point given an x-coordinate. The /// point is not guaranteed to be in the prime order subgroup. /// /// If and only if `greatest` is set will the lexicographically /// largest y-coordinate be selected. 
#[allow(dead_code)] pub(crate) fn get_point_from_x(x: P::BaseField, greatest: bool) -> Option<Self> { // Compute x^3 + ax + b let x3b = P::add_b(&((x.square() * &x) + &P::mul_by_a(&x))); x3b.sqrt().map(|y| { let negy = -y; let y = if (y < negy) ^ greatest { y } else { negy }; Self::new(x, y, false) }) } pub fn is_on_curve(&self) -> bool { if self.is_zero() { true } else { // Check that the point is on the curve let y2 = self.y.square(); let x3b = P::add_b(&((self.x.square() * &self.x) + &P::mul_by_a(&self.x))); y2 == x3b } } pub fn is_in_correct_subgroup_assuming_on_curve(&self) -> bool { self.mul_bits(BitIterator::new(P::ScalarField::characteristic())) .is_zero() } } impl<P: Parameters> AffineCurve for GroupAffine<P> { type BaseField = P::BaseField; type ScalarField = P::ScalarField; type Projective = GroupProjective<P>; #[inline] fn zero() -> Self { Self::new(Self::BaseField::zero(), Self::BaseField::one(), true) } #[inline] fn prime_subgroup_generator() -> Self { Self::new( P::AFFINE_GENERATOR_COEFFS.0, P::AFFINE_GENERATOR_COEFFS.1, false, ) } #[inline] fn is_zero(&self) -> bool { self.infinity } #[inline] fn mul<S: Into<<Self::ScalarField as PrimeField>::BigInt>>(&self, by: S) -> GroupProjective<P> { let bits = BitIterator::new(by.into()); self.mul_bits(bits) } fn mul_by_cofactor(&self) -> Self { self.scale_by_cofactor().into() } fn mul_by_cofactor_inv(&self) -> Self { self.mul(P::COFACTOR_INV).into() } #[inline] fn into_projective(&self) -> GroupProjective<P> { (*self).into() } } impl<P: Parameters> Neg for GroupAffine<P> { type Output = Self; #[inline] fn neg(self) -> Self { if !self.is_zero() { Self::new(self.x, -self.y, false) } else { self } } } impl<P: Parameters> ToBytes for GroupAffine<P> { #[inline] fn write<W: Write>(&self, mut writer: W) -> IoResult<()> { self.x.write(&mut writer)?; self.y.write(&mut writer) } } impl<P: Parameters> FromBytes for GroupAffine<P> { #[inline] fn read<R: Read>(mut reader: R) -> IoResult<Self> { let x = P::BaseField::read(&mut reader)?; let y = P::BaseField::read(&mut reader)?; let infinity = x.is_zero() && y.is_one(); Ok(Self::new(x, y, infinity)) } } impl<P: Parameters> Default for GroupAffine<P> { #[inline] fn default() -> Self { Self::zero() } } #[derive(Derivative)] #[derivative( Copy(bound = "P: Parameters"), Clone(bound = "P: Parameters"), Eq(bound = "P: Parameters"), Debug(bound = "P: Parameters"), Hash(bound = "P: Parameters") )] pub struct GroupProjective<P: Parameters> { pub x: P::BaseField, pub y: P::BaseField, pub z: P::BaseField, _params: PhantomData<P>, } impl<P: Parameters> Display for GroupProjective<P> { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { write!(f, "{}", self.into_affine()) } } impl<P: Parameters> PartialEq for GroupProjective<P> { fn eq(&self, other: &Self) -> bool { if self.is_zero() { return other.is_zero(); } if other.is_zero() { return false; } // The points (X, Y, Z) and (X', Y', Z') // are equal when (X * Z^2) = (X' * Z'^2) // and (Y * Z^3) = (Y' * Z'^3). 
let z1 = self.z.square(); let z2 = other.z.square(); if self.x * &z2 != other.x * &z1 { false } else { self.y * &(z2 * &other.z) == other.y * &(z1 * &self.z) } } } impl<P: Parameters> Distribution<GroupProjective<P>> for Standard { #[inline] fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> GroupProjective<P> { let res = GroupProjective::prime_subgroup_generator() * &P::ScalarField::rand(rng); debug_assert!(res.into_affine().is_in_correct_subgroup_assuming_on_curve()); res } } impl<P: Parameters> ToBytes for GroupProjective<P> { #[inline] fn write<W: Write>(&self, mut writer: W) -> IoResult<()> { self.x.write(&mut writer)?; self.y.write(&mut writer)?; self.z.write(writer) } } impl<P: Parameters> FromBytes for GroupProjective<P> { #[inline] fn read<R: Read>(mut reader: R) -> IoResult<Self> { let x = P::BaseField::read(&mut reader)?; let y = P::BaseField::read(&mut reader)?; let z = P::BaseField::read(reader)?; Ok(Self::new(x, y, z)) } } impl<P: Parameters> Default for GroupProjective<P> { #[inline] fn default() -> Self { Self::zero() } } impl<P: Parameters> GroupProjective<P> { pub fn new(x: P::BaseField, y: P::BaseField, z: P::BaseField) -> Self { Self { x, y, z, _params: PhantomData, } } } impl<P: Parameters> ProjectiveCurve for GroupProjective<P> { type BaseField = P::BaseField; type ScalarField = P::ScalarField; type Affine = GroupAffine<P>; // The point at infinity is always represented by // Z = 0. #[inline] fn zero() -> Self { Self::new( P::BaseField::zero(), P::BaseField::one(), P::BaseField::zero(), ) } #[inline] fn prime_subgroup_generator() -> Self { GroupAffine::prime_subgroup_generator().into() } // The point at infinity is always represented by // Z = 0. #[inline] fn is_zero(&self) -> bool { self.z.is_zero() } #[inline] fn is_normalized(&self) -> bool { self.is_zero() || self.z.is_one() } #[inline]
// First pass: compute [a, ab, abc, ...] let mut prod = Vec::with_capacity(v.len()); let mut tmp = P::BaseField::one(); for g in v.iter_mut() // Ignore normalized elements .filter(|g| !g.is_normalized()) { tmp.mul_assign(&g.z); prod.push(tmp); } // Invert `tmp`. tmp = tmp.inverse().unwrap(); // Guaranteed to be nonzero. // Second pass: iterate backwards to compute inverses for (g, s) in v.iter_mut() // Backwards .rev() // Ignore normalized elements .filter(|g| !g.is_normalized()) // Backwards, skip last element, fill in one for last term. .zip(prod.into_iter().rev().skip(1).chain(Some(P::BaseField::one()))) { // tmp := tmp * g.z; g.z := tmp * s = 1/z let newtmp = tmp * &g.z; g.z = tmp * &s; tmp = newtmp; } #[cfg(not(feature = "parallel"))] { // Perform affine transformations for g in v.iter_mut().filter(|g| !g.is_normalized()) { let z2 = g.z.square(); // 1/z g.x *= &z2; // x/z^2 g.y *= &(z2 * &g.z); // y/z^3 g.z = P::BaseField::one(); // z = 1 } } #[cfg(feature = "parallel")] { use rayon::prelude::*; // Perform affine transformations v.par_iter_mut() .filter(|g| !g.is_normalized()) .for_each(|g| { let z2 = g.z.square(); // 1/z g.x *= &z2; // x/z^2 g.y *= &(z2 * &g.z); // y/z^3 g.z = P::BaseField::one(); // z = 1 }); } } fn double_in_place(&mut self) -> &mut Self { if self.is_zero() { return self; } if P::COEFF_A.is_zero() { // A = X1^2 let mut a = self.x.square(); // B = Y1^2 let b = self.y.square(); // C = B^2 let mut c = b.square(); // D = 2*((X1+B)2-A-C) let d = ((self.x + &b).square() - &a - &c).double(); // E = 3*A let e = a + a.double_in_place(); // F = E^2 let f = e.square(); // Z3 = 2*Y1*Z1 self.z.mul_assign(&self.y); self.z.double_in_place(); // X3 = F-2*D self.x = f - &d - &d; // Y3 = E*(D-X3)-8*C self.y = (d - &self.x) * &e - c.double_in_place().double_in_place().double_in_place(); self } else { // http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#doubling-dbl-2009-l // XX = X1^2 let xx = self.x.square(); // YY = Y1^2 let yy = self.y.square(); // YYYY = YY^2 let mut yyyy = yy.square(); // ZZ = Z1^2 let zz = self.z.square(); // S = 2*((X1+YY)^2-XX-YYYY) let s = ((self.x + &yy).square() - &xx - &yyyy).double(); // M = 3*XX+a*ZZ^2 let m = xx + &xx + &xx + &P::mul_by_a(&zz.square()); // T = M^2-2*S let t = m.square() - &s.double(); // X3 = T self.x = t; // Y3 = M*(S-T)-8*YYYY let old_y = self.y; self.y = m * &(s - &t) - yyyy.double_in_place().double_in_place().double_in_place(); // Z3 = (Y1+Z1)^2-YY-ZZ self.z = (old_y + &self.z).square() - &yy - &zz; self } } fn add_assign_mixed(&mut self, other: &Self::Affine) { if other.is_zero() { return; } if self.is_zero() { self.x = other.x; self.y = other.y; self.z = P::BaseField::one(); return; } // http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-madd-2007-bl // Works for all curves. // Z1Z1 = Z1^2 let z1z1 = self.z.square(); // U2 = X2*Z1Z1 let u2 = other.x * &z1z1; // S2 = Y2*Z1*Z1Z1 let s2 = (other.y * &self.z) * &z1z1; if self.x == u2 && self.y == s2 { // The two points are equal, so we double. self.double_in_place(); } else { // If we're adding -a and a together, self.z becomes zero as H becomes zero. 
// H = U2-X1 let h = u2 - &self.x; // HH = H^2 let hh = h.square(); // I = 4*HH let mut i = hh; i.double_in_place().double_in_place(); // J = H*I let mut j = h * &i; // r = 2*(S2-Y1) let r = (s2 - &self.y).double(); // V = X1*I let v = self.x * &i; // X3 = r^2 - J - 2*V self.x = r.square(); self.x -= &j; self.x -= &v; self.x -= &v; // Y3 = r*(V-X3)-2*Y1*J j *= &self.y; // J = 2*Y1*J j.double_in_place(); self.y = v - &self.x; self.y *= &r; self.y -= &j; // Z3 = (Z1+H)^2-Z1Z1-HH self.z += &h; self.z.square_in_place(); self.z -= &z1z1; self.z -= &hh; } } fn mul_assign<S: Into<<Self::ScalarField as PrimeField>::BigInt>>(&mut self, other: S) { let mut res = Self::zero(); let mut found_one = false; for i in BitIterator::new(other.into()) { if found_one { res.double_in_place(); } else { found_one = i; } if i { res.add_assign(self); } } *self = res; } #[inline] fn into_affine(&self) -> GroupAffine<P> { (*self).into() } #[inline] fn recommended_wnaf_for_scalar(scalar: <Self::ScalarField as PrimeField>::BigInt) -> usize { P::empirical_recommended_wnaf_for_scalar(scalar) } #[inline] fn recommended_wnaf_for_num_scalars(num_scalars: usize) -> usize { P::empirical_recommended_wnaf_for_num_scalars(num_scalars) } } impl<P: Parameters> Neg for GroupProjective<P> { type Output = Self; #[inline] fn neg(self) -> Self { if !self.is_zero() { Self::new(self.x, -self.y, self.z) } else { self } } } impl<'a, P: Parameters> Add<&'a Self> for GroupProjective<P> { type Output = Self; #[inline] fn add(self, other: &'a Self) -> Self { let mut copy = self; copy += other; copy } } impl<'a, P: Parameters> AddAssign<&'a Self> for GroupProjective<P> { fn add_assign(&mut self, other: &'a Self) { if self.is_zero() { *self = *other; return; } if other.is_zero() { return; } // http://www.hyperelliptic.org/EFD/g1p/auto-shortw-jacobian-0.html#addition-add-2007-bl // Works for all curves. // Z1Z1 = Z1^2 let z1z1 = self.z.square(); // Z2Z2 = Z2^2 let z2z2 = other.z.square(); // U1 = X1*Z2Z2 let u1 = self.x * &z2z2; // U2 = X2*Z1Z1 let u2 = other.x * &z1z1; // S1 = Y1*Z2*Z2Z2 let s1 = self.y * &other.z * &z2z2; // S2 = Y2*Z1*Z1Z1 let s2 = other.y * &self.z * &z1z1; if u1 == u2 && s1 == s2 { // The two points are equal, so we double. self.double_in_place(); } else { // If we're adding -a and a together, self.z becomes zero as H becomes zero. 
// H = U2-U1 let h = u2 - &u1; // I = (2*H)^2 let i = (h.double()).square(); // J = H*I let j = h * &i; // r = 2*(S2-S1) let r = (s2 - &s1).double(); // V = U1*I let v = u1 * &i; // X3 = r^2 - J - 2*V self.x = r.square() - &j - &(v.double()); // Y3 = r*(V - X3) - 2*S1*J self.y = r * &(v - &self.x) - (s1 * &j).double_in_place(); // Z3 = ((Z1+Z2)^2 - Z1Z1 - Z2Z2)*H self.z = ((self.z + &other.z).square() - &z1z1 - &z2z2) * &h; } } } impl<'a, P: Parameters> Sub<&'a Self> for GroupProjective<P> { type Output = Self; #[inline] fn sub(self, other: &'a Self) -> Self { let mut copy = self; copy -= other; copy } } impl<'a, P: Parameters> SubAssign<&'a Self> for GroupProjective<P> { fn sub_assign(&mut self, other: &'a Self) { *self += &(-(*other)); } } impl<'a, P: Parameters> Mul<&'a P::ScalarField> for GroupProjective<P> { type Output = Self; #[inline] fn mul(self, other: &'a P::ScalarField) -> Self { let mut copy = self; copy *= other; copy } } impl<'a, P: Parameters> MulAssign<&'a P::ScalarField> for GroupProjective<P> { #[inline] fn mul_assign(&mut self, other: &'a P::ScalarField) { <GroupProjective<P> as ProjectiveCurve>::mul_assign(self, *other); } } // The affine point X, Y is represented in the Jacobian // coordinates with Z = 1. impl<P: Parameters> From<GroupAffine<P>> for GroupProjective<P> { #[inline] fn from(p: GroupAffine<P>) -> GroupProjective<P> { if p.is_zero() { Self::zero() } else { Self::new(p.x, p.y, P::BaseField::one()) } } } // The projective point X, Y, Z is represented in the affine // coordinates as X/Z^2, Y/Z^3. impl<P: Parameters> From<GroupProjective<P>> for GroupAffine<P> { #[inline] fn from(p: GroupProjective<P>) -> GroupAffine<P> { if p.is_zero() { GroupAffine::zero() } else if p.z.is_one() { // If Z is one, the point is already normalized. GroupAffine::new(p.x, p.y, false) } else { // Z is nonzero, so it must have an inverse in a field. let zinv = p.z.inverse().unwrap(); let zinv_squared = zinv.square(); // X/Z^2 let x = p.x * &zinv_squared; // Y/Z^3 let y = p.y * &(zinv_squared * &zinv); GroupAffine::new(x, y, false) } } }
fn batch_normalization(v: &mut [Self]) { // Montgomery’s Trick and Fast Implementation of Masked AES // Genelle, Prouff and Quisquater // Section 3.2
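The `batch_normalization` routine above converts many Jacobian points (X, Y, Z) to affine form (X/Z^2, Y/Z^3) while paying for only one field inversion: it builds the running products a, ab, abc, ..., inverts the final product once, then walks backwards so each element's inverse is peeled off with two multiplications (Montgomery's trick, per the citation in the comment). A self-contained sketch of the same two-pass idea over integers modulo a small prime, with Fermat inversion standing in for the field inverse (not tied to the curve types above; all names here are illustrative), is:

const P: u64 = 1_000_000_007; // small prime standing in for the base field

fn mul(a: u64, b: u64) -> u64 {
    (a as u128 * b as u128 % P as u128) as u64
}

// Fermat's little theorem: a^(p-2) mod p is the inverse of a (for a != 0).
fn inv(mut a: u64) -> u64 {
    let mut e = P - 2;
    let mut acc = 1u64;
    while e > 0 {
        if e & 1 == 1 {
            acc = mul(acc, a);
        }
        a = mul(a, a);
        e >>= 1;
    }
    acc
}

// Montgomery's trick: invert every (nonzero) element with a single `inv` call.
fn batch_invert(v: &mut [u64]) {
    // First pass: prefix products [a, ab, abc, ...].
    let mut prods = Vec::with_capacity(v.len());
    let mut acc = 1u64;
    for &x in v.iter() {
        acc = mul(acc, x);
        prods.push(acc);
    }
    // One inversion of the total product.
    let mut acc_inv = inv(acc);
    // Second pass, backwards: peel off one inverse at a time.
    for i in (0..v.len()).rev() {
        let prefix = if i == 0 { 1 } else { prods[i - 1] };
        let x_inv = mul(acc_inv, prefix); // (a..x)^-1 * (a..w) = x^-1
        acc_inv = mul(acc_inv, v[i]);     // drop x from the running inverse
        v[i] = x_inv;
    }
}

fn main() {
    let mut v = vec![2, 3, 5, 7];
    let orig = v.clone();
    batch_invert(&mut v);
    for (x, xi) in orig.iter().zip(v.iter()) {
        assert_eq!(mul(*x, *xi), 1);
    }
}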
fixture.rs
#![deny(warnings)] #![allow(clippy::if_same_then_else)] #![allow(clippy::needless_update)] #![allow(clippy::redundant_clone)] #![allow(clippy::while_let_on_iterator)] use std::{fs, mem::take, path::PathBuf}; use serde_json::Value; use swc_atoms::JsWord; use swc_common::{ collections::AHashSet, errors::Handler, input::{SourceFileInput, StringInput}, BytePos, Spanned, }; use swc_html_ast::*; use swc_html_parser::{ error::ErrorKind, lexer::{Lexer, State}, parser::{input::ParserInput, PResult, Parser, ParserConfig}, }; use swc_html_visit::{Visit, VisitMut, VisitMutWith, VisitWith}; use testing::NormalizedOutput; fn document_test(input: PathBuf, config: ParserConfig) { testing::run_test2(false, |cm, handler| { let json_path = input.parent().unwrap().join("output.json"); let fm = cm.load_file(&input).unwrap(); let lexer = Lexer::new(SourceFileInput::from(&*fm)); let mut parser = Parser::new(lexer, config); let document: PResult<Document> = parser.parse_document(); let errors = parser.take_errors(); for err in &errors { err.to_diagnostics(&handler).emit(); } if !errors.is_empty() { return Err(()); } match document { Ok(document) => { let actual_json = serde_json::to_string_pretty(&document) .map(NormalizedOutput::from) .expect("failed to serialize document"); actual_json.compare_to_file(&json_path).unwrap(); Ok(()) } Err(err) => { let mut d = err.to_diagnostics(&handler); d.note(&format!("current token = {}", parser.dump_cur())); d.emit(); Err(()) } } }) .unwrap(); } fn document_recovery_test(input: PathBuf, config: ParserConfig) { let stderr_path = input.parent().unwrap().join("output.stderr"); let mut recovered = false; let stderr = testing::run_test2(false, |cm, handler| { // Type annotation if false { return Ok(()); } let json_path = input.parent().unwrap().join("output.json"); let fm = cm.load_file(&input).unwrap(); let lexer = Lexer::new(SourceFileInput::from(&*fm)); let mut parser = Parser::new(lexer, config); let document: PResult<Document> = parser.parse_document(); let errors = parser.take_errors(); for err in &errors { err.to_diagnostics(&handler).emit(); } if !errors.is_empty() { recovered = true; } match document { Ok(document) => { let actual_json = serde_json::to_string_pretty(&document) .map(NormalizedOutput::from) .expect("failed to serialize document"); actual_json.compare_to_file(&json_path).unwrap(); Err(()) } Err(err) => { let mut d = err.to_diagnostics(&handler); d.note(&format!("current token = {}", parser.dump_cur())); d.emit(); Err(()) } } }) .unwrap_err(); if !recovered { panic!( "Parser should emit errors (recover mode), but parser parsed everything successfully \ {}", stderr ); } stderr.compare_to_file(&stderr_path).unwrap(); } fn document_span_visualizer(input: PathBuf, config: ParserConfig) { let dir = input.parent().unwrap().to_path_buf(); let output = testing::run_test2(false, |cm, handler| { // Type annotation if false { return Ok(()); } let fm = cm.load_file(&input).unwrap(); let lexer = Lexer::new(SourceFileInput::from(&*fm)); let mut parser = Parser::new(lexer, config); let document: PResult<Document> = parser.parse_document();
Err(()) } Err(err) => { let mut d = err.to_diagnostics(&handler); d.note(&format!("current token = {}", parser.dump_cur())); d.emit(); panic!(); } } }) .unwrap_err(); output .compare_to_file(&dir.join("span.rust-debug")) .unwrap(); } fn document_dom_visualizer(input: PathBuf, config: ParserConfig) { let dir = input.parent().unwrap().to_path_buf(); testing::run_test2(false, |cm, handler| { // Type annotation if false { return Ok(()); } let fm = cm.load_file(&input).unwrap(); let lexer = Lexer::new(SourceFileInput::from(&*fm)); let mut parser = Parser::new(lexer, config); let document: PResult<Document> = parser.parse_document(); match document { Ok(mut document) => { let mut dom_buf = String::new(); document.visit_mut_with(&mut DomVisualizer { dom_buf: &mut dom_buf, indent: 0, }); NormalizedOutput::from(dom_buf) .compare_to_file(&dir.join("dom.rust-debug")) .unwrap(); Ok(()) } Err(err) => { let mut d = err.to_diagnostics(&handler); d.note(&format!("current token = {}", parser.dump_cur())); d.emit(); panic!(); } } }) .unwrap(); } struct SpanVisualizer<'a> { handler: &'a Handler, } macro_rules! mtd { ($T:ty,$name:ident) => { fn $name(&mut self, n: &$T) { let span = n.span(); // We should not have dummy span in original parsing if span.lo == BytePos(0) && span.hi == BytePos(0) { panic!("Broken span"); } self.handler.struct_span_err(span, stringify!($T)).emit(); n.visit_children_with(self); } }; } impl Visit for SpanVisualizer<'_> { mtd!(Document, visit_document); mtd!(DocumentFragment, visit_document_fragment); mtd!(Child, visit_child); mtd!(DocumentType, visit_document_type); mtd!(Element, visit_element); mtd!(Attribute, visit_attribute); mtd!(Text, visit_text); mtd!(Comment, visit_comment); fn visit_token_and_span(&mut self, n: &TokenAndSpan) { self.handler .struct_span_err(n.span, &format!("{:?}", n.token)) .emit(); } } struct DomVisualizer<'a> { dom_buf: &'a mut String, indent: usize, } impl DomVisualizer<'_> { fn get_ident(&self) -> String { let mut indent = String::new(); indent.push_str("| "); indent.push_str(&" ".repeat(self.indent)); indent } } impl VisitMut for DomVisualizer<'_> { fn visit_mut_document_type(&mut self, n: &mut DocumentType) { let mut document_type = String::new(); document_type.push_str(&self.get_ident()); document_type.push_str("<!DOCTYPE "); if let Some(name) = &n.name { document_type.push_str(name); } if let Some(public_id) = &n.public_id { document_type.push(' '); document_type.push('"'); document_type.push_str(public_id); document_type.push('"'); if let Some(system_id) = &n.system_id { document_type.push(' '); document_type.push('"'); document_type.push_str(system_id); document_type.push('"'); } else { document_type.push(' '); document_type.push('"'); document_type.push('"'); } } else if let Some(system_id) = &n.system_id { document_type.push(' '); document_type.push('"'); document_type.push('"'); document_type.push(' '); document_type.push('"'); document_type.push_str(system_id); document_type.push('"'); } document_type.push('>'); document_type.push('\n'); self.dom_buf.push_str(&document_type); n.visit_mut_children_with(self); } fn visit_mut_element(&mut self, n: &mut Element) { let mut element = String::new(); element.push_str(&self.get_ident()); element.push('<'); match n.namespace { Namespace::SVG => { element.push_str("svg "); } Namespace::MATHML => { element.push_str("math "); } _ => {} } element.push_str(&n.tag_name); element.push('>'); element.push('\n'); let is_template = n.namespace == Namespace::HTML && &*n.tag_name == "template"; if is_template { 
self.indent += 1; element.push_str(&self.get_ident()); element.push_str("content"); element.push('\n'); } n.attributes .sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap()); self.dom_buf.push_str(&element); let old_indent = self.indent; self.indent += 1; n.visit_mut_children_with(self); self.indent = old_indent; if is_template { self.indent -= 1; } } fn visit_mut_attribute(&mut self, n: &mut Attribute) { let mut attribute = String::new(); attribute.push_str(&self.get_ident()); if let Some(prefix) = &n.prefix { attribute.push_str(prefix); attribute.push(' '); } attribute.push_str(&n.name); attribute.push('='); attribute.push('"'); if let Some(value) = &n.value { attribute.push_str(value); } attribute.push('"'); attribute.push('\n'); self.dom_buf.push_str(&attribute); n.visit_mut_children_with(self); } fn visit_mut_text(&mut self, n: &mut Text) { let mut text = String::new(); text.push_str(&self.get_ident()); text.push('"'); text.push_str(&n.value); text.push('"'); text.push('\n'); self.dom_buf.push_str(&text); n.visit_mut_children_with(self); } fn visit_mut_comment(&mut self, n: &mut Comment) { let mut comment = String::new(); comment.push_str(&self.get_ident()); comment.push_str("<!-- "); comment.push_str(&n.data); comment.push_str(" -->"); comment.push('\n'); self.dom_buf.push_str(&comment); n.visit_mut_children_with(self); } } #[testing::fixture("tests/fixture/**/*.html")] fn pass(input: PathBuf) { document_test( input, ParserConfig { ..Default::default() }, ) } #[testing::fixture("tests/iframe_srcdoc/**/*.html")] fn pass_iframe_srcdoc(input: PathBuf) { document_test( input, ParserConfig { iframe_srcdoc: true, ..Default::default() }, ) } #[testing::fixture("tests/recovery/**/*.html")] fn recovery(input: PathBuf) { document_recovery_test( input, ParserConfig { ..Default::default() }, ) } #[testing::fixture("tests/fixture/**/*.html")] #[testing::fixture("tests/recovery/**/*.html")] #[testing::fixture("tests/iframe_srcdoc/**/*.html")] fn span_visualizer(input: PathBuf) { document_span_visualizer( input, ParserConfig { ..Default::default() }, ) } #[testing::fixture("tests/fixture/**/*.html")] #[testing::fixture("tests/recovery/**/*.html")] #[testing::fixture("tests/iframe_srcdoc/**/*.html")] fn dom_visualizer(input: PathBuf) { document_dom_visualizer( input, ParserConfig { ..Default::default() }, ) } // TODO add span visualizer for html5test_lib tests fn unescape(s: &str) -> Option<String> { let mut out = String::with_capacity(s.len()); let mut it = s.chars().peekable(); loop { match it.next() { None => return Some(out), Some('\\') => { if it.peek() != Some(&'u') { panic!("can't understand escape"); } let hex: String = it.by_ref().take(4).collect(); match u32::from_str_radix(&hex, 16).ok().and_then(char::from_u32) { // TODO fix me surrogate paris // Some of the tests use lone surrogates, but we have no // way to represent them in the UTF-8 input to our parser. // Since these can only come from script, we will catch // them there. 
None => return None, Some(c) => out.push(c), } } Some(c) => out.push(c), } } } // TODO we need to enable `preserve_order` for serde, but we can't https://github.com/tkaitchuck/aHash/issues/95, so we sort attributes #[testing::fixture("tests/html5lib-tests/tokenizer/**/*.test")] fn html5lib_test_tokenizer(input: PathBuf) { let filename = input.to_str().expect("failed to parse path"); let contents = fs::read_to_string(filename).expect("Something went wrong reading the file"); let obj: Value = serde_json::from_str(&contents).expect("json parse error"); let tests = match obj.get(&"tests".to_string()) { Some(&Value::Array(ref tests)) => tests, _ => return, }; for test in tests.iter() { let description = test .get("description") .expect("failed to get input in test"); let states = if let Some(initial_states) = test.get("initialStates") { let mut states = vec![]; let json_states: Vec<String> = serde_json::from_value(initial_states.clone()) .expect("failed to get input in test"); for json_state in json_states { match &*json_state { "Data state" => { states.push(State::Data); } "PLAINTEXT state" => { states.push(State::PlainText); } "RCDATA state" => { states.push(State::Rcdata); } "RAWTEXT state" => { states.push(State::Rawtext); } "Script data state" => { states.push(State::ScriptData); } "CDATA section state" => { states.push(State::CdataSection); } _ => { unreachable!() } } } states } else { vec![State::Data] }; for state in states.iter() { eprintln!("==== ==== Description ==== ====\n{}\n", description); let json_input = test["input"].clone(); let mut input: String = serde_json::from_value(json_input).expect("failed to get input in test"); let need_double_escaped = test.get("doubleEscaped").is_some(); if need_double_escaped { input = match unescape(&input) { Some(unescaped) => unescaped, _ => { continue; } }; } eprintln!("==== ==== Input ==== ====\n{}\n", input); let json_output = test["output"].clone(); let output = json_output.to_string(); eprintln!("==== ==== Output ==== ====\n{}\n", output); let lexer_str_input = StringInput::new(&input, BytePos(0), BytePos(input.len() as u32)); let mut lexer = Lexer::new(lexer_str_input); lexer.set_input_state(state.clone()); if let Some(last_start_tag) = test.get("lastStartTag") { let last_start_tag: String = serde_json::from_value(last_start_tag.clone()) .expect("failed to get lastStartTag in test"); lexer.set_last_start_tag_name(&last_start_tag); } let mut actual_tokens = vec![]; loop { let token_and_span = lexer.next(); if token_and_span.is_none() { break; } let mut new_token = token_and_span.unwrap().token.clone(); match new_token { Token::Doctype { ref mut raw_keyword, ref mut raw_name, ref mut public_quote, ref mut raw_public_keyword, ref mut system_quote, ref mut raw_system_keyword, .. } => { *raw_keyword = None; *raw_name = None; *public_quote = None; *raw_public_keyword = None; *system_quote = None; *raw_system_keyword = None; } Token::StartTag { ref mut raw_tag_name, ref mut attributes, .. 
} => { *raw_tag_name = None; let mut new_attributes = vec![]; let mut already_seen: AHashSet<JsWord> = Default::default(); for mut attribute in take(attributes) { if already_seen.contains(&attribute.name) { continue; } already_seen.insert(attribute.name.clone()); if attribute.value.is_none() { attribute.value = Some("".into()); } attribute.span = Default::default(); attribute.raw_name = None; attribute.raw_value = None; new_attributes.push(attribute); } new_attributes.sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap()); *attributes = new_attributes; } Token::EndTag { ref mut raw_tag_name, ref mut attributes, ref mut self_closing, .. } => { *raw_tag_name = None; *self_closing = false; *attributes = vec![]; } Token::Character { ref mut raw, .. } => { *raw = None; } _ => {} } actual_tokens.push(new_token); } let mut expected_tokens: Vec<Token> = vec![]; if let Some(output_tokens) = json_output.as_array() { for output_token in output_tokens { match output_token { Value::Array(token_parts) => { let tokens = match &*token_parts[0].as_str().expect("failed") { "DOCTYPE" => { let name: Option<String> = serde_json::from_value(token_parts[1].clone()) .expect("failed to deserialize"); let public_id: Option<String> = serde_json::from_value(token_parts[2].clone()) .expect("failed to deserialize"); let system_id: Option<String> = serde_json::from_value(token_parts[3].clone()) .expect("failed to deserialize"); let correctness: bool = serde_json::from_value(token_parts[4].clone()) .expect("failed to deserialize"); vec![Token::Doctype { raw_keyword: None, name: name.map(|v| v.into()), raw_name: None, force_quirks: !correctness, raw_public_keyword: None, public_quote: None, public_id: public_id.map(|v| v.into()), raw_system_keyword: None, system_quote: None, system_id: system_id.map(|v| v.into()), }] } "StartTag" => { let tag_name: String = serde_json::from_value(token_parts[1].clone()) .expect("failed to deserialize"); let mut attributes = vec![]; if let Some(json_attributes) = token_parts.get(2) { let obj_attributes: Value = serde_json::from_value(json_attributes.clone()) .expect("failed to deserialize"); match obj_attributes { Value::Object(obj) => { for key in obj.keys() { let json_value = obj.get(key).expect( "failed to get value for attribute", ); let value: Option<String> = serde_json::from_value(json_value.clone()) .expect("failed to deserialize"); attributes.push(AttributeToken { span: Default::default(), name: key.clone().into(), raw_name: None, value: value.map(|v| v.into()), raw_value: None, }) } } _ => { unreachable!(); } } } let mut self_closing = false; if let Some(json_self_closing) = token_parts.get(3) { let value: bool = serde_json::from_value(json_self_closing.clone()) .expect("failed to deserialize"); self_closing = value; } attributes.sort_by(|a, b| a.name.partial_cmp(&b.name).unwrap()); vec![Token::StartTag { tag_name: tag_name.into(), raw_tag_name: None, self_closing, attributes, }] } "EndTag" => { let tag_name: String = serde_json::from_value(token_parts[1].clone()) .expect("failed to deserialize"); vec![Token::EndTag { tag_name: tag_name.into(), raw_tag_name: None, self_closing: false, attributes: vec![], }] } "Character" => { let mut data: String = serde_json::from_value(token_parts[1].clone()) .expect("failed to deserialize"); if need_double_escaped { data = match unescape(&data) { Some(v) => v, _ => { continue; } }; } let mut tokens = vec![]; for c in data.chars() { tokens.push(Token::Character { value: c, raw: None, }) } tokens } "Comment" => { let mut data: String = 
serde_json::from_value(token_parts[1].clone()) .expect("failed to deserialize"); if need_double_escaped { data = match unescape(&data) { Some(v) => v, _ => { continue; } }; } vec![Token::Comment { data: data.into() }] } _ => { unreachable!("unknown token {}", token_parts[0]) } }; expected_tokens.extend(tokens); } _ => { unreachable!(); } } } } let actual = serde_json::to_string(&actual_tokens).expect("failed to serialize actual tokens"); let expected = serde_json::to_string(&expected_tokens) .expect("failed to serialize expected tokens"); if let Some(json_errors) = test.get("errors") { let expected_errors = json_errors.as_array().expect("failed to deserialize error"); let actual_errors = lexer.take_errors(); eprintln!("==== ==== Errors ==== ====\n{:?}\n", actual_errors); assert_eq!(actual_errors.len(), expected_errors.len()); for expected_error in expected_errors.iter() { let obj_expected_code = expected_error.as_object().expect("failed to get error"); let expected_code = match obj_expected_code.get("code") { Some(expected_code) => match expected_code.as_str() { Some("eof-in-doctype") => ErrorKind::EofInDoctype, Some("eof-in-comment") => ErrorKind::EofInComment, Some("eof-in-cdata") => ErrorKind::EofInCdata, Some("eof-in-tag") => ErrorKind::EofInTag, Some("eof-before-tag-name") => ErrorKind::EofBeforeTagName, Some("eof-in-script-html-comment-like-text") => { ErrorKind::EofInScriptHtmlCommentLikeText } Some("unknown-named-character-reference") => { ErrorKind::UnknownNamedCharacterReference } Some("incorrectly-opened-comment") => { ErrorKind::IncorrectlyOpenedComment } Some("abrupt-closing-of-empty-comment") => { ErrorKind::AbruptClosingOfEmptyComment } Some("abrupt-doctype-public-identifier") => { ErrorKind::AbruptDoctypePublicIdentifier } Some("abrupt-doctype-system-identifier") => { ErrorKind::AbruptDoctypeSystemIdentifier } Some("absence-of-digits-in-numeric-character-reference") => { ErrorKind::AbsenceOfDigitsInNumericCharacterReference } Some("surrogate-character-reference") => { ErrorKind::SurrogateCharacterReference } Some("nested-comment") => ErrorKind::NestedComment, Some("end-tag-with-trailing-solidus") => { ErrorKind::EndTagWithTrailingSolidus } Some("null-character-reference") => ErrorKind::NullCharacterReference, Some("cdata-in-html-content") => ErrorKind::CdataInHtmlContent, Some("character-reference-outside-unicode-range") => { ErrorKind::CharacterReferenceOutsideUnicodeRange } Some("control-character-in-input-stream") => { ErrorKind::ControlCharacterInInputStream } Some("control-character-reference") => { ErrorKind::ControlCharacterReference } Some("noncharacter-in-input-stream") => { ErrorKind::NoncharacterInInputStream } Some("noncharacter-character-reference") => { ErrorKind::NoncharacterCharacterReference } Some("unexpected-equals-sign-before-attribute-name") => { ErrorKind::UnexpectedEqualsSignBeforeAttributeName } Some("unexpected-question-mark-instead-of-tag-name") => { ErrorKind::UnexpectedQuestionMarkInsteadOfTagName } Some("unexpected-character-after-doctype-system-identifier") => { ErrorKind::UnexpectedCharacterAfterDoctypeSystemIdentifier } Some("unexpected-null-character") => ErrorKind::UnexpectedNullCharacter, Some("unexpected-solidus-in-tag") => ErrorKind::UnexpectedSolidusInTag, Some("unexpected-character-in-attribute-name") => { ErrorKind::UnexpectedCharacterInAttributeName } Some("unexpected-character-in-unquoted-attribute-value") => { ErrorKind::UnexpectedCharacterInUnquotedAttributeValue } Some("duplicate-attribute") => ErrorKind::DuplicateAttribute, 
Some("end-tag-with-attributes") => ErrorKind::EndTagWithAttributes, Some("missing-whitespace-before-doctype-name") => { ErrorKind::MissingWhitespaceBeforeDoctypeName } Some("missing-attribute-value") => ErrorKind::MissingAttributeValue, Some("missing-doctype-public-identifier") => { ErrorKind::MissingDoctypePublicIdentifier } Some("missing-end-tag-name") => ErrorKind::MissingEndTagName, Some("missing-doctype-name") => ErrorKind::MissingDoctypeName, Some("missing-doctype-system-identifier") => { ErrorKind::MissingDoctypeSystemIdentifier } Some("missing-whitespace-after-doctype-system-keyword") => { ErrorKind::MissingWhitespaceAfterDoctypeSystemKeyword } Some("missing-whitespace-after-doctype-public-keyword") => { ErrorKind::MissingWhitespaceAfterDoctypePublicKeyword } Some("missing-quote-before-doctype-public-identifier") => { ErrorKind::MissingQuoteBeforeDoctypePublicIdentifier } Some("missing-quote-before-doctype-system-identifier") => { ErrorKind::MissingQuoteBeforeDoctypeSystemIdentifier } Some("incorrectly-closed-comment") => { ErrorKind::IncorrectlyClosedComment } Some("invalid-character-sequence-after-doctype-name") => { ErrorKind::InvalidCharacterSequenceAfterDoctypeName } Some( "missing-whitespace-between-doctype-public-and-system-identifiers", ) => { ErrorKind::MissingWhitespaceBetweenDoctypePublicAndSystemIdentifiers } Some("missing-whitespace-between-attributes") => { ErrorKind::MissingWhitespaceBetweenAttributes } Some("missing-semicolon-after-character-reference") => { ErrorKind::MissingSemicolonAfterCharacterReference } Some("invalid-first-character-of-tag-name") => { ErrorKind::InvalidFirstCharacterOfTagName } _ => { unreachable!("unknown error {:?}", expected_code); } }, _ => { unreachable!(); } }; // TODO validate error positions assert!(actual_errors .iter() .any(|error| *error.kind() == expected_code)); } } else { let errors = lexer.take_errors(); assert_eq!(errors.len(), 0); } assert_eq!(actual, expected); } } } enum TestState { Data, Document, DocumentFragment, Errors, NewErrors, } enum DocumentOrDocumentFragment { Document(PResult<Document>), DocumentFragment(PResult<DocumentFragment>), } #[testing::fixture("tests/html5lib-tests/tree-construction/**/*.dat")] #[testing::fixture("tests/html5lib-tests-fixture/**/*.html")] fn html5lib_test_tree_construction(input: PathBuf) { if input.extension().unwrap() == "dat" { let mut tree_construction_base = None; let mut tests_base = None; let mut path_buf = input.to_path_buf(); while path_buf.pop() { if path_buf.ends_with("tree-construction") { tree_construction_base = Some(path_buf.clone()); } if path_buf.ends_with("tests") { tests_base = Some(path_buf.clone()); break; } } let tree_construction_base = tree_construction_base.unwrap(); let relative_path_to_test = input .strip_prefix(tree_construction_base) .expect("failed to get relative filename") .to_str() .unwrap() .replace('/', "_") .replace('.', "_"); let tests_base = tests_base.unwrap(); let dir = tests_base .join("html5lib-tests-fixture") .join(&relative_path_to_test); fs::create_dir_all(dir.clone()).expect("failed to create directory for fixtures"); let tests_file = fs::read_to_string(input).expect("Something went wrong reading the file"); let mut tests = tests_file.split("#data\n"); tests.next(); let mut counter = 0; while let Some(test) = tests.next() { let mut data: Vec<&str> = vec![]; let mut document: Vec<&str> = vec![]; let mut document_fragment: Vec<&str> = vec![]; let mut errors: Vec<&str> = vec![]; let mut new_errors: Vec<&str> = vec![]; let mut scripting_enabled = 
false; let mut state = Some(TestState::Data); let lines = test.lines(); for line in lines { match line { "#data" => { state = Some(TestState::Data); continue; } "#errors" => { state = Some(TestState::Errors); continue; } "#new-errors" => { state = Some(TestState::NewErrors); continue; } "#document" => { state = Some(TestState::Document); continue; } "#document-fragment" => { state = Some(TestState::DocumentFragment); continue; } "#script-on" => { scripting_enabled = true; state = None; continue; } "#script-off" => { scripting_enabled = false; state = None; continue; } _ => {} } match &state { Some(TestState::Data) => { data.push(line); } Some(TestState::Document) => { document.push(line); } Some(TestState::DocumentFragment) => { document_fragment.push(line); } Some(TestState::Errors) => { errors.push(line); } Some(TestState::NewErrors) => { new_errors.push(line); } _ => { unreachable!(); } } } let mut file_stem = counter.to_string(); if !document_fragment.is_empty() { file_stem += ".fragment."; file_stem += &document_fragment.join("").replace(' ', "_"); } if scripting_enabled { file_stem += ".script_on"; } let html_path = dir.join(file_stem.clone() + ".html"); fs::write(html_path, data.join("\n")) .expect("Something went wrong when writing to the file"); let dom_snapshot_path = dir.join(file_stem.clone() + ".dom"); let mut dom = document.join("\n"); if !dom.ends_with('\n') { dom.push('\n'); } fs::write(dom_snapshot_path, dom) .expect("Something went wrong when writing to the file"); let errors = errors.join("\n"); let errors_snapshot_path = dir.join(file_stem + ".errors"); fs::write(errors_snapshot_path, errors) .expect("Something went wrong when writing to the file"); counter += 1; } return; } testing::run_test2(false, |cm, handler| { // Type annotation if false { return Ok(()); } let file_stem = input.file_stem().unwrap().to_str().unwrap().to_owned(); let scripting_enabled = file_stem.contains("script_on"); let json_path = input.parent().unwrap().join(file_stem.clone() + ".json"); let fm = cm.load_file(&input).unwrap(); let lexer = Lexer::new(SourceFileInput::from(&*fm)); let config = ParserConfig { scripting_enabled, iframe_srcdoc: false, }; let mut parser = Parser::new(lexer, config); let document_or_document_fragment = if file_stem.contains("fragment") { let mut context_element_namespace = Namespace::HTML; let mut context_element_tag_name = "unknown"; let context_element = file_stem .split('.') .last() .expect("failed to get context element from filename"); if context_element.contains('_') { let mut splited = context_element.split('_'); if let Some(namespace) = splited.next() { context_element_namespace = match namespace { "math" => Namespace::MATHML, "svg" => Namespace::SVG, _ => { unreachable!(); } }; } if let Some(tag_name) = splited.next() { context_element_tag_name = tag_name; } } else { context_element_tag_name = context_element; } let context_element = Element { span: Default::default(), namespace: context_element_namespace, tag_name: context_element_tag_name.into(), attributes: vec![], children: vec![], content: None, }; DocumentOrDocumentFragment::DocumentFragment( parser.parse_document_fragment(context_element), ) } else { DocumentOrDocumentFragment::Document(parser.parse_document()) }; let parent_name = input.parent().unwrap().to_string_lossy(); // `scripted` for browser tests with JS // `search` proposed, but not merged in spec let need_skip_tests = parent_name.contains("scripted") || parent_name.contains("search"); if !need_skip_tests { let errors = parser.take_errors(); 
let errors_path = input.parent().unwrap().join(file_stem.clone() + ".errors"); let contents = fs::read_to_string(errors_path).expect("Something went wrong reading the file"); // TODO bug in tests - https://github.com/html5lib/html5lib-tests/issues/138 let actual_number_of_errors = if parent_name.contains("tests19_dat") && file_stem.contains("84") { errors.len() + 1 } else if (parent_name.contains("math_dat") || parent_name.contains("svg_dat")) && (file_stem.contains("5.fragment.tbody") || file_stem.contains("6.fragment.tbody") || file_stem.contains("7.fragment.tbody")) { errors.len() - 1 } else if parent_name.contains("foreign-fragment_dat") && file_stem.contains("3.fragment.svg_path") { errors.len() - 1 } else { errors.len() }; let expected_number_of_errors = contents.lines().count(); assert_eq!(actual_number_of_errors, expected_number_of_errors); } match document_or_document_fragment { DocumentOrDocumentFragment::Document(Ok(mut document)) => { let actual_json = serde_json::to_string_pretty(&document) .map(NormalizedOutput::from) .expect("failed to serialize document"); actual_json.compare_to_file(&json_path).unwrap(); if parent_name.contains("scripted") || parent_name.contains("search") { return Ok(()); } let mut dom_buf = String::new(); document.visit_mut_with(&mut DomVisualizer { dom_buf: &mut dom_buf, indent: 0, }); let dir = input.parent().unwrap().to_path_buf(); NormalizedOutput::from(dom_buf) .compare_to_file(&dir.join(file_stem + ".dom")) .unwrap(); Ok(()) } DocumentOrDocumentFragment::DocumentFragment(Ok(mut document_fragment)) => { let actual_json = serde_json::to_string_pretty(&document_fragment) .map(NormalizedOutput::from) .expect("failed to serialize document"); actual_json.compare_to_file(&json_path).unwrap(); if need_skip_tests { return Ok(()); } let mut dom_buf = String::new(); document_fragment.visit_mut_with(&mut DomVisualizer { dom_buf: &mut dom_buf, indent: 0, }); let dir = input.parent().unwrap().to_path_buf(); NormalizedOutput::from(dom_buf) .compare_to_file(&dir.join(file_stem + ".dom")) .unwrap(); Ok(()) } DocumentOrDocumentFragment::Document(Err(err)) | DocumentOrDocumentFragment::DocumentFragment(Err(err)) => { let mut d = err.to_diagnostics(&handler); d.note(&format!("current token = {}", parser.dump_cur())); d.emit(); panic!(); } } }) .unwrap(); }
match document { Ok(document) => { document.visit_with(&mut SpanVisualizer { handler: &handler });
host.go
package service import ( "encoding/json" "errors" "io/ioutil" "os" "strconv" "strings" "time" "github.com/360EntSecGroup-Skylar/excelize" "github.com/KubeOperator/KubeOperator/pkg/constant" "github.com/KubeOperator/KubeOperator/pkg/controller/page" "github.com/KubeOperator/KubeOperator/pkg/db" "github.com/KubeOperator/KubeOperator/pkg/dto" "github.com/KubeOperator/KubeOperator/pkg/errorf" "github.com/KubeOperator/KubeOperator/pkg/model" "github.com/KubeOperator/KubeOperator/pkg/model/common" "github.com/KubeOperator/KubeOperator/pkg/repository" "github.com/KubeOperator/KubeOperator/pkg/util/kobe" "github.com/KubeOperator/KubeOperator/pkg/util/ssh" "github.com/KubeOperator/kobe/api" uuid "github.com/satori/go.uuid" "k8s.io/apimachinery/pkg/util/wait" ) type HostService interface { Get(name string) (dto.Host, error) List(projectName string) ([]dto.Host, error) Page(num, size int) (page.Page, error) Create(creation dto.HostCreate) (dto.Host, error) Delete(name string) error Sync(name string) (dto.Host, error) Batch(op dto.HostOp) error DownloadTemplateFile() error ImportHosts(file []byte) error } type hostService struct { hostRepo repository.HostRepository credentialRepo repository.CredentialRepository } func NewHostService() HostService { return &hostService{ hostRepo: repository.NewHostRepository(), credentialRepo: repository.NewCredentialRepository(), } } func (h hostService) Get(name string) (dto.Host, error) { var hostDTO dto.Host mo, err := h.hostRepo.Get(name) if err != nil { return hostDTO, err } hostDTO = dto.Host{ Host: mo, ClusterName: mo.Cluster.Name, ZoneName: mo.Zone.Name, } return hostDTO, err } func (h hostService) List(projectName string) ([]dto.Host, error) { var hostDTOs []dto.Host mos, err := h.hostRepo.List(projectName) if err != nil { return hostDTOs, err } for _, mo := range mos { hostDTOs = append(hostDTOs, dto.Host{ Host: mo, ClusterName: mo.Cluster.Name, ZoneName: mo.Zone.Name, }) } return hostDTOs, err } func (h hostService) Page(num, size int) (page.Page, error) { var page page.Page var hostDTOs []dto.Host total, mos, err := h.hostRepo.Page(num, size) if err != nil { return page, err } for _, mo := range mos { hostDTOs = append(hostDTOs, dto.Host{ Host: mo, ClusterName: mo.Cluster.Name, ZoneName: mo.Zone.Name, }) } page.Total = total page.Items = hostDTOs return page, err } func (h hostService) Delete(name string) error { err := h.hostRepo.Delete(name) if err != nil { return err } return nil } func (h hostService) Create(creation dto.HostCreate) (dto.Host, error) { credential, err := repository.NewCredentialRepository().GetById(creation.CredentialID) if err != nil { return dto.Host{}, err } host := model.Host{ BaseModel: common.BaseModel{}, Name: creation.Name, Ip: creation.Ip, Port: creation.Port, CredentialID: creation.CredentialID, Credential: credential, Status: constant.ClusterInitializing, } err = h.hostRepo.Save(&host) if err != nil { return dto.Host{}, err } go h.RunGetHostConfig(&host) return dto.Host{Host: host}, err } func (h hostService) Sync(name string) (dto.Host, error) { host, err := h.hostRepo.Get(name) if err != nil { return dto.Host{Host: host}, err } err = h.GetHostConfig(&host) if err != nil { host.Status = constant.ClusterFailed host.Message = err.Error() _ = h.hostRepo.Save(&host) return dto.Host{Host: host}, err } host.Status = constant.ClusterRunning err = h.hostRepo.Save(&host) if err != nil { return dto.Host{Host: host}, err } return dto.Host{Host: host}, nil } func (h hostService) Batch(op dto.HostOp) error { var deleteItems []model.Host 
for _, item := range op.Items { deleteItems = append(deleteItems, model.Host{ BaseModel: common.BaseModel{}, ID: item.ID, Name: item.Name, }) } return h.hostRepo.Batch(op.Operation, deleteItems) } func (h hostService) GetHostGpu(host *model.Host) error { password, privateKey, err := host.GetHostPasswordAndPrivateKey() if err != nil { return err } client, err := ssh.New(&ssh.Config{ User: host.Credential.Username, Host: host.Ip, Port: host.Port, Password: password, PrivateKey: privateKey, PassPhrase: nil, DialTimeOut: 5 * time.Second, Retry: 3, }) if err != nil { host.Status = model.SshError return err } if err := client.Ping(); err != nil { host.Status = model.Disconnect return err } result, _, _, err := client.Exec("lspci|grep -i NVIDIA") if err != nil { host.HasGpu = false host.GpuNum = 0 } host.GpuNum = strings.Count(result, "NVIDIA") if host.GpuNum > 0 { host.HasGpu = true s := strings.Index(result, "[") t := strings.Index(result, "]") host.GpuInfo = result[s+1 : t] } _ = h.hostRepo.Save(host) return err } func (h hostService) GetHostMem(host *model.Host) error { password, privateKey, err := host.GetHostPasswordAndPrivateKey() if err != nil { return err } client, err := ssh.New(&ssh.Config{ User: host.Credential.Username, Host: host.Ip, Port: host.Port, Password: password, PrivateKey: privateKey, PassPhrase: nil, DialTimeOut: 5 * time.Second, Retry: 3, }) if err != nil { host.Status = model.SshError return err } if err := client.Ping(); err != nil { host.Status = model.Disconnect return err } result, _, _, err := client.Exec("dmidecode -t 17 | grep \"Size.*MB\" | awk '{s+=$2} END {print s}'") if err != nil { return err } host.Memory, _ = strconv.Atoi(strings.Trim(result, "\n")) return err } func (h hostService) RunGetHostConfig(host *model.Host) { host.Status = constant.ClusterInitializing _ = h.hostRepo.Save(host) err := h.GetHostConfig(host) if err != nil { host.Status = constant.ClusterFailed host.Message = err.Error() _ = h.hostRepo.Save(host) return } host.Status = constant.ClusterRunning _ = h.hostRepo.Save(host) } func (h hostService) GetHostConfig(host *model.Host) error { defer func() { if err := recover(); err != nil { log.Error("gather fact error!") } }() password, privateKey, err := host.GetHostPasswordAndPrivateKey() if err != nil { return err } ansible := kobe.NewAnsible(&kobe.Config{ Inventory: &api.Inventory{ Hosts: []*api.Host{ { Ip: host.Ip, Name: host.Name, Port: int32(host.Port), User: host.Credential.Username, Password: password, PrivateKey: string(privateKey), Vars: map[string]string{}, }, }, Groups: []*api.Group{ { Name: "master", Children: []string{}, Vars: map[string]string{}, Hosts: []string{host.Name}, }, }, }, }) resultId, err := ansible.RunAdhoc("master", "setup", "") if err != nil { return err } var result kobe.Result err = wait.Poll(5*time.Second, 5*time.Minute, func() (done bool, err error) { res, err := ansible.GetResult(resultId) if err != nil { return true, err } if res.Finished { if res.Success { result, err = kobe.ParseResult(res.Content) if err != nil { return true, err } } else { if res.Content != "" { result, err = kobe.ParseResult(res.Content) if err != nil { return true, err } result.GatherFailedInfo() if result.HostFailedInfo != nil && len(result.HostFailedInfo) > 0 { by, _ := json.Marshal(&result.HostFailedInfo) return true, errors.New(string(by)) } } } return true, nil } return false, nil }) if err != nil { return err } var facts interface{} if len(result.Plays) > 0 && len(result.Plays[0].Tasks) > 0 { facts = 
result.Plays[0].Tasks[0].Hosts[host.Name]["ansible_facts"] } else { return errors.New("no result return") } if facts == nil { return err } else { result, ok := facts.(map[string]interface{})
host.Os = result["ansible_distribution"].(string) host.OsVersion = result["ansible_distribution_version"].(string) if result["ansible_processor_vcpus"] != nil { host.CpuCore = int(result["ansible_processor_vcpus"].(float64)) } devices := result["ansible_devices"].(map[string]interface{}) var volumes []model.Volume for i := range devices { device := devices[i].(map[string]interface{}) if "Virtual disk" == device["model"] { v := model.Volume{ ID: uuid.NewV4().String(), Name: "/dev/" + i, Size: device["size"].(string), HostID: host.ID, } volumes = append(volumes, v) } } host.Volumes = volumes } err = h.GetHostMem(host) if err != nil { return err } err = h.GetHostGpu(host) if err != nil { host.GpuNum = 0 host.GpuInfo = "" host.HasGpu = false return nil } return nil } func (h hostService) DownloadTemplateFile() error { f := excelize.NewFile() f.SetCellValue("Sheet1", "A1", "name") f.SetCellValue("Sheet1", "B1", "ip") f.SetCellValue("Sheet1", "C1", "port") f.SetCellValue("Sheet1", "D1", "credential (系统设置-凭据中的名称)") file, err := os.Create("./demo.xlsx") if err != nil { return err } defer file.Close() _, err = f.WriteTo(file) if err != nil { return err } return nil } func (h hostService) ImportHosts(file []byte) error { f, err := os.Create("./import.xlsx") if err != nil { return err } defer f.Close() err = ioutil.WriteFile("./import.xlsx", file, 0775) if err != nil { return err } xlsx, err := excelize.OpenFile("./import.xlsx") if err != nil { return err } rows := xlsx.GetRows("Sheet1") if len(rows) == 0 { return errors.New("HOST_IMPORT_ERROR_NULL") } var hosts []model.Host //var errMsg string var failedNum int var errs errorf.CErrFs for index, row := range rows { if index == 0 { continue } if row[0] == "" || row[1] == "" || row[2] == "" || row[3] == "" { errs = errs.Add(errorf.New("HOST_IMPORT_NULL_VALUE", strconv.Itoa(index))) failedNum++ continue } port, err := strconv.Atoi(row[2]) if err != nil { errs = errs.Add(errorf.New("HOST_IMPORT_WRONG_FORMAT", strconv.Itoa(index))) failedNum++ continue } credential, err := h.credentialRepo.Get(row[3]) if err != nil { errs = errs.Add(errorf.New("HOST_IMPORT_CREDENTIAL_NOT_FOUND", strconv.Itoa(index))) failedNum++ continue } host := model.Host{ Name: strings.Trim(row[0], " "), Ip: strings.Trim(row[1], " "), Port: port, CredentialID: credential.ID, Status: constant.ClusterInitializing, Credential: credential, } hosts = append(hosts, host) } if len(errs) > 0 { errs = errs.Add(errorf.New("HOST_IMPORT_FAILED_NUM", strconv.Itoa(failedNum))) } for _, host := range hosts { err = h.hostRepo.Save(&host) if err != nil { errs = errs.Add(errorf.New("HOST_IMPORT_FAILED_SAVE", host.Name, err.Error())) continue } go h.RunGetHostConfig(&host) var ip model.Ip db.DB.Where(&model.Ip{Address: host.Ip}).First(&ip) if ip.ID != "" { ip.Status = constant.IpUsed db.DB.Save(&ip) } } if len(errs) > 0 { return errs } else { return nil } }
if !ok { return err }
desulfotomaculumcopahuensis.py
""" This file offers the methods to automatically retrieve the graph Desulfotomaculum copahuensis. The graph is automatically retrieved from the STRING repository. References --------------------- Please cite the following if you use the data: ```bib @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } ``` """ from typing import Dict from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph from ...ensmallen import Graph # pylint: disable=import-error def
( directed: bool = False, preprocess: bool = True, load_nodes: bool = True, verbose: int = 2, cache: bool = True, cache_path: str = "graphs/string", version: str = "links.v11.5", **additional_graph_kwargs: Dict ) -> Graph: """Return new instance of the Desulfotomaculum copahuensis graph. The graph is automatically retrieved from the STRING repository. Parameters ------------------- directed: bool = False Wether to load the graph as directed or undirected. By default false. preprocess: bool = True Whether to preprocess the graph to be loaded in optimal time and memory. load_nodes: bool = True, Whether to load the nodes vocabulary or treat the nodes simply as a numeric range. verbose: int = 2, Wether to show loading bars during the retrieval and building of the graph. cache: bool = True Whether to use cache, i.e. download files only once and preprocess them only once. cache_path: str = "graphs" Where to store the downloaded graphs. version: str = "links.v11.5" The version of the graph to retrieve. The available versions are: - homology.v11.5 - physical.links.v11.5 - links.v11.5 additional_graph_kwargs: Dict Additional graph kwargs. Returns ----------------------- Instace of Desulfotomaculum copahuensis graph. References --------------------- Please cite the following if you use the data: ```bib @article{szklarczyk2019string, title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets}, author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others}, journal={Nucleic acids research}, volume={47}, number={D1}, pages={D607--D613}, year={2019}, publisher={Oxford University Press} } ``` """ return AutomaticallyRetrievedGraph( graph_name="DesulfotomaculumCopahuensis", repository="string", version=version, directed=directed, preprocess=preprocess, load_nodes=load_nodes, verbose=verbose, cache=cache, cache_path=cache_path, additional_graph_kwargs=additional_graph_kwargs )()
DesulfotomaculumCopahuensis
error.rs
use crate::ffi; use libc::c_int; use std::error; use std::error::Error as StdError; use std::fmt; use std::io; use crate::error::ErrorStack; use crate::ssl::MidHandshakeSslStream; use crate::x509::X509VerifyResult; /// An error code returned from SSL functions. #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct
(c_int); impl ErrorCode { /// The SSL session has been closed. pub const ZERO_RETURN: ErrorCode = ErrorCode(ffi::SSL_ERROR_ZERO_RETURN); /// An attempt to read data from the underlying socket returned `WouldBlock`. /// /// Wait for read readiness and retry the operation. pub const WANT_READ: ErrorCode = ErrorCode(ffi::SSL_ERROR_WANT_READ); /// An attempt to write data to the underlying socket returned `WouldBlock`. /// /// Wait for write readiness and retry the operation. pub const WANT_WRITE: ErrorCode = ErrorCode(ffi::SSL_ERROR_WANT_WRITE); /// A non-recoverable IO error occurred. pub const SYSCALL: ErrorCode = ErrorCode(ffi::SSL_ERROR_SYSCALL); /// An error occurred in the SSL library. pub const SSL: ErrorCode = ErrorCode(ffi::SSL_ERROR_SSL); pub fn from_raw(raw: c_int) -> ErrorCode { ErrorCode(raw) } #[allow(clippy::trivially_copy_pass_by_ref)] pub fn as_raw(&self) -> c_int { self.0 } } #[derive(Debug)] pub(crate) enum InnerError { Io(io::Error), Ssl(ErrorStack), } /// An SSL error. #[derive(Debug)] pub struct Error { pub(crate) code: ErrorCode, pub(crate) cause: Option<InnerError>, } impl Error { pub fn code(&self) -> ErrorCode { self.code } pub fn io_error(&self) -> Option<&io::Error> { match self.cause { Some(InnerError::Io(ref e)) => Some(e), _ => None, } } pub fn into_io_error(self) -> Result<io::Error, Error> { match self.cause { Some(InnerError::Io(e)) => Ok(e), _ => Err(self), } } pub fn ssl_error(&self) -> Option<&ErrorStack> { match self.cause { Some(InnerError::Ssl(ref e)) => Some(e), _ => None, } } } impl From<ErrorStack> for Error { fn from(e: ErrorStack) -> Error { Error { code: ErrorCode::SSL, cause: Some(InnerError::Ssl(e)), } } } impl fmt::Display for Error { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self.code { ErrorCode::ZERO_RETURN => fmt.write_str("the SSL session has been shut down"), ErrorCode::WANT_READ => match self.io_error() { Some(_) => fmt.write_str("a nonblocking read call would have blocked"), None => fmt.write_str("the operation should be retried"), }, ErrorCode::WANT_WRITE => match self.io_error() { Some(_) => fmt.write_str("a nonblocking write call would have blocked"), None => fmt.write_str("the operation should be retried"), }, ErrorCode::SYSCALL => match self.io_error() { Some(err) => write!(fmt, "{}", err), None => fmt.write_str("unexpected EOF"), }, ErrorCode::SSL => match self.ssl_error() { Some(e) => write!(fmt, "{}", e), None => fmt.write_str("unknown BoringSSL error"), }, ErrorCode(code) => write!(fmt, "unknown error code {}", code), } } } impl error::Error for Error { fn source(&self) -> Option<&(dyn error::Error + 'static)> { match self.cause { Some(InnerError::Io(ref e)) => Some(e), Some(InnerError::Ssl(ref e)) => Some(e), None => None, } } } /// An error or intermediate state after a TLS handshake attempt. // FIXME overhaul #[derive(Debug)] pub enum HandshakeError<S> { /// Setup failed. SetupFailure(ErrorStack), /// The handshake failed. Failure(MidHandshakeSslStream<S>), /// The handshake encountered a `WouldBlock` error midway through. /// /// This error will never be returned for blocking streams. 
WouldBlock(MidHandshakeSslStream<S>), } impl<S: fmt::Debug> StdError for HandshakeError<S> { fn source(&self) -> Option<&(dyn StdError + 'static)> { match *self { HandshakeError::SetupFailure(ref e) => Some(e), HandshakeError::Failure(ref s) | HandshakeError::WouldBlock(ref s) => Some(s.error()), } } } impl<S> fmt::Display for HandshakeError<S> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { HandshakeError::SetupFailure(ref e) => { write!(f, "TLS stream setup failed {}", e) } HandshakeError::Failure(ref s) => fmt_mid_handshake_error(s, f, "TLS handshake failed"), HandshakeError::WouldBlock(ref s) => { fmt_mid_handshake_error(s, f, "TLS handshake interrupted") } } } } fn fmt_mid_handshake_error( s: &MidHandshakeSslStream<impl Sized>, f: &mut fmt::Formatter, prefix: &str, ) -> fmt::Result { match s.ssl().verify_result() { X509VerifyResult::OK => write!(f, "{}", prefix)?, verify => write!(f, "{}: cert verification failed - {}", prefix, verify)?, } write!(f, " {}", s.error()) } impl<S> From<ErrorStack> for HandshakeError<S> { fn from(e: ErrorStack) -> HandshakeError<S> { HandshakeError::SetupFailure(e) } }
ErrorCode
intcode.rs
use anyhow::{anyhow, Result}; use std::collections::HashMap; use std::convert::TryInto; use std::fs::read_to_string; use std::path::Path; #[derive(Debug)] pub enum Mode { Pos, Immediate, Relative, } #[derive(Debug, PartialEq, Eq)] pub enum State { Input(PausedInterpreterInput), Output(PausedInterpreterOutput), Halt(HashMap<usize, isize>), } #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct Opcode { inner: usize, } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Interpreter { memory: HashMap<usize, isize>, pc: usize, rel_base: isize, } #[derive(Debug, PartialEq, Eq)] pub struct PausedInterpreterInput { inner: Interpreter, pos: usize, } #[derive(Debug, PartialEq, Eq)] pub struct PausedInterpreterOutput { inner: Interpreter, value: isize, } impl Opcode { pub fn new(inner: usize) -> Self { Self { inner } } pub fn code(&self) -> usize { self.inner % 100 } pub fn param_mode(&self, i: u32) -> Result<Mode> { match (self.inner / 10usize.pow(i + 2)) % 10 { 0 => Ok(Mode::Pos), 1 => Ok(Mode::Immediate), 2 => Ok(Mode::Relative), mode => Err(anyhow!( "Invalid parameter mode {} for parameter {}", mode, i )), } } } impl Interpreter { pub fn new(memory: HashMap<usize, isize>) -> Self { Self { memory, pc: 0, rel_base: 0, } } pub fn from_path<P: AsRef<Path>>(path: P) -> Result<Self> { let str_input = read_to_string(path)?; let program = str_input .trim_end() .split(",") .enumerate() .map(|(i, x)| -> Result<(usize, isize)> { Ok((i, x.parse()?)) }) .collect::<Result<HashMap<usize, isize>, _>>()?; if program.len() == 0 { return Err(anyhow!( "Program is too short (expected at least 1 element, got 0)" )); } Ok(Self::new(program)) } pub fn from_iter<T: IntoIterator<Item = isize>>(iter: T) -> Self { Self::new(iter.into_iter().enumerate().collect()) } pub fn get(&self, i: usize) -> isize { self.memory.get(&i).unwrap_or(&0).clone() } pub fn put(&mut self, pos: usize, value: isize) { *self.memory.entry(pos).or_insert(0) = value; } pub fn read_opcode(&mut self) -> Result<Opcode> { Ok(Opcode::new( self.read_input_param(Mode::Immediate)?.try_into()?, )) } pub fn read_input_param(&mut self, mode: Mode) -> Result<isize>
pub fn read_output_param(&mut self, mode: Mode) -> Result<usize> { let value = match mode { Mode::Pos => self.get(self.pc), Mode::Immediate => { return Err(anyhow!("Output parameter must not be in immediate mode")) } Mode::Relative => self.rel_base + self.get(self.pc), }; self.pc += 1; Ok(value.try_into()?) } fn read_binop_params(&mut self, op: Opcode) -> Result<(isize, isize, usize)> { Ok(( self.read_input_param(op.param_mode(0)?)?, self.read_input_param(op.param_mode(1)?)?, self.read_output_param(op.param_mode(2)?)?, )) } pub fn add(&mut self, op: Opcode) -> Result<()> { let (a, b, output) = self.read_binop_params(op)?; self.put(output, a + b); Ok(()) } pub fn multiply(&mut self, op: Opcode) -> Result<()> { let (a, b, output) = self.read_binop_params(op)?; self.put(output, a * b); Ok(()) } pub fn jump(&mut self, op: Opcode) -> Result<()> { let cmp = self.read_input_param(op.param_mode(0)?)?; let jmp_target = self.read_input_param(op.param_mode(1)?)?; if (cmp != 0) == (op.code() == 5) { self.pc = jmp_target.try_into()?; } Ok(()) } pub fn less_than(&mut self, op: Opcode) -> Result<()> { let (a, b, output) = self.read_binop_params(op)?; if a < b { self.put(output, 1); } else { self.put(output, 0); } Ok(()) } pub fn equal(&mut self, op: Opcode) -> Result<()> { let (a, b, output) = self.read_binop_params(op)?; if a == b { self.put(output, 1); } else { self.put(output, 0); } Ok(()) } pub fn set_rel_base(&mut self, op: Opcode) -> Result<()> { self.rel_base += self.read_input_param(op.param_mode(0)?)?; Ok(()) } pub fn run(mut self) -> Result<State> { loop { let op = self.read_opcode()?; match op.code() { 1 => self.add(op)?, 2 => self.multiply(op)?, 3 => { let pos = self.read_output_param(op.param_mode(0)?)?; return Ok(State::Input(PausedInterpreterInput { inner: self, pos })); } 4 => { let value = self.read_input_param(op.param_mode(0)?)?; return Ok(State::Output(PausedInterpreterOutput { inner: self, value, })); } 5 | 6 => self.jump(op)?, 7 => self.less_than(op)?, 8 => self.equal(op)?, 9 => self.set_rel_base(op)?, 99 => return Ok(State::Halt(self.memory)), op => return Err(anyhow!("Got invalid opcode {}", op)), } } } } impl PausedInterpreterInput { pub fn resume(mut self, value: isize) -> Result<State> { self.inner.put(self.pos, value); self.inner.run() } } impl PausedInterpreterOutput { pub fn get(&self) -> isize { self.value } pub fn resume(self) -> Result<State> { self.inner.run() } } #[cfg(test)] mod tests { use super::*; fn run(mem: Vec<isize>) -> Result<State> { Interpreter::from_iter(mem).run() } fn halt(mem: Vec<isize>) -> State { State::Halt(mem.into_iter().enumerate().collect()) } #[test] fn test_add() -> Result<()> { assert_eq!(run(vec![1, 0, 0, 0, 99])?, halt(vec![2, 0, 0, 0, 99])); assert_eq!( run(vec![1, 1, 1, 4, 99, 5, 6, 0, 99])?, halt(vec![30, 1, 1, 4, 2, 5, 6, 0, 99]), ); Ok(()) } #[test] fn test_mul() -> Result<()> { assert_eq!(run(vec![2, 3, 0, 3, 99])?, halt(vec![2, 3, 0, 6, 99])); assert_eq!( run(vec![2, 4, 4, 5, 99, 0])?, halt(vec![2, 4, 4, 5, 99, 9801]), ); Ok(()) } }
{ let value = match mode { Mode::Pos => self.get(self.get(self.pc).try_into()?), Mode::Immediate => self.get(self.pc), Mode::Relative => self.get((self.rel_base + self.get(self.pc)).try_into()?), }; self.pc += 1; Ok(value) }
normalDate.py
#!/usr/bin/env python # normalDate.py - version 1.0 - 20000717 #hacked by Robin Becker 10/Apr/2001 #major changes include # using Types instead of type(0) etc # BusinessDate class # __radd__, __rsub__ methods # formatMS stuff # derived from an original version created # by Jeff Bauer of Rubicon Research and used # with his kind permission __version__=''' $Id: normalDate.py 3342 2008-12-12 15:55:34Z andy $ ''' __doc__="Jeff Bauer's lightweight date class, extended by us. Predates Python's datetime module." _bigBangScalar = -4345732 # based on (-9999, 1, 1) BC/BCE minimum _bigCrunchScalar = 2958463 # based on (9999,12,31) AD/CE maximum _daysInMonthNormal = [31,28,31,30,31,30,31,31,30,31,30,31] _daysInMonthLeapYear = [31,29,31,30,31,30,31,31,30,31,30,31] _dayOfWeekName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday'] _monthName = ['January', 'February', 'March', 'April', 'May', 'June', 'July','August','September','October','November','December'] from types import IntType, StringType, ListType, TupleType import string, re, time, datetime if hasattr(time,'struct_time'): _DateSeqTypes = (ListType,TupleType,time.struct_time) else: _DateSeqTypes = (ListType,TupleType) _fmtPat = re.compile('\\{(m{1,5}|yyyy|yy|d{1,4})\\}',re.MULTILINE|re.IGNORECASE) _iso_re = re.compile(r'(\d\d\d\d|\d\d)-(\d\d)-(\d\d)') def getStdMonthNames(): return map(string.lower,_monthName) def getStdShortMonthNames(): return map(lambda x: x[:3],getStdMonthNames()) def getStdDayNames(): return map(string.lower,_dayOfWeekName) def getStdShortDayNames(): return map(lambda x: x[:3],getStdDayNames()) def isLeapYear(year): """determine if specified year is leap year, returns Python boolean""" if year < 1600: if year % 4: return 0 else: return 1 elif year % 4 != 0: return 0 elif year % 100 != 0: return 1 elif year % 400 != 0: return 0 else: return 1 class NormalDateException(Exception): """Exception class for NormalDate""" pass class NormalDate: """ NormalDate is a specialized class to handle dates without all the excess baggage (time zones, daylight savings, leap seconds, etc.) of other date structures. The minimalist strategy greatly simplifies its implementation and use. Internally, NormalDate is stored as an integer with values in a discontinuous range of -99990101 to 99991231. The integer value is used principally for storage and to simplify the user interface. Internal calculations are performed by a scalar based on Jan 1, 1900. Valid NormalDate ranges include (-9999,1,1) B.C.E. through (9999,12,31) C.E./A.D. 1.0 No changes, except the version number. After 3 years of use by various parties I think we can consider it stable. 0.8 Added Prof. Stephen Walton's suggestion for a range method - module author resisted the temptation to use lambda <0.5 wink> 0.7 Added Dan Winkler's suggestions for __add__, __sub__ methods 0.6 Modifications suggested by Kevin Digweed to fix: - dayOfWeek, dayOfWeekAbbrev, clone methods - Permit NormalDate to be a better behaved superclass 0.5 Minor tweaking 0.4 - Added methods __cmp__, __hash__ - Added Epoch variable, scoped to the module - Added setDay, setMonth, setYear methods 0.3 Minor touch-ups 0.2 - Fixed bug for certain B.C.E leap years - Added Jim Fulton's suggestions for short alias class name =ND and __getstate__, __setstate__ methods Special thanks: Roedy Green """ def __init__(self, normalDate=None): """ Accept 1 of 4 values to initialize a NormalDate: 1. None - creates a NormalDate for the current day 2. integer in yyyymmdd format 3. 
string in yyyymmdd format 4. tuple in (yyyy, mm, dd) - localtime/gmtime can also be used """ if normalDate is None: self.setNormalDate(time.localtime(time.time())) else: self.setNormalDate(normalDate) def add(self, days): """add days to date; use negative integers to subtract""" if not type(days) is IntType: raise NormalDateException( \ 'add method parameter must be integer type') self.normalize(self.scalar() + days) def __add__(self, days): """add integer to normalDate and return a new, calculated value""" if not type(days) is IntType: raise NormalDateException( \ '__add__ parameter must be integer type') cloned = self.clone() cloned.add(days) return cloned def __radd__(self,days): '''for completeness''' return self.__add__(days) def clone(self): """return a cloned instance of this normalDate""" return self.__class__(self.normalDate) def __cmp__(self, target): if target is None: return 1 elif not hasattr(target, 'normalDate'): return 1 else: return cmp(self.normalDate, target.normalDate) def day(self): """return the day as integer 1-31""" return int(repr(self.normalDate)[-2:]) def dayOfWeek(self): """return integer representing day of week, Mon=0, Tue=1, etc.""" return apply(dayOfWeek, self.toTuple()) def dayOfWeekAbbrev(self): """return day of week abbreviation for current date: Mon, Tue, etc.""" return _dayOfWeekName[self.dayOfWeek()][:3] def dayOfWeekName(self): """return day of week name for current date: Monday, Tuesday, etc.""" return _dayOfWeekName[self.dayOfWeek()] def dayOfYear(self): """day of year""" if self.isLeapYear(): daysByMonth = _daysInMonthLeapYear else: daysByMonth = _daysInMonthNormal priorMonthDays = 0 for m in xrange(self.month() - 1): priorMonthDays = priorMonthDays + daysByMonth[m] return self.day() + priorMonthDays def daysBetweenDates(self, normalDate): """ return value may be negative, since calculation is self.scalar() - arg """ if type(normalDate) is _NDType: return self.scalar() - normalDate.scalar() else: return self.scalar() - NormalDate(normalDate).scalar() def equals(self, target): if type(target) is _NDType: if target is None: return self.normalDate is None else: return self.normalDate == target.normalDate else: return 0 def endOfMonth(self): """returns (cloned) last day of month""" return self.__class__(self.__repr__()[-8:-2]+str(self.lastDayOfMonth())) def firstDayOfMonth(self): """returns (cloned) first day of month""" return self.__class__(self.__repr__()[-8:-2]+"01") def formatUS(self): """return date as string in common US format: MM/DD/YY""" d = self.__repr__() return "%s/%s/%s" % (d[-4:-2], d[-2:], d[-6:-4]) def formatUSCentury(self): """return date as string in 4-digit year US format: MM/DD/YYYY""" d = self.__repr__() return "%s/%s/%s" % (d[-4:-2], d[-2:], d[-8:-4]) def _fmtM(self): return str(self.month()) def _fmtMM(self): return '%02d' % self.month() def _fmtMMM(self): return self.monthAbbrev() def _fmtMMMM(self): return self.monthName() def _fmtMMMMM(self): return self.monthName()[0] def _fmtD(self): return str(self.day()) def _fmtDD(self): return '%02d' % self.day() def _fmtDDD(self): return self.dayOfWeekAbbrev() def _fmtDDDD(self): return self.dayOfWeekName() def _fmtYY(self): return '%02d' % (self.year()%100) def _fmtYYYY(self): return str(self.year()) def formatMS(self,fmt): '''format like MS date using the notation {YY} --> 2 digit year {YYYY} --> 4 digit year {M} --> month as digit {MM} --> 2 digit month {MMM} --> abbreviated month name {MMMM} --> monthname {MMMMM} --> first character of monthname {D} --> day of month as digit {DD} 
--> 2 digit day of month {DDD} --> abrreviated weekday name {DDDD} --> weekday name ''' r = fmt[:] f = 0 while 1: m = _fmtPat.search(r,f) if m: y = getattr(self,'_fmt'+string.upper(m.group()[1:-1]))() i, j = m.span() r = (r[0:i] + y) + r[j:] f = i + len(y) else: return r def __getstate__(self): """minimize persistent storage requirements""" return self.normalDate def __hash__(self): return hash(self.normalDate) def __int__(self): return self.normalDate def isLeapYear(self): """ determine if specified year is leap year, returning true (1) or false (0) """ return isLeapYear(self.year()) def _isValidNormalDate(self, normalDate): """checks for date validity in [-]yyyymmdd format""" if type(normalDate) is not IntType: return 0 if len(repr(normalDate)) > 9: return 0 if normalDate < 0: dateStr = "%09d" % normalDate else: dateStr = "%08d" % normalDate if len(dateStr) < 8: return 0 elif len(dateStr) == 9: if (dateStr[0] != '-' and dateStr[0] != '+'): return 0 year = int(dateStr[:-4]) if year < -9999 or year > 9999 or year == 0: return 0 # note: zero (0) is not a valid year month = int(dateStr[-4:-2]) if month < 1 or month > 12: return 0 if isLeapYear(year): maxDay = _daysInMonthLeapYear[month - 1] else: maxDay = _daysInMonthNormal[month - 1] day = int(dateStr[-2:]) if day < 1 or day > maxDay: return 0 if year == 1582 and month == 10 and day > 4 and day < 15: return 0 # special case of 10 days dropped: Oct 5-14, 1582 return 1 def lastDayOfMonth(self): """returns last day of the month as integer 28-31""" if self.isLeapYear(): return _daysInMonthLeapYear[self.month() - 1] else: return _daysInMonthNormal[self.month() - 1] def localeFormat(self): """override this method to use your preferred locale format""" return self.formatUS() def month(self): """returns month as integer 1-12""" return int(repr(self.normalDate)[-4:-2]) def monthAbbrev(self): """returns month as a 3-character abbreviation, i.e. Jan, Feb, etc.""" return _monthName[self.month() - 1][:3] def monthName(self): """returns month name, i.e. January, February, etc.""" return _monthName[self.month() - 1] def normalize(self, scalar): """convert scalar to normalDate""" if scalar < _bigBangScalar: msg = "normalize(%d): scalar below minimum" % \ _bigBangScalar raise NormalDateException(msg) if scalar > _bigCrunchScalar: msg = "normalize(%d): scalar exceeds maximum" % \ _bigCrunchScalar raise NormalDateException(msg) from math import floor if scalar >= -115860: year = 1600 + int(floor((scalar + 109573) / 365.2425)) elif scalar >= -693597: year = 4 + int(floor((scalar + 692502) / 365.2425)) else: year = -4 + int(floor((scalar + 695058) / 365.2425)) days = scalar - firstDayOfYear(year) + 1 if days <= 0: year = year - 1 days = scalar - firstDayOfYear(year) + 1 daysInYear = 365 if isLeapYear(year): daysInYear = daysInYear + 1 if days > daysInYear: year = year + 1 days = scalar - firstDayOfYear(year) + 1 # add 10 days if between Oct 15, 1582 and Dec 31, 1582 if (scalar >= -115860 and scalar <= -115783): days = days + 10 if isLeapYear(year): daysByMonth = _daysInMonthLeapYear else: daysByMonth = _daysInMonthNormal dc = 0; month = 12 for m in xrange(len(daysByMonth)): dc = dc + daysByMonth[m] if dc >= days: month = m + 1 break # add up the days in prior months priorMonthDays = 0 for m in xrange(month - 1): priorMonthDays = priorMonthDays + daysByMonth[m] day = days - priorMonthDays self.setNormalDate((year, month, day)) def range(self, days): """Return a range of normalDates as a list. 
Parameter may be an int or normalDate.""" if type(days) is not IntType: days = days - self # if not int, assume arg is normalDate type r = [] for i in range(days): r.append(self + i) return r def __repr__(self): """print format: [-]yyyymmdd""" # Note: When disassembling a NormalDate string, be sure to # count from the right, i.e. epochMonth = int(`Epoch`[-4:-2]), # or the slice won't work for dates B.C. if self.normalDate < 0: return "%09d" % self.normalDate else: return "%08d" % self.normalDate def scalar(self): """days since baseline date: Jan 1, 1900""" (year, month, day) = self.toTuple() days = firstDayOfYear(year) + day - 1 if self.isLeapYear(): for m in xrange(month - 1): days = days + _daysInMonthLeapYear[m] else: for m in xrange(month - 1): days = days + _daysInMonthNormal[m] if year == 1582: if month > 10 or (month == 10 and day > 4): days = days - 10 return days def setDay(self, day): """set the day of the month""" maxDay = self.lastDayOfMonth() if day < 1 or day > maxDay: msg = "day is outside of range 1 to %d" % maxDay raise NormalDateException(msg) (y, m, d) = self.toTuple() self.setNormalDate((y, m, day)) def setMonth(self, month): """set the month [1-12]""" if month < 1 or month > 12: raise NormalDateException('month is outside range 1 to 12') (y, m, d) = self.toTuple() self.setNormalDate((y, month, d)) def setNormalDate(self, normalDate): """ accepts date as scalar string/integer (yyyymmdd) or tuple (year, month, day, ...)""" tn=type(normalDate) if tn is IntType: self.normalDate = normalDate elif tn is StringType: try: self.normalDate = int(normalDate) except: m = _iso_re.match(normalDate) if m: self.setNormalDate(m.group(1)+m.group(2)+m.group(3)) else: raise NormalDateException("unable to setNormalDate(%s)" % `normalDate`) elif tn in _DateSeqTypes: self.normalDate = int("%04d%02d%02d" % normalDate[:3]) elif tn is _NDType: self.normalDate = normalDate.normalDate elif isinstance(normalDate,(datetime.datetime,datetime.date)): self.normalDate = (normalDate.year*100+normalDate.month)*100+normalDate.day if not self._isValidNormalDate(self.normalDate): raise NormalDateException("unable to setNormalDate(%s)" % `normalDate`) def setYear(self, year): if year == 0: raise NormalDateException('cannot set year to zero') elif year < -9999: raise NormalDateException('year cannot be less than -9999') elif year > 9999: raise NormalDateException('year cannot be greater than 9999') (y, m, d) = self.toTuple() self.setNormalDate((year, m, d)) __setstate__ = setNormalDate def __sub__(self, v): if type(v) is IntType: return self.__add__(-v) return self.scalar() - v.scalar() def __rsub__(self,v): if type(v) is IntType: return NormalDate(v) - self else: return v.scalar() - self.scalar() def toTuple(self): """return date as (year, month, day) tuple""" return (self.year(), self.month(), self.day()) def year(self): """return year in yyyy format, negative values indicate B.C.""" return int(repr(self.normalDate)[:-4]) ################# Utility functions ################# def bigBang(): """return lower boundary as a NormalDate""" return NormalDate((-9999, 1, 1)) def bigCrunch(): """return upper boundary as a NormalDate""" return NormalDate((9999, 12, 31)) def dayOfWeek(y, m, d): """return integer representing day of week, Mon=0, Tue=1, etc.""" if m == 1 or m == 2: m = m + 12 y = y - 1 return (d + 2*m + 3*(m+1)/5 + y + y/4 - y/100 + y/400) % 7 def firstDayOfYear(year): """number of days to the first of the year, relative to Jan 1, 1900""" if type(year) is not IntType: msg = "firstDayOfYear() expected 
integer, got %s" % type(year) raise NormalDateException(msg) if year == 0:
elif year < 0: # BCE calculation firstDay = (year * 365) + int((year - 1) / 4) - 693596 else: # CE calculation leapAdjust = int((year + 3) / 4) if year > 1600: leapAdjust = leapAdjust - int((year + 99 - 1600) / 100) + \ int((year + 399 - 1600) / 400) firstDay = year * 365 + leapAdjust - 693963 if year > 1582: firstDay = firstDay - 10 return firstDay def FND(d): '''convert to ND if required''' return (type(d) is _NDType) and d or ND(d) Epoch=bigBang() ND=NormalDate _NDType = type(Epoch) BDEpoch=ND(15821018) BDEpochScalar = -115857 class BusinessDate(NormalDate): """ Specialised NormalDate """ def add(self, days): """add days to date; use negative integers to subtract""" if not type(days) is IntType: raise NormalDateException('add method parameter must be integer type') self.normalize(self.scalar() + days) def __add__(self, days): """add integer to BusinessDate and return a new, calculated value""" if not type(days) is IntType: raise NormalDateException('__add__ parameter must be integer type') cloned = self.clone() cloned.add(days) return cloned def __sub__(self, v): return type(v) is IntType and self.__add__(-v) or self.scalar() - v.scalar() def asNormalDate(self): return ND(self.normalDate) def daysBetweenDates(self, normalDate): return self.asNormalDate.daysBetweenDates(normalDate) def _checkDOW(self): if self.dayOfWeek()>4: raise NormalDateException("%s isn't a business day" % `self.normalDate`) def normalize(self, i): i = int(i) NormalDate.normalize(self,(i/5)*7+i%5+BDEpochScalar) def scalar(self): d = self.asNormalDate() i = d - BDEpoch #luckily BDEpoch is a Monday so we don't have a problem #concerning the relative weekday return 5*(i/7) + i%7 def setNormalDate(self, normalDate): NormalDate.setNormalDate(self,normalDate) self._checkDOW() if __name__ == '__main__': today = NormalDate() print "NormalDate test:" print " Today (%s) is: %s %s" % (today, today.dayOfWeekAbbrev(), today.localeFormat()) yesterday = today - 1 print " Yesterday was: %s %s" % (yesterday.dayOfWeekAbbrev(), yesterday.localeFormat()) tomorrow = today + 1 print " Tomorrow will be: %s %s" % (tomorrow.dayOfWeekAbbrev(), tomorrow.localeFormat()) print " Days between tomorrow and yesterday: %d" % (tomorrow - yesterday) print today.formatMS('{d}/{m}/{yy}') print today.formatMS('{dd}/{m}/{yy}') print today.formatMS('{ddd} {d}/{m}/{yy}') print today.formatMS('{dddd} {d}/{m}/{yy}') print today.formatMS('{d}/{mm}/{yy}') print today.formatMS('{d}/{mmm}/{yy}') print today.formatMS('{d}/{mmmm}/{yy}') print today.formatMS('{d}/{m}/{yyyy}') b = BusinessDate('20010116') print 'b=',b,'b.scalar()', b.scalar()
raise NormalDateException('first day of year cannot be zero (0)')
test_a2c.py
import shutil from genrl.agents import A2C from genrl.environments import VectorEnv from genrl.trainers import OnPolicyTrainer def test_a2c(): env = VectorEnv("CartPole-v0", 1) algo = A2C("mlp", env, rollout_size=128) trainer = OnPolicyTrainer(algo, env, log_mode=["csv"], logdir="./logs", epochs=1) trainer.train() shutil.rmtree("./logs") def test_a2c_cnn(): env = VectorEnv("Pong-v0", 1, env_type="atari") algo = A2C("cnn", env, rollout_size=128) trainer = OnPolicyTrainer(algo, env, log_mode=["csv"], logdir="./logs", epochs=1) trainer.train() shutil.rmtree("./logs") def
(): env = VectorEnv("CartPole-v0", 1) algo = A2C("mlp", env, shared_layers=(32, 32), rollout_size=128) trainer = OnPolicyTrainer(algo, env, log_mode=["csv"], logdir="./logs", epochs=1) trainer.train() shutil.rmtree("./logs")
test_a2c_shared
app.module.ts
import { NgModule } from '@angular/core'; import { BrowserModule } from '@angular/platform-browser';
@NgModule({ imports: [ BrowserModule,FormsModule ], declarations: [ AppComponent,ProductListComponent ], bootstrap: [ AppComponent ] }) export class AppModule { }
import {FormsModule } from '@angular/forms'; import { AppComponent } from './app.component'; import { ProductListComponent} from './products/product-list.component';
d_page.go
// simple Page struct layout for RESTFul API pagination package dto // Page is a struct for RESTful API pagination type Page struct { List interface{} `json:"list"` Pagination *Pagination `json:"pagination"` Meta interface{} `json:"meta,omitempty"` } // Pagination respects the pagination of the current page type Pagination struct { Total int64 `json:"total"` // 总条数 PageSize int `json:"pageSize"` // 页大小 Current int `json:"current"` // 当前页码 } const ( defaultMinPageSize = 20 defaultMaxPageSize = 100 ) // NewPage create page instance func NewPage() *Page { return &Page{ L
truct for Page request type PageForm struct { Page int `json:"page" form:"page" query:"page"` PageSize int `json:"pageSize" form:"pageSize" query:"pageSize"` } // GetPage get current page number func (p *PageForm) GetPage() int { if p.Page == 0 { return 1 } return p.Page } // GetPageSize get current page size func (p *PageForm) GetPageSize() int { if p.PageSize == 0 { return defaultMinPageSize } if p.PageSize > defaultMaxPageSize { return defaultMaxPageSize } return p.PageSize }
ist: []interface{}{}, Pagination: &Pagination{ Total: 0, PageSize: 0, Current: 1, }, } } // PageForm is a s
importer-registry.ts
// Copyright 2021 Google LLC. Use of this source code is governed by an // MIT-style license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. import * as p from 'path'; import {URL} from 'url'; import {inspect} from 'util'; import * as utils from './utils'; import {FileImporter, Importer, Options} from './vendor/sass'; import { InboundMessage, OutboundMessage, } from './vendor/embedded-protocol/embedded_sass_pb'; import {catchOr, thenOr, PromiseOr} from './utils'; /** * A registry of importers defined in the host that can be invoked by the * compiler. */ export class ImporterRegistry<sync extends 'sync' | 'async'> { /** Protocol buffer representations of the registered importers. */ readonly importers: InboundMessage.CompileRequest.Importer[]; /** A map from importer IDs to their corresponding importers. */ private readonly importersById = new Map<number, Importer<sync>>(); /** A map from file importer IDs to their corresponding importers. */ private readonly fileImportersById = new Map<number, FileImporter<sync>>(); /** The next ID to use for an importer. */ private id = 0; constructor(options?: Options<sync>) { this.importers = (options?.importers ?? []) .map(importer => this.register(importer)) .concat( (options?.loadPaths ?? []).map(path => { const proto = new InboundMessage.CompileRequest.Importer(); proto.setPath(p.resolve(path)); return proto; }) ); } /** Converts an importer to a proto without adding it to `this.importers`. */ register( importer: Importer<sync> | FileImporter<sync> ): InboundMessage.CompileRequest.Importer { const proto = new InboundMessage.CompileRequest.Importer(); if ('canonicalize' in importer) { if ('findFileUrl' in importer) { throw new Error( 'Importer may not contain both canonicalize() and findFileUrl(): ' + inspect(importer) ); } proto.setImporterId(this.id); this.importersById.set(this.id, importer); } else { proto.setFileImporterId(this.id); this.fileImportersById.set(this.id, importer); } this.id += 1; return proto; } /** Handles a canonicalization request. */ canonicalize( request: OutboundMessage.CanonicalizeRequest ): PromiseOr<InboundMessage.CanonicalizeResponse, sync> {
throw utils.compilerError('Unknown CanonicalizeRequest.importer_id'); } return catchOr( () => { return thenOr( importer.canonicalize(request.getUrl(), { fromImport: request.getFromImport(), }), url => { const proto = new InboundMessage.CanonicalizeResponse(); if (url !== null) proto.setUrl(url.toString()); return proto; } ); }, error => { const proto = new InboundMessage.CanonicalizeResponse(); proto.setError(`${error}`); return proto; } ); } /** Handles an import request. */ import( request: OutboundMessage.ImportRequest ): PromiseOr<InboundMessage.ImportResponse, sync> { const importer = this.importersById.get(request.getImporterId()); if (!importer) { throw utils.compilerError('Unknown ImportRequest.importer_id'); } return catchOr( () => { return thenOr(importer.load(new URL(request.getUrl())), result => { const proto = new InboundMessage.ImportResponse(); if (result) { const success = new InboundMessage.ImportResponse.ImportSuccess(); success.setContents(result.contents); success.setSyntax(utils.protofySyntax(result.syntax)); if (result.sourceMapUrl) { success.setSourceMapUrl(result.sourceMapUrl.toString()); } proto.setSuccess(success); } return proto; }); }, error => { const proto = new InboundMessage.ImportResponse(); proto.setError(`${error}`); return proto; } ); } /** Handles a file import request. */ fileImport( request: OutboundMessage.FileImportRequest ): PromiseOr<InboundMessage.FileImportResponse, sync> { const importer = this.fileImportersById.get(request.getImporterId()); if (!importer) { throw utils.compilerError('Unknown FileImportRequest.importer_id'); } return catchOr( () => { return thenOr( importer.findFileUrl(request.getUrl(), { fromImport: request.getFromImport(), }), url => { const proto = new InboundMessage.FileImportResponse(); if (url) { if (url.protocol !== 'file:') { throw ( `FileImporter ${inspect(importer)} returned non-file: URL ` + `"${url}" for URL "${request.getUrl()}".` ); } proto.setFileUrl(url.toString()); } return proto; } ); }, error => { const proto = new InboundMessage.FileImportResponse(); proto.setError(`${error}`); return proto; } ); } }
const importer = this.importersById.get(request.getImporterId()); if (!importer) {
config.tsx
export class Config {
public static DBX_CLIENT_REDIRECT = process.env.REACT_APP_DBX_REDIRECT_URL || ''; }
public static DBX_CLIENT_ID = process.env.REACT_APP_DBX_CLIENT_ID || '';
test_button06.py
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2017, John McNamara, [email protected] # from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'button05.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test2_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {} def
(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.insert_button('C2', {'macro': 'my_macro', 'width': 128, 'height': 30 }) workbook.close() self.assertExcelEqual()
test_create_file
watcher.go
// Copyright 2018 zebra project. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package rib import ( pb "github.com/coreswitch/zebra/proto" ) const ( WATCH_TYPE_INTERFACE = 0 WATCH_TYPE_ROUTER_ID = 1 WATCH_TYPE_REDIST = 2 WATCH_TYPE_REDIST_DEFAULT = 3 WATCH_TYPE_MAX = 4 ) type Watcher interface { Notify(interface{}) } type Watchers []Watcher func NewInterfaceUpdate(op pb.Op, ifp *Interface) *pb.InterfaceUpdate { return &pb.InterfaceUpdate{ Op: op, VrfId: uint32(ifp.VrfIndex), Name: ifp.Name, Index: uint32(ifp.Index), Flags: ifp.Flags, Mtu: ifp.Mtu, Metric: ifp.Metric, HwAddr: &pb.HwAddr{Addr: ifp.HwAddr}, } } func NewAddress(addr *IfAddr) *pb.Address { return &pb.Address{ Addr: &pb.Prefix{ Addr: addr.Prefix.IP, Length: uint32(addr.Prefix.Length), }, } } func NewInterfaceUpdateFull(op pb.Op, ifp *Interface) *pb.InterfaceUpdate { update := NewInterfaceUpdate(op, ifp) for _, addr := range ifp.Addrs[AFI_IP] { update.AddrIpv4 = append(update.AddrIpv4, NewAddress(addr)) } for _, addr := range ifp.Addrs[AFI_IP6] { update.AddrIpv6 = append(update.AddrIpv6, NewAddress(addr)) } return update } func NotifyInterfaces(w Watcher, vrf *Vrf) { for _, ifp := range vrf.IfMap { w.Notify(NewInterfaceUpdateFull(pb.Op_InterfaceAdd, ifp)) } } func (ifp *Interface) NotifyInterface(op pb.Op) { for _, w := range ifp.Vrf.Watchers[WATCH_TYPE_INTERFACE] { w.Notify(NewInterfaceUpdate(op, ifp)) } } func (ifp *Interface) NotifyInterfaceAdd() { ifp.NotifyInterface(pb.Op_InterfaceAdd) } func (ifp *Interface) NotifyInterfaceDelete() { ifp.NotifyInterface(pb.Op_InterfaceDelete) } func (ifp *Interface) NotifyInterfaceNameChange() { ifp.NotifyInterface(pb.Op_InterfaceNameChange) } func (ifp *Interface) NotifyInterfaceMtuChange() { ifp.NotifyInterface(pb.Op_InterfaceMtuChange) } func (ifp *Interface) NotifyInterfaceUp() { ifp.NotifyInterface(pb.Op_InterfaceUp) } func (ifp *Interface) NotifyInterfaceDown() { ifp.NotifyInterface(pb.Op_InterfaceDown) } func (ifp *Interface) NotifyInterfaceFlagChange() { ifp.NotifyInterface(pb.Op_InterfaceFlagChange) } func NewInterfaceAddrUpdate(op pb.Op, ifp *Interface, addr *IfAddr) *pb.InterfaceUpdate { update := NewInterfaceUpdate(op, ifp) switch addr.Prefix.AFI() { case AFI_IP: update.AddrIpv4 = append(update.AddrIpv4, NewAddress(addr)) case AFI_IP6: update.AddrIpv6 = append(update.AddrIpv6, NewAddress(addr)) } return update } func watcherNotifyIfAddr(op pb.Op, ifp *Interface, addr *IfAddr) { for _, w := range ifp.Vrf.Watchers[WATCH_TYPE_INTERFACE] { w.Notify(NewInterfaceAddrUpdate(op, ifp, addr)) } } func WatcherNotifyAddressAdd(ifp *Interface, addr *IfAddr) { watcherNotifyIfAddr(pb.Op_InterfaceAddrAdd, ifp, addr) } func WatcherNotifyAddressDelete(ifp *Interface, addr *IfAddr) { watcherNotifyIfAddr(pb.Op_InterfaceAddrDelete, ifp, addr) } func NotifyRouterId(w Watcher, vrf *Vrf) { w.Notify(&pb.RouterIdUpdate{ VrfId: uint32(vrf.Id),
func (vrf *Vrf) NotifyRouterId() { for _, w := range vrf.Watchers[WATCH_TYPE_ROUTER_ID] { NotifyRouterId(w, vrf) } }
RouterId: vrf.RouterId(), }) }
networking_perf.go
/* Copyright 2015 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package network // Tests network performance using iperf or other containers. import ( "fmt" "math" "time" "github.com/onsi/ginkgo" v1 "k8s.io/api/core/v1" "k8s.io/kubernetes/test/e2e/framework" e2elog "k8s.io/kubernetes/test/e2e/framework/log" imageutils "k8s.io/kubernetes/test/utils/image" ) const ( // empirically derived as a baseline for expectations from running this test using kube-up.sh. gceBandwidthBitsEstimate = int64(30000000000) // on 4 node clusters, we found this test passes very quickly, generally in less then 100 seconds. smallClusterTimeout = 200 * time.Second ) // networkingIPerf test runs iperf on a container in either IPv4 or IPv6 mode. func networkingIPerfTest(isIPv6 bool) { f := framework.NewDefaultFramework("network-perf") // A few simple bandwidth tests which are capped by nodes. // TODO replace the 1 with the scale option implementation // TODO: Make this a function parameter, once we distribute iperf endpoints, possibly via session affinity. numClient := 1 numServer := 1 maxBandwidthBits := gceBandwidthBitsEstimate familyStr := "" if isIPv6 { familyStr = "-V " } ginkgo.It(fmt.Sprintf("should transfer ~ 1GB onto the service endpoint %v servers (maximum of %v clients)", numServer, numClient), func() { nodes := framework.GetReadySchedulableNodesOrDie(f.ClientSet) totalPods := len(nodes.Items) // for a single service, we expect to divide bandwidth between the network. Very crude estimate. expectedBandwidth := int(float64(maxBandwidthBits) / float64(totalPods)) framework.ExpectNotEqual(totalPods, 0) appName := "iperf-e2e" _, err := f.CreateServiceForSimpleAppWithPods( 8001, 8002, appName, func(n v1.Node) v1.PodSpec { return v1.PodSpec{ Containers: []v1.Container{{ Name: "iperf-server", Image: imageutils.GetE2EImage(imageutils.Agnhost), Args: []string{ "/bin/sh", "-c", "/usr/local/bin/iperf " + familyStr + "-s -p 8001 ", }, Ports: []v1.ContainerPort{{ContainerPort: 8001}}, }}, NodeName: n.Name, RestartPolicy: v1.RestartPolicyOnFailure, } }, // this will be used to generate the -service name which all iperf clients point at. numServer, // Generally should be 1 server unless we do affinity or use a version of iperf that supports LB true, // Make sure we wait, otherwise all the clients will die and need to restart. ) if err != nil { e2elog.Failf("Fatal error waiting for iperf server endpoint : %v", err) } iperfClientPodLabels := f.CreatePodsPerNodeForSimpleApp( "iperf-e2e-cli", func(n v1.Node) v1.PodSpec { return v1.PodSpec{ Containers: []v1.Container{ { Name: "iperf-client", Image: imageutils.GetE2EImage(imageutils.Agnhost), Args: []string{ "/bin/sh", "-c", "/usr/local/bin/iperf " + familyStr + "-c service-for-" + appName + " -p 8002 --reportstyle C && sleep 5", }, }, }, RestartPolicy: v1.RestartPolicyOnFailure, // let them successfully die. 
} }, numClient, ) e2elog.Logf("Reading all perf results to stdout.") e2elog.Logf("date,cli,cliPort,server,serverPort,id,interval,transferBits,bandwidthBits") // Calculate expected number of clients based on total nodes. expectedCli := func() int { nodes := framework.GetReadySchedulableNodesOrDie(f.ClientSet) return int(math.Min(float64(len(nodes.Items)), float64(numClient))) }() // Extra 1/10 second per client. iperfTimeout := smallClusterTimeout + (time.Duration(expectedCli/10) * time.Second) iperfResults := &IPerfResults{} iperfClusterVerification := f.NewClusterVerification( f.Namespace, framework.PodStateVerification{ Selectors: iperfClientPodLabels, ValidPhases: []v1.PodPhase{v1.PodSucceeded}, }, ) pods, err2 := iperfClusterVerification.WaitFor(expectedCli, iperfTimeout) if err2 != nil { e2elog.Failf("Error in wait...") } else if len(pods) < expectedCli { e2elog.Failf("IPerf results: Only got %v out of %v, after waiting %v", len(pods), expectedCli, iperfTimeout) } else { // For each pod, build up a collection of IPerfRecords
if err == nil { e2elog.Logf(resultS) iperfResults.Add(NewIPerf(resultS)) } else { e2elog.Failf("Unexpected error, %v when running forEach on the pods.", err) } }) } fmt.Println("[begin] Node,Bandwidth CSV") fmt.Println(iperfResults.ToTSV()) fmt.Println("[end] Node,Bandwidth CSV") for ipClient, bandwidth := range iperfResults.BandwidthMap { e2elog.Logf("%v had bandwidth %v. Ratio to expected (%v) was %f", ipClient, bandwidth, expectedBandwidth, float64(bandwidth)/float64(expectedBandwidth)) } }) } // Declared as Flakey since it has not been proven to run in parallel on small nodes or slow networks in CI // TODO jayunit100 : Retag this test according to semantics from #22401 var _ = SIGDescribe("Networking IPerf IPv4 [Experimental] [Feature:Networking-IPv4] [Slow] [Feature:Networking-Performance]", func() { networkingIPerfTest(false) }) // Declared as Flakey since it has not been proven to run in parallel on small nodes or slow networks in CI // TODO jayunit100 : Retag this test according to semantics from #22401 var _ = SIGDescribe("Networking IPerf IPv6 [Experimental] [Feature:Networking-IPv6] [Slow] [Feature:Networking-Performance]", func() { networkingIPerfTest(true) })
iperfClusterVerification.ForEach( func(p v1.Pod) { resultS, err := framework.LookForStringInLog(f.Namespace.Name, p.Name, "iperf-client", "0-", 1*time.Second)
mqtt_le1.py
# -*- coding: utf-8 -*- """ Date: 2019/11/24 16:29 Author: lyf Change log: Notes: """ # Key commands: # # 1. Import the package # import paho.mqtt.client as mqtt # # 2. Create a client object # client = mqtt.Client(id) # # 3. Connect # client.connect(host, port) # # 4. Subscribe # client.subscribe(topic) # client.on_message=func # handler invoked when a message is received # # 5. Publish # client.publish(topic,payload) import paho.mqtt.client as mqtt import sys # host="192.168.45.3" host="127.0.0.1" topic_sub = "Question" topic_pub = "temperature" def on_connect(client, userdata, flags, rc): print("Connected with result code " + str(rc)) client.subscribe(topic_sub) def on_message(client, userdata, msg): print(msg.payload) client.publish(top
7°") def main(argv=None): # argv是sys模块下的方法用于接收命令行传参 # 声明客户端 client=mqtt.Client() # 连接 client.connect(host,1883,60) # 两个回调函数,用于执行连接成功和接收到信息要做的事 client.on_connect = on_connect client.on_message = on_message client.loop_forever() # if __name__ == "__main__": # sys.exit(main()) main()
ic_pub, "3
serializer.go
package serializer import ( "encoding/json" "io/ioutil" "os" ) // DeserializePath - reads file specified in the path argument // and deserializes the JSON object to the destination object func DeserializePath(path string, destination interface{}) error { template, err := os.Open(path) if err != nil { return err } defer template.Close() byteValue, err := ioutil.ReadAll(template) if err != nil { return err } return Deserialize([]byte(byteValue), &destination) } // Serialize the object parameter to a JSON byte array func Serialize(object interface{}) ([]byte, error) { serialized, err := json.Marshal(object) if err != nil { return []byte{}, err } return serialized, nil } // Deserialize the byte array to the destination interface func Deserialize(buffer []byte, destination interface{}) error
{ return json.Unmarshal(buffer, &destination) }
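A short usage sketch for the three helpers above; the import path, the Config struct and the config.json file are assumptions made purely for illustration.

// Illustrative only: round-tripping a struct through Serialize / Deserialize
// and loading one from disk with DeserializePath. The import path, Config
// struct and config.json file are hypothetical.
package main

import (
	"fmt"

	"example.com/project/serializer" // hypothetical import path
)

type Config struct {
	Name string `json:"name"`
	Port int    `json:"port"`
}

func main() {
	raw, err := serializer.Serialize(Config{Name: "demo", Port: 8080})
	if err != nil {
		panic(err)
	}

	var cfg Config
	if err := serializer.Deserialize(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg) // {Name:demo Port:8080}

	var fromFile Config
	if err := serializer.DeserializePath("config.json", &fromFile); err != nil {
		panic(err)
	}
}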
justification.rs
// Copyright 2018-2019 Parity Technologies (UK) Ltd. // This file is part of Substrate. // Substrate is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Substrate is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Substrate. If not, see <http://www.gnu.org/licenses/>. use crate::std::collections::{HashMap, HashSet}; use crate::std::string::ToString; use crate::std::vec::Vec; #[cfg(test)] use sc_client::Client; #[cfg(test)] use sc_client_api::{backend::Backend, CallExecutor}; use super::error::JustificationError as ClientError; use codec::{Decode, Encode}; use finality_grandpa::voter_set::VoterSet; use finality_grandpa::Error as GrandpaError; use sp_finality_grandpa::{AuthorityId, AuthoritySignature}; use sp_runtime::traits::{Block as BlockT, Header as HeaderT, NumberFor}; /// A GRANDPA justification for block finality, it includes a commit message and /// an ancestry proof including all headers routing all precommit target blocks /// to the commit target block. Due to the current voting strategy the precommit /// targets should be the same as the commit target, since honest voters don't /// vote past authority set change blocks. /// /// This is meant to be stored in the db and passed around the network to other /// nodes, and are used by syncing nodes to prove authority set handoffs. #[derive(Encode, Decode)] pub struct GrandpaJustification<Block: BlockT> { round: u64, pub(crate) commit: Commit<Block>, votes_ancestries: Vec<Block::Header>, } impl<Block: BlockT> GrandpaJustification<Block> { /// Create a GRANDPA justification from the given commit. This method /// assumes the commit is valid and well-formed. #[cfg(test)] pub(crate) fn from_commit<C>( client: &Arc<C>, round: u64, commit: Commit<Block>, ) -> Result<GrandpaJustification<Block>, Error> where C: HeaderBackend<Block>, { let mut votes_ancestries_hashes = HashSet::new(); let mut votes_ancestries = Vec::new(); let error = || { let msg = "invalid precommits for target commit".to_string(); Err(Error::Client(ClientError::BadJustification(msg))) }; for signed in commit.precommits.iter() { let mut current_hash = signed.precommit.target_hash.clone(); loop { if current_hash == commit.target_hash { break; } match client.header(&BlockId::Hash(current_hash))? { Some(current_header) => { if *current_header.number() <= commit.target_number { return error(); } let parent_hash = current_header.parent_hash().clone(); if votes_ancestries_hashes.insert(current_hash) { votes_ancestries.push(current_header); } current_hash = parent_hash; } _ => return error(), } } } Ok(GrandpaJustification { round, commit, votes_ancestries, }) } /// Decode a GRANDPA justification and validate the commit and the votes' /// ancestry proofs finalize the given block. 
pub(crate) fn decode_and_verify_finalizes( encoded: &[u8], finalized_target: (Block::Hash, NumberFor<Block>), set_id: u64, voters: &VoterSet<AuthorityId>, ) -> Result<GrandpaJustification<Block>, ClientError> where NumberFor<Block>: finality_grandpa::BlockNumberOps, { let justification = GrandpaJustification::<Block>::decode(&mut &*encoded) .map_err(|_| ClientError::JustificationDecode)?; if ( justification.commit.target_hash, justification.commit.target_number, ) != finalized_target { let msg = "invalid commit target in grandpa justification".to_string(); Err(ClientError::BadJustification(msg)) } else { justification.verify(set_id, voters).map(|_| justification) } } /// Validate the commit and the votes' ancestry proofs. pub(crate) fn verify( &self, set_id: u64, voters: &VoterSet<AuthorityId>, ) -> Result<(), ClientError> where NumberFor<Block>: finality_grandpa::BlockNumberOps,
} /// A utility trait implementing `finality_grandpa::Chain` using a given set of headers. /// This is useful when validating commits, using the given set of headers to /// verify a valid ancestry route to the target commit block. struct AncestryChain<Block: BlockT> { ancestry: HashMap<Block::Hash, Block::Header>, } impl<Block: BlockT> AncestryChain<Block> { fn new(ancestry: &[Block::Header]) -> AncestryChain<Block> { let ancestry: HashMap<_, _> = ancestry .iter() .cloned() .map(|h: Block::Header| (h.hash(), h)) .collect(); AncestryChain { ancestry } } } impl<Block: BlockT> finality_grandpa::Chain<Block::Hash, NumberFor<Block>> for AncestryChain<Block> where NumberFor<Block>: finality_grandpa::BlockNumberOps, { fn ancestry( &self, base: Block::Hash, block: Block::Hash, ) -> Result<Vec<Block::Hash>, GrandpaError> { let mut route = Vec::new(); let mut current_hash = block; loop { if current_hash == base { break; } match self.ancestry.get(&current_hash) { Some(current_header) => { current_hash = *current_header.parent_hash(); route.push(current_hash); } _ => return Err(GrandpaError::NotDescendent), } } route.pop(); // remove the base Ok(route) } fn best_chain_containing( &self, _block: Block::Hash, ) -> Option<(Block::Hash, NumberFor<Block>)> { None } } // copied /// A commit message for this chain's block type. pub type Commit<Block> = finality_grandpa::Commit< <Block as BlockT>::Hash, NumberFor<Block>, AuthoritySignature, AuthorityId, >; mod communication { use crate::std::vec::Vec; use codec::Encode; use log::debug; use sp_core::Pair; use sp_finality_grandpa::{ AuthorityId, AuthorityPair, AuthoritySignature, RoundNumber, SetId as SetIdNumber, }; use sp_runtime::traits::{Block as BlockT, NumberFor}; pub type Message<Block> = finality_grandpa::Message<<Block as BlockT>::Hash, NumberFor<Block>>; // Check the signature of a Grandpa message. // This was originally taken from `communication/mod.rs` /// Check a message signature by encoding the message as a localized payload and /// verifying the provided signature using the expected authority id. /// The encoding necessary to verify the signature will be done using the given /// buffer, the original content of the buffer will be cleared. pub(crate) fn check_message_sig_with_buffer<Block: BlockT>( message: &Message<Block>, id: &AuthorityId, signature: &AuthoritySignature, round: RoundNumber, set_id: SetIdNumber, buf: &mut Vec<u8>, ) -> Result<(), ()> { let as_public = id.clone(); localized_payload_with_buffer(round, set_id, message, buf); if AuthorityPair::verify(signature, buf, &as_public) { Ok(()) } else { debug!(target: "afg", "Bad signature on message from {:?}", id); Err(()) } } /// Encode round message localized to a given round and set id using the given /// buffer. The given buffer will be cleared and the resulting encoded payload /// will always be written to the start of the buffer. pub(crate) fn localized_payload_with_buffer<E: Encode>( round: RoundNumber, set_id: SetIdNumber, message: &E, buf: &mut Vec<u8>, ) { buf.clear(); (message, round, set_id).encode_to(buf) } }
{ use finality_grandpa::Chain; let ancestry_chain = AncestryChain::<Block>::new(&self.votes_ancestries); match finality_grandpa::validate_commit(&self.commit, voters, &ancestry_chain) { Ok(ref result) if result.ghost().is_some() => {} _ => { let msg = "invalid commit in grandpa justification".to_string(); return Err(ClientError::BadJustification(msg)); } } let mut buf = Vec::new(); let mut visited_hashes = HashSet::new(); for signed in self.commit.precommits.iter() { if communication::check_message_sig_with_buffer::<Block>( &finality_grandpa::Message::Precommit(signed.precommit.clone()), &signed.id, &signed.signature, self.round, set_id, &mut buf, ) .is_err() { return Err(ClientError::BadJustification( "invalid signature for precommit in grandpa justification".to_string(), )); } if self.commit.target_hash == signed.precommit.target_hash { continue; } match ancestry_chain.ancestry(self.commit.target_hash, signed.precommit.target_hash) { Ok(route) => { // ancestry starts from parent hash but the precommit target hash has been visited visited_hashes.insert(signed.precommit.target_hash); for hash in route { visited_hashes.insert(hash); } } _ => { return Err(ClientError::BadJustification( "invalid precommit ancestry proof in grandpa justification".to_string(), )); } } } let ancestry_hashes = self .votes_ancestries .iter() .map(|h: &Block::Header| h.hash()) .collect(); if visited_hashes != ancestry_hashes { return Err(ClientError::BadJustification( "invalid precommit ancestries in grandpa justification with unused headers" .to_string(), )); } Ok(()) }
cli.go
package main import ( "log" "github.com/dollarkillerx/dog" ) func main()
{ log.SetFlags(log.Llongfile | log.LstdFlags) dog.Execute() }
configuration.tsx
import React from 'react'; import * as microsoftTeams from "@microsoft/teams-js"; const Configuration = () => { const [tabId, setTabId] = React.useState(''); React.useEffect(() => { microsoftTeams.app.initialize().then(() => { microsoftTeams.app.getContext().then(async (context: microsoftTeams.app.Context) => { setTabId(context.page.id) }); microsoftTeams.pages.config.registerOnSaveHandler(async (saveEvent: microsoftTeams.settings.SaveEvent) => { microsoftTeams.pages.config.setConfig({ entityId: tabId, contentUrl: `${window.location.origin}/tab`, suggestedDisplayName: 'Live coding', }); saveEvent.notifySuccess(); }); microsoftTeams.pages.config.setValidityState(true); });
<div className="config-container"> Please click on save to configure this tab </div> ) } export default (Configuration);
}, []); return (
lib.rs
use std::isize; use failure::Error; use ndarray::Array2; use t_ba5e::{align, AlignmentParameters, read_scoring_matrix}; use utility; /// Find a Highest-Scoring Fitting Alignment of Two Strings /// /// Given: Two DNA strings v and w, where v has length at most 10000 and w has length at most 1000. /// /// Return: The maximum score of a fitting alignment of v and w, followed by a fitting alignment /// achieving this maximum score. Use the simple scoring method in which matches count +1 and both /// the mismatch and indel penalties are equal to 1. (If multiple fitting alignments achieving /// the maximum score exist, you may return any one.) pub fn rosalind_ba5h(filename: &str) -> Result<(), Error> { let contents = utility::io::input_from_file(filename)?; let lines: Vec<_> = contents.split('\n').collect(); let (_, amino_acids) = read_scoring_matrix(utility::io::PAM_FILE)?; let mut scoring_matrix = Array2::<isize>::zeros((amino_acids.len(), amino_acids.len())); scoring_matrix.fill(-1); scoring_matrix.diag_mut().fill(1); let parameters = AlignmentParameters::new(scoring_matrix, amino_acids, 1); let (score, aln_string_1, aln_string_2) = fitting_align(lines[0], lines[1], &parameters); println!("{}\n{}\n{}", score, aln_string_1, aln_string_2); Ok(()) } pub fn fitting_alignment_backtrack( string_1: &str, string_2: &str, parameters: &AlignmentParameters, ) -> (Array2<isize>, Array2<usize>) { let chars_1: Vec<_> = string_1.chars().collect(); let chars_2: Vec<_> = string_2.chars().collect(); let mut scores = Array2::zeros((chars_1.len() + 1, chars_2.len() + 1)); let mut backtrack = Array2::zeros((chars_1.len() + 1, chars_2.len() + 1)); for j in 1..=chars_2.len() { scores[(0, j)] = scores[(0, j - 1)] - parameters.gap_penalty; backtrack[(0, j)] = 2; }
let values: Vec<isize> = vec![ (scores[(i - 1, j)] - parameters.gap_penalty), (scores[(i, j - 1)] - parameters.gap_penalty), (scores[(i - 1, j - 1)] + parameters.scoring_matrix[( parameters.amino_acid_order[&chars_1[i - 1]], parameters.amino_acid_order[&chars_2[j - 1]], )]), ]; let (max_index, max_value) = values .into_iter() .enumerate() .max_by(|a, b| a.1.cmp(&b.1)) .unwrap(); scores[(i, j)] = max_value; backtrack[(i, j)] = max_index + 1; if j == chars_2.len() && max_value <= 0 { scores[(i, j)] = 0; backtrack[(i, j)] = 0; } } } (scores, backtrack) } fn fitting_align( string_1: &str, string_2: &str, parameters: &AlignmentParameters, ) -> (isize, String, String) { let (scores, backtrack) = fitting_alignment_backtrack(string_1, string_2, parameters); let (string_1, string_2): (Vec<_>, Vec<_>) = (string_1.chars().collect(), string_2.chars().collect()); let mut n = 0; let mut max_score = isize::MIN; for i in 0..=string_1.len() { if scores[(i, string_2.len())] > max_score { max_score = scores[(i, string_2.len())]; n = i; } } let (aln_1, aln_2) = align(&backtrack, &string_1, &string_2, n, string_2.len()); (max_score, aln_1, aln_2) }
for i in 1..=chars_1.len() { for j in 1..=chars_2.len() {
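To make the scoring scheme from the problem statement concrete (+1 for a match, -1 for a mismatch or an indel), here is a small illustrative Go snippet that scores an already-aligned pair of strings; it is not part of the Rust solution above and does not perform the fitting-alignment DP itself.

// Illustrative only: applies the simple scoring rule the problem statement
// describes (+1 match, -1 mismatch or indel) to a pre-aligned pair of strings.
package main

import "fmt"

func alignmentScore(a, b string) int {
	score := 0
	for i := 0; i < len(a) && i < len(b); i++ {
		switch {
		case a[i] == '-' || b[i] == '-': // indel (gap in either string)
			score--
		case a[i] == b[i]: // match
			score++
		default: // mismatch
			score--
		}
	}
	return score
}

func main() {
	// Hypothetical aligned strings: 4 matches, 1 mismatch, 1 indel => score 2.
	fmt.Println(alignmentScore("TAGATA", "TA-ACA")) // prints 2
}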
chapter10.concurrency.go
package main import ( "fmt" "time" "strconv" ) // Concurrency // to run this $ go run $0 /* GoRoutines */ func printSquare (idx int, sleeptime int)
func PrintN(n int) () { for idx := 0; idx < n; idx++{ go printSquare(idx, idx * 100) } } /* Channels */ // Pingr -> Recvr func Pingr (c chan string) { for i := 97; i <= 122 ; i++ { c <- "[" + string(i) + "] P!ng" } } func Recvr (c chan string) { for { msg := <-c fmt.Println(msg) time.Sleep(time.Millisecond * 100) } } // Pingr -> Recvr <- Pongr func Pongr (c chan string) { for i := 65; i <= 90 ; i++ { c <- "(" + string(i) + ") P0ng" } } // SendOnly -> RecvOnly func SendOnly(c chan<- string) { for i := 0; i <= 10 ; i++ { c <- "(" + strconv.Itoa(i) + ") I can only send." } } func RecvOnly(c <-chan string) { for { msg := <-c fmt.Println(msg) time.Sleep(time.Millisecond * 100) } } // Select func SelectChannel() { c1 := make(chan string) c2 := make(chan string) go func(){ c1 <- "~~~~~~~~~" time.Sleep(time.Millisecond * 250) }() go func(){ c2 <- "=========" time.Sleep(time.Millisecond * 250) }() go func(){ for { select { case msg1 := <-c1: fmt.Println(msg1) case msg2 := <-c2: fmt.Println(msg2) case <- time.After(time.Second * 2): fmt.Println("Timeout") // best place for Timeout // default: // fmt.Println("waiting...") } } }() } // Ping <-> Pong func PingPong (msg string, ping <-chan string, pong chan<- string) { for reciv := range ping { <-time.After(2 * time.Second) fmt.Println(">>>>>>>", reciv) pong <- msg } } // Sleep using time.After func SoJaao(n int){ for { select{ case <- time.After(time.Second * time.Duration(n)): return } } } /* main */ func main(){ PrintN(10) time.Sleep(time.Second * 2) var c chan string = make(chan string) go Pingr(c) go Recvr(c) time.Sleep(time.Second * 4) go Pingr(c) go Pongr(c) go Recvr(c) time.Sleep(time.Second * 4) go SendOnly(c) go RecvOnly(c) time.Sleep(time.Second * 2) // buffered async var cc chan string = make(chan string, 5) go Pingr(cc) go Recvr(cc) time.Sleep(time.Second * 4) SelectChannel() SoJaao(1) ping, pong := make(chan string), make(chan string) go PingPong("piing", ping, pong) go PingPong("poong", pong, ping) ping <- "Ping!" var input string fmt.Println("\nEnter anytime to exit.\n", ) fmt.Scanln(&input) fmt.Println("It was a formality to delay loop counts, why you entered", input) }
{ if idx % 2 == 0 { time.Sleep(time.Millisecond * time.Duration(sleeptime)) } fmt.Println("square of ", idx, "is", idx*idx) }
tests.rs
// Copyright 2018-2021 Parity Technologies (UK) Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::ext::{ Engine, Error, }; /// The public methods of the `contracts` pallet write their result into an /// `output` buffer instead of returning them. Since we aim to emulate this /// behavior, we have to provide some buffer for our tests to pass into these /// emulated methods, so that they can write their result into it. /// /// The number 1024 is more or less arbitrary, it just satisfies the need of /// our tests without being too large. fn get_buffer() -> [u8; 1024]
#[test] fn store_load_clear() { let mut engine = Engine::new(); engine.set_callee(vec![1; 32]); let key: &[u8; 32] = &[0x42; 32]; let output = &mut &mut get_buffer()[..]; let res = engine.get_storage(key, output); assert_eq!(res, Err(Error::KeyNotFound)); engine.set_storage(key, &[0x05_u8; 5]); let res = engine.get_storage(key, output); assert_eq!(res, Ok(()),); assert_eq!(output[..5], [0x05; 5]); engine.clear_storage(key); let res = engine.get_storage(key, output); assert_eq!(res, Err(Error::KeyNotFound)); } #[test] fn setting_getting_balance() { // given let mut engine = Engine::new(); let account_id = vec![1; 32]; let balance = 1337; engine.set_callee(account_id.clone()); engine.set_balance(account_id, balance); // when let mut output = get_buffer(); engine.balance(&mut &mut output[..]); // then let output = <u128 as scale::Decode>::decode(&mut &output[..16]) .expect("decoding balance failed"); assert_eq!(output, balance); } #[test] fn setting_getting_caller() { // given let mut engine = Engine::new(); let account_id = vec![1; 32]; // when engine.set_caller(account_id.clone()); // then let mut output = get_buffer(); engine.caller(&mut &mut output[..]); assert_eq!(&output[..account_id.len()], &account_id); } #[test] fn address() { // given let mut engine = Engine::new(); let account_id = vec![1; 32]; engine.set_callee(account_id.clone()); // when let mut output = get_buffer(); engine.address(&mut &mut output[..]); // then assert_eq!(&output[..account_id.len()], &account_id); } #[test] fn transfer() { // given let mut engine = Engine::new(); let alice = vec![1; 32]; let bob = vec![2; 32]; engine.set_callee(alice.clone()); engine.set_balance(alice.clone(), 1337); // when let val = scale::Encode::encode(&337u128); assert_eq!(engine.transfer(&bob, &val), Ok(())); // then assert_eq!(engine.get_balance(alice), Ok(1000)); assert_eq!(engine.get_balance(bob), Ok(337)); } #[test] fn debug_messages() { let mut engine = Engine::new(); engine.debug_message("foobar"); let mut recorded = engine.get_emitted_debug_messages().into_iter(); assert_eq!(recorded.next(), Some("foobar".into())); assert_eq!(recorded.next(), None); } #[test] fn events() { // given let mut engine = Engine::new(); let topics_count: scale::Compact<u32> = scale::Compact(2u32); let mut enc_topics_count = scale::Encode::encode(&topics_count); let topic1 = vec![12u8, 13]; let topic2 = vec![14u8, 15]; let data = &vec![21, 22, 23]; // when let mut enc_topics_info: Vec<u8> = Vec::new(); enc_topics_info.append(&mut enc_topics_count); enc_topics_info.append(&mut topic1.clone()); enc_topics_info.append(&mut topic2.clone()); engine.deposit_event(&enc_topics_info, data); // then let mut events = engine.get_emitted_events(); let event = events.next().expect("event must exist"); assert_eq!(event.topics.len(), 2); assert_eq!( event.topics.get(0).expect("first topic must exist"), &topic1 ); assert_eq!( event.topics.get(1).expect("second topic must exist"), &topic2 ); assert_eq!(&event.data, data); assert!(events.next().is_none()); } #[test] fn value_transferred() { // given let mut engine = Engine::new(); let value = 1337; engine.set_value_transferred(value); // when let output = &mut &mut get_buffer()[..]; engine.value_transferred(output); // then let output = <u128 as scale::Decode>::decode(&mut &output[..16]) .expect("decoding value transferred failed"); assert_eq!(output, value); } #[test] #[should_panic( expected = "the output buffer is too small! the decoded storage is of size 16 bytes, but the output buffer has only room for 8." 
)] fn must_panic_when_buffer_too_small() { // given let mut engine = Engine::new(); engine.set_callee(vec![1; 32]); let key: &[u8; 32] = &[0x42; 32]; engine.set_storage(key, &[0x05_u8; 16]); // when let mut small_buffer = [0; 8]; let output = &mut &mut small_buffer[..]; let _ = engine.get_storage(key, output); // then unreachable!("`get_storage` must already have panicked"); }
{ [0; 1024] }
test_job_queue.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_job_queue.py Test suite for zcomx/modules/job_queue.py """ import datetime import os import subprocess import time import unittest from gluon import * from applications.zcomx.modules.job_queue import \ CLIOption, \ Daemon, \ DaemonSignalError, \ IgnorableJob, \ InvalidCLIOptionError, \ InvalidJobOptionError, \ InvalidStatusError, \ Job, \ JobHistory, \ JobQueuer, \ Queue, \ QueueEmptyError, \ QueueLockedError, \ QueueLockedExtendedError, \ Queuer, \ Requeuer from applications.zcomx.modules.tests.runner import LocalTestCase from applications.zcomx.modules.tests.trackers import TableTracker # C0111: *Missing docstring* # R0904: *Too many public methods (%s/%s)* # pylint: disable=C0111,R0904 TMP_DIR = '/tmp/test_suite/job_queue' if not os.path.exists(TMP_DIR): os.makedirs(TMP_DIR) class SubQueuer(Queuer): """Sub class of Queuer used for testing.""" class_factory_id = 'some_program' program = 'some_program.py' default_job_options = { 'priority': 1, 'status': 'd' } default_cli_options = { '-a': False, '-b': True, '-c': 'ccc', '-d': ['d1', 'd2'] } valid_cli_options = ['-a', '-b', '-c', '-d', '-e'] queue_class = Queue def __init__( self, tbl, job_options=None, cli_options=None, cli_args=None, delay_seconds=0): Queuer.__init__( self, tbl, job_options=job_options, cli_options=cli_options, cli_args=cli_args, delay_seconds=delay_seconds ) class TestCLIOption(LocalTestCase): def test____init__(self): cli_option = CLIOption('-a') self.assertTrue(cli_option) def test____str__(self): tests = [ # (option, value, expect) ('-a', None, ''), ('-a', False, ''), ('-a', True, '-a'), ('--action', True, '--action'), ('-a', 'list', '-a list'), ('-a', 111, '-a 111'), ('-a', ['opt1'], '-a opt1'), ('-a', ['opt1', 'opt2'], '-a opt1 -a opt2'), ( '-a', """my "list" of 'items'""", '-a \'my "list" of \'"\'"\'items\'"\'"\'\'' ), ( '-a', ["""a'b"c""", """d"e'f"""], '-a \'a\'"\'"\'b"c\' -a \'d"e\'"\'"\'f\'' ), ] for t in tests: cli_option = CLIOption(t[0], value=t[1]) self.assertEqual(str(cli_option), t[2]) class TestDaemon(LocalTestCase): name = 'zco_queued' pid_filename = '/tmp/test_suite/job_queue/pid' def test____init__(self): daemon = Daemon(self.name) self.assertEqual(daemon.pid_filename, '/tmp/zco_queued/pid') daemon = Daemon(self.name, pid_filename='/tmp/testing') self.assertEqual(daemon.pid_filename, '/tmp/testing') def test__read_pid(self): daemon = Daemon(self.name, self.pid_filename) open(self.pid_filename, 'w').close() # Empty file self.assertEqual(daemon.read_pid(), {}) with open(self.pid_filename, 'w') as f: f.write("a: 1\n") f.write("first name: John\n") f.write("start time: 2000-01-01 12:59:59\n") f.write("nada: \n") f.write("empty:\n") self.assertEqual(daemon.read_pid(), { 'a': '1', 'first name': 'John', 'start time': '2000-01-01 12:59:59', 'nada': '', 'empty': '', }) def test__signal(self): daemon = Daemon(self.name, self.pid_filename) self.assertRaises(DaemonSignalError, daemon.signal) # The details of the method are not easily tested. The method issues # an os.kill() command and not recommend to run. 
def test__update_pid(self): daemon = Daemon(self.name, self.pid_filename) open(self.pid_filename, 'w').close() # Empty file daemon.update_pid() params = daemon.read_pid() self.assertEqual(list(params.keys()), ['last']) data = { 'pid': '1234', 'start': '2003-03-03 03:30:33', 'last': '', } daemon.write_pid(data) daemon.update_pid() params = daemon.read_pid() self.assertEqual(sorted(params.keys()), ['last', 'pid', 'start']) self.assertEqual(params['pid'], data['pid']) self.assertEqual(params['start'], data['start']) self.assertNotEqual(params['last'], data['last']) def test__write_pid(self):
class TestIgnorableJob(LocalTestCase): def test__is_ignored(self): now = datetime.datetime.now() nine_minutes_ago = now - datetime.timedelta(minutes=9) eleven_minutes_ago = now - datetime.timedelta(minutes=11) command = 'test__is_ignored' priority = 10 data = dict( command=command, priority=priority, start=now, status='d', ignorable=True, ) reset_data = dict(data) def reset(job): return IgnorableJob.from_updated(job, reset_data) job_1 = IgnorableJob.from_add(data) self._objects.append(job_1) job_2 = IgnorableJob.from_add(data) self._objects.append(job_2) job_1 = reset(job_1) job_2 = reset(job_2) self.assertTrue(job_1.is_ignored(status='d')) for ignorable in [True, False]: data = dict(ignorable=ignorable) job_1 = IgnorableJob.from_updated(job_1, data) self.assertEqual(job_1.is_ignored(status='d'), ignorable) job_1 = reset(job_1) tests = [ # (job_1.start, start_limit_seconds, expect) (now, None, True), (nine_minutes_ago, None, True), (eleven_minutes_ago, None, False), (nine_minutes_ago, 539, False), (nine_minutes_ago, 540, False), (nine_minutes_ago, 541, True), ] for t in tests: data = dict(start=t[0]) job_1 = IgnorableJob.from_updated(job_1, data) if t[1] is None: self.assertEqual(job_1.is_ignored(status='d'), t[2]) else: self.assertEqual( job_1.is_ignored(status='d', start_limit_seconds=t[1]), t[2] ) class TestJob(LocalTestCase): pass # Record subclass class TestJobHistory(LocalTestCase): def test_init__(self): query = (db.job_history) job_history = JobHistory.from_query(query) self.assertTrue(job_history) class TestJobQueuer(LocalTestCase): def test_init__(self): query = (db.job_queuer.code == 'search_prefetch') job_queuer = JobQueuer.from_query(query) self.assertTrue(job_queuer) class TestQueue(LocalTestCase): @classmethod def clear_queue(cls): db(db.job.id > 0).delete() db.commit() def test____init__(self): queue = Queue(db.job) self.assertTrue(queue) def test__add_job(self): queue = Queue(db.job) TestQueue.clear_queue() self.assertEqual(len(queue.jobs()), 0) now = datetime.datetime.now() job_data = dict( command='pwd', priority=1, start=now, ) ret = queue.add_job(job_data) self._objects.append(ret) self.assertEqual(ret.command, job_data['command']) self.assertTrue(ret.id > 0) self.assertEqual(len(queue.jobs()), 1) # Test pre- and post- processiong. 
class MyQueue(Queue): """Queue subclass for testing""" def __init__(self, tbl): Queue.__init__(self, tbl) self.trace = [] def pre_add_job(self): """Test override.""" self.trace.append('pre') def post_add_job(self): """Test override.""" self.trace.append('post') my_queue = MyQueue(db.job) TestQueue.clear_queue() self.assertEqual(len(my_queue.jobs()), 0) ret = my_queue.add_job(job_data) self._objects.append(ret) self.assertTrue(ret.id > 0) self.assertEqual(my_queue.trace, ['pre', 'post']) def test__job_generator(self): queue = Queue(db.job) gen = queue.job_generator() # No jobs self.assertRaises(StopIteration, gen.__next__) job_data = [ # (command, start, priority, status) ('do_a', '2010-01-01 10:00:00', 1, 'a'), ('do_b', '2010-01-01 10:00:00', 5, 'a'), ('do_c', '2010-01-01 10:00:00', 9, 'a'), ] all_jobs = [] for j in job_data: job = queue.add_job( dict(command=j[0], start=j[1], priority=j[2], status=j[3]) ) all_jobs.append(job) gen = queue.job_generator() job = next(gen) self.assertEqual(job.command, 'do_c') all_jobs[2].delete() job = next(gen) self.assertEqual(job.command, 'do_b') all_jobs[1].delete() job = next(gen) self.assertEqual(job.command, 'do_a') all_jobs[0].delete() self.assertRaises(StopIteration, gen.__next__) for j in all_jobs: try: j.delete() except LookupError: pass self.assertEqual(queue.stats(), {}) def test__jobs(self): # Add a new 'z' status to test with. db.job.status.requires = IS_IN_SET(['a', 'd', 'p', 'z']) queue = Queue(db.job) TestQueue.clear_queue() self.assertEqual(len(queue.jobs()), 0) job_data = [ # (start, priority, status) # Do not use status='a' or status='p' or jobs will be run. ('2010-01-01 10:00:00', 0, 'z'), ('2010-01-01 10:00:00', 0, 'd'), ('2010-01-01 10:00:01', -1, 'z'), ('2010-01-01 10:00:01', -1, 'd'), ('2010-01-01 10:00:02', 1, 'z'), ('2010-01-01 10:00:02', 1, 'd'), ] all_jobs = [] for j in job_data: job_d = dict(command='pwd', start=j[0], priority=j[1], status=j[2]) job = Job.from_add(job_d) self._objects.append(job) all_jobs.append(job) job_set = queue.jobs() self.assertEqual(len(job_set), 6) self.assertEqual(job_set, all_jobs) # Test query query = (db.job.status == 'z') job_set = queue.jobs(query=query) self.assertEqual(len(job_set), 3) self.assertEqual( job_set, [all_jobs[0], all_jobs[2], all_jobs[4]] ) query = (db.job.status == 'd') & \ (db.job.start <= '2010-01-01 10:00:01') job_set = queue.jobs(query=query) self.assertEqual(len(job_set), 2) self.assertEqual( job_set, [all_jobs[1], all_jobs[3]] ) # Test orderby # Orderby priority ASC query = (db.job.status == 'z') job_set = queue.jobs(query=query, orderby=db.job.priority) self.assertEqual(len(job_set), 3) self.assertEqual( job_set, [all_jobs[2], all_jobs[0], all_jobs[4]] ) # Orderby priority DESC query = (db.job.status == 'z') job_set = queue.jobs(query=query, orderby=~db.job.priority) self.assertEqual(len(job_set), 3) self.assertEqual( job_set, [all_jobs[4], all_jobs[0], all_jobs[2]] ) # Test limitby # Highest priority job query = (db.job.status == 'z') job_set = queue.jobs(query=query, orderby=~db.job.priority, limitby=1) self.assertEqual(len(job_set), 1) self.assertEqual(job_set, [all_jobs[4]]) def test__lock(self): queue = Queue(db.job) # Test lock using default lock file. This test only works if the queue # is not currently locked by an outside program. 
if os.path.exists(queue.lock_filename): os.unlink(queue.lock_filename) self.assertFalse(os.path.exists(queue.lock_filename)) queue.lock() self.assertTrue(os.path.exists(queue.lock_filename)) queue.unlock() self.assertFalse(os.path.exists(queue.lock_filename)) # Test lock with custom filename. lock_file = os.path.join(TMP_DIR, 'test__lock.pid') if os.path.exists(lock_file): os.unlink(lock_file) self.assertFalse(os.path.exists(lock_file)) queue.lock(filename=lock_file) self.assertTrue(os.path.exists(lock_file)) # Test raise QueueLockedError self.assertRaises(QueueLockedError, queue.lock, filename=lock_file) # Test raise QueueLockedExtendedError time.sleep(2) # Lock period < extended seconds, raises QueueLockedError self.assertRaises( QueueLockedError, queue.lock, filename=lock_file, extended_seconds=9999 ) # Lock period > extended seconds, raises QueueLockedExtendedError self.assertRaises( QueueLockedExtendedError, queue.lock, filename=lock_file, extended_seconds=1 ) queue.unlock(filename=lock_file) self.assertFalse(os.path.exists(lock_file)) def test__post_add_job(self): # See test__add_job pass def test__pre_add_job(self): # See test__add_job pass def test__run_job(self): queue = Queue(db.job) def do_run(job): try: queue.run_job(job) except subprocess.CalledProcessError: return 1 else: return 0 job = Job(dict(command=None, status='a')) # No command defined, should fail. self.assertFalse(do_run(job)) tmp_file = os.path.join(TMP_DIR, 'test__run_output.txt') text = 'Hello World!' script = """ #!/usr/bin/env python def main(): import sys with open('{file}', 'w') as f: f.write("{text}") f.write("\\n") for c, arg in enumerate(sys.argv): if c == 0: continue f.write(str(c) + ': ' + arg + "\\n") if __name__ == '__main__': main() """.format(file=tmp_file, text=text) script_name = os.path.join(TMP_DIR, 'test__run.py') with open(script_name, 'w') as f: f.write(script.strip()) os.chmod(script_name, 0o700) # Test without args or options job.command = script_name self.assertEqual(do_run(job), 0) expect = """Hello World! """ got = '' with open(tmp_file, 'r') as f: got = f.read() self.assertEqual(got, expect) # Test with args or options job.command = "{script} -v -a delete 123".format(script=script_name) self.assertEqual(do_run(job), 0) expect = """Hello World! 
1: -v 2: -a 3: delete 4: 123 """ got = '' with open(tmp_file, 'r') as f: got = f.read() self.assertEqual(got, expect) def test__set_job_status(self): queue = Queue(db.job) job = self.add(Job, dict(command='pwd', status='d')) self.assertEqual(job.status, 'd') for status in ['a', 'd', 'p']: got = queue.set_job_status(job, status) self.assertEqual(got.status, status) # Invalid status self.assertRaises(InvalidStatusError, queue.set_job_status, job, 'z') def test__stats(self): queue = Queue(db.job) TestQueue.clear_queue() self.assertEqual(len(queue.jobs()), 0) self.add(Job, dict(status='a')) self.add(Job, dict(status='a')) self.add(Job, dict(status='d')) self.add(Job, dict(status='p')) self.assertEqual(queue.stats(), {'a': 2, 'd': 1, 'p': 1}) def test__top_job(self): queue = Queue(db.job) TestQueue.clear_queue() self.assertEqual(len(queue.jobs()), 0) self.assertRaises(QueueEmptyError, queue.top_job) jobs = [ # (command, start, priority) ('do_a', '2010-01-01 10:00:00', 0), ('do_b', '2010-01-01 10:00:01', -1), ('do_c', '2010-01-01 10:00:02', 1), ('do_d', '2999-12-31 23:59:59', 1), ] for j in jobs: self.add(Job, dict(command=j[0], start=j[1], priority=j[2])) job = queue.top_job() self.assertEqual(job.command, 'do_c') def test__unlock(self): # See test__lock() pass class TestQueuer(LocalTestCase): def test____init__(self): queuer = Queuer(db.job) self.assertTrue(queuer) self.assertEqual(queuer.queue_class, Queue) self.assertEqual(Queuer.bin_path, 'applications/zcomx/private/bin') def test__command(self): queuer = SubQueuer(db.job) self.assertEqual( queuer.command(), 'some_program.py -b -c ccc -d d1 -d d2') queuer = SubQueuer(db.job, cli_args=['file', 'arg2']) self.assertEqual( queuer.command(), 'some_program.py -b -c ccc -d d1 -d d2 file arg2' ) # Disable defaults queuer = SubQueuer( db.job, cli_options={ '-a': False, '-b': False, '-c': False, '-d': False, }, cli_args=['file'] ) self.assertEqual(queuer.command(), 'some_program.py file') invalid_cli_options = {'-x': 'invalid'} queuer = SubQueuer(db.job, cli_options=invalid_cli_options) self.assertRaises(InvalidCLIOptionError, queuer.command) # Handle quotes queuer = SubQueuer( db.job, cli_options={ '-a': False, '-b': False, '-c': False, '-d': False, '-e': """A 'B' "C" D""", }, cli_args=['file'], ) self.assertEqual( queuer.command(), 'some_program.py -e \'A \'"\'"\'B\'"\'"\' "C" D\' file' ) queuer = SubQueuer( db.job, cli_options={ '-a': False, '-b': False, '-c': False, }, cli_args=["""A 'B' "C" D"""], ) self.assertEqual( queuer.command(), 'some_program.py -d d1 -d d2 \'A \'"\'"\'B\'"\'"\' "C" D\'' ) def test__job_data(self): then = datetime.datetime.now() data = SubQueuer(db.job).job_data() self.assertEqual(data.job_queuer_id, 0) self.assertEqual(data.status, 'd') self.assertEqual(data.priority, 1) self.assertEqual( data.command, 'some_program.py -b -c ccc -d d1 -d d2' ) self.assertTrue(data.start >= then) diff = data.start - then self.assertTrue(diff.total_seconds() >= 0) self.assertTrue(diff.total_seconds() < 1) self.assertEqual(data.start, data.queued_time) invalid_job_options = {'fake_field': 'value'} queuer = SubQueuer(db.job, job_options=invalid_job_options) self.assertRaises(InvalidJobOptionError, queuer.job_data) # Test delay_seconds then = datetime.datetime.now() data = SubQueuer(db.job, delay_seconds=100).job_data() self.assertTrue(data.start > then) diff = data.start - then self.assertTrue(diff.total_seconds() >= 100) self.assertTrue(diff.total_seconds() < 101) def test__queue(self): def get_job_ids(): return sorted([x.id for x in 
db(db.job).select(db.job.id)]) job_ids = get_job_ids() queuer = SubQueuer(db.job) new_job = queuer.queue() self.assertEqual( new_job.command, 'some_program.py -b -c ccc -d d1 -d d2') self.assertTrue(new_job.id not in job_ids) job_ids = get_job_ids() self.assertTrue(new_job.id in job_ids) job = Job.from_id(new_job.id) self._objects.append(job) class TestRequeuer(LocalTestCase): def test____init__(self): queuer = SubQueuer(db.job) requeuer = Requeuer(queuer) self.assertTrue(requeuer) self.assertEqual(requeuer.requeues, 0) self.assertEqual(requeuer.max_requeues, 1) def test__requeue(self): sub_queuer = SubQueuer(db.job) requeuer = Requeuer(sub_queuer) self.assertRaises(InvalidCLIOptionError, requeuer.requeue) class ReQueuer(SubQueuer): valid_cli_options = ['-a', '-c', '--requeues', '--max-requeues'] default_cli_options = { '-a': True, '-c': 'ccc', } queuer = ReQueuer(db.job) requeuer = Requeuer(queuer) tracker = TableTracker(db.job) job = requeuer.requeue() self.assertFalse(tracker.had(job)) self.assertTrue(tracker.has(job)) self._objects.append(job) self.assertEqual( job.command, 'some_program.py --max-requeues 1 --requeues 1 -a -c ccc' ) requeuer = Requeuer(queuer, requeues=33, max_requeues=99) tracker = TableTracker(db.job) job = requeuer.requeue() self.assertFalse(tracker.had(job)) self.assertTrue(tracker.has(job)) self._objects.append(job) self.assertEqual( job.command, 'some_program.py --max-requeues 99 --requeues 34 -a -c ccc' ) requeuer = Requeuer(queuer, requeues=99, max_requeues=99) self.assertRaises(StopIteration, requeuer.requeue) requeuer = Requeuer(queuer, requeues=100, max_requeues=99) self.assertRaises(StopIteration, requeuer.requeue) def test__requeue_cli_options(self): requeuer = Requeuer(Queuer(db.job)) self.assertEqual( requeuer.requeue_cli_options(), { '--requeues': 1, '--max-requeues': 1, } ) requeuer = Requeuer(Queuer(db.job), requeues=33, max_requeues=99) self.assertEqual( requeuer.requeue_cli_options(), { '--requeues': 34, '--max-requeues': 99, } ) def setUpModule(): """Set up web2py environment.""" # C0103: *Invalid name "%%s" (should match %%s)* # pylint: disable=C0103 LocalTestCase.set_env(globals()) if __name__ == '__main__': unittest.main()
daemon = Daemon(self.name, self.pid_filename) params = {} daemon.write_pid(params) self.assertEqual(daemon.read_pid(), {}) params = { 'b': '2', 'last name': 'Smith', 'start time': '2002-02-02 13:58:58', 'nothing': '', 'empty_str': '', } daemon.write_pid(params) self.assertEqual(daemon.read_pid(), params)
restmapper_test.go
/** * Copyright (C) 2015 Red Hat, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package discovery_test import ( "reflect" "testing" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/version" . "k8s.io/client-go/discovery" restclient "k8s.io/client-go/rest" "k8s.io/client-go/rest/fake" "github.com/googleapis/gnostic/OpenAPIv2" "github.com/stretchr/testify/assert" ) func TestRESTMapper(t *testing.T) { resources := []*APIGroupResources{ { Group: metav1.APIGroup{ Name: "extensions", Versions: []metav1.GroupVersionForDiscovery{ {Version: "v1beta"}, }, PreferredVersion: metav1.GroupVersionForDiscovery{Version: "v1beta"}, }, VersionedResources: map[string][]metav1.APIResource{ "v1beta": { {Name: "jobs", Namespaced: true, Kind: "Job"}, {Name: "pods", Namespaced: true, Kind: "Pod"}, }, }, }, { Group: metav1.APIGroup{ Versions: []metav1.GroupVersionForDiscovery{ {Version: "v1"}, {Version: "v2"}, }, PreferredVersion: metav1.GroupVersionForDiscovery{Version: "v1"}, }, VersionedResources: map[string][]metav1.APIResource{ "v1": { {Name: "pods", Namespaced: true, Kind: "Pod"}, }, "v2": { {Name: "pods", Namespaced: true, Kind: "Pod"}, }, }, }, // This group tests finding and prioritizing resources that only exist in non-preferred versions { Group: metav1.APIGroup{ Name: "unpreferred", Versions: []metav1.GroupVersionForDiscovery{ {Version: "v1"}, {Version: "v2beta1"}, {Version: "v2alpha1"}, }, PreferredVersion: metav1.GroupVersionForDiscovery{Version: "v1"}, }, VersionedResources: map[string][]metav1.APIResource{ "v1": { {Name: "broccoli", Namespaced: true, Kind: "Broccoli"}, }, "v2beta1": { {Name: "broccoli", Namespaced: true, Kind: "Broccoli"}, {Name: "peas", Namespaced: true, Kind: "Pea"}, }, "v2alpha1": { {Name: "broccoli", Namespaced: true, Kind: "Broccoli"}, {Name: "peas", Namespaced: true, Kind: "Pea"}, }, }, }, } restMapper := NewRESTMapper(resources, nil) kindTCs := []struct { input schema.GroupVersionResource want schema.GroupVersionKind }{ { input: schema.GroupVersionResource{ Resource: "pods", }, want: schema.GroupVersionKind{ Version: "v1", Kind: "Pod", }, }, { input: schema.GroupVersionResource{ Version: "v1", Resource: "pods", }, want: schema.GroupVersionKind{ Version: "v1", Kind: "Pod", }, }, { input: schema.GroupVersionResource{ Version: "v2", Resource: "pods", }, want: schema.GroupVersionKind{ Version: "v2", Kind: "Pod", }, }, { input: 
schema.GroupVersionResource{ Resource: "pods", }, want: schema.GroupVersionKind{ Version: "v1", Kind: "Pod", }, }, { input: schema.GroupVersionResource{ Resource: "jobs", }, want: schema.GroupVersionKind{ Group: "extensions", Version: "v1beta", Kind: "Job", }, }, { input: schema.GroupVersionResource{ Resource: "peas", }, want: schema.GroupVersionKind{ Group: "unpreferred", Version: "v2beta1", Kind: "Pea", }, }, } for _, tc := range kindTCs { got, err := restMapper.KindFor(tc.input) if err != nil { t.Errorf("KindFor(%#v) unexpected error: %v", tc.input, err) continue } if !reflect.DeepEqual(got, tc.want) { t.Errorf("KindFor(%#v) = %#v, want %#v", tc.input, got, tc.want) } } resourceTCs := []struct { input schema.GroupVersionResource want schema.GroupVersionResource }{ { input: schema.GroupVersionResource{ Resource: "pods", }, want: schema.GroupVersionResource{ Version: "v1", Resource: "pods", }, }, { input: schema.GroupVersionResource{ Version: "v1", Resource: "pods", }, want: schema.GroupVersionResource{ Version: "v1", Resource: "pods", }, }, { input: schema.GroupVersionResource{ Version: "v2", Resource: "pods", }, want: schema.GroupVersionResource{ Version: "v2", Resource: "pods", }, }, { input: schema.GroupVersionResource{ Resource: "pods", }, want: schema.GroupVersionResource{ Version: "v1", Resource: "pods", }, }, { input: schema.GroupVersionResource{ Resource: "jobs", }, want: schema.GroupVersionResource{ Group: "extensions", Version: "v1beta", Resource: "jobs", }, }, } for _, tc := range resourceTCs { got, err := restMapper.ResourceFor(tc.input) if err != nil { t.Errorf("ResourceFor(%#v) unexpected error: %v", tc.input, err) continue } if !reflect.DeepEqual(got, tc.want) { t.Errorf("ResourceFor(%#v) = %#v, want %#v", tc.input, got, tc.want) } } }
func TestDeferredDiscoveryRESTMapper_CacheMiss(t *testing.T) { assert := assert.New(t) cdc := fakeCachedDiscoveryInterface{fresh: false} m := NewDeferredDiscoveryRESTMapper(&cdc, nil) assert.False(cdc.fresh, "should NOT be fresh after instantiation") assert.Zero(cdc.invalidateCalls, "should not have called Invalidate()") gvk, err := m.KindFor(schema.GroupVersionResource{ Group: "a", Version: "v1", Resource: "foo", }) assert.NoError(err) assert.True(cdc.fresh, "should be fresh after a cache-miss") assert.Equal(cdc.invalidateCalls, 1, "should have called Invalidate() once") assert.Equal(gvk.Kind, "Foo") gvk, err = m.KindFor(schema.GroupVersionResource{ Group: "a", Version: "v1", Resource: "foo", }) assert.NoError(err) assert.Equal(cdc.invalidateCalls, 1, "should NOT have called Invalidate() again") gvk, err = m.KindFor(schema.GroupVersionResource{ Group: "a", Version: "v1", Resource: "bar", }) assert.Error(err) assert.Equal(cdc.invalidateCalls, 1, "should NOT have called Invalidate() again after another cache-miss, but with fresh==true") cdc.fresh = false gvk, err = m.KindFor(schema.GroupVersionResource{ Group: "a", Version: "v1", Resource: "bar", }) assert.Error(err) assert.Equal(cdc.invalidateCalls, 2, "should HAVE called Invalidate() again after another cache-miss, but with fresh==false") } type fakeCachedDiscoveryInterface struct { invalidateCalls int fresh bool enabledA bool } var _ CachedDiscoveryInterface = &fakeCachedDiscoveryInterface{} func (c *fakeCachedDiscoveryInterface) Fresh() bool { return c.fresh } func (c *fakeCachedDiscoveryInterface) Invalidate() { c.invalidateCalls = c.invalidateCalls + 1 c.fresh = true c.enabledA = true } func (c *fakeCachedDiscoveryInterface) RESTClient() restclient.Interface { return &fake.RESTClient{} } func (c *fakeCachedDiscoveryInterface) ServerGroups() (*metav1.APIGroupList, error) { if c.enabledA { return &metav1.APIGroupList{ Groups: []metav1.APIGroup{ { Name: "a", Versions: []metav1.GroupVersionForDiscovery{ { GroupVersion: "a/v1", Version: "v1", }, }, PreferredVersion: metav1.GroupVersionForDiscovery{ GroupVersion: "a/v1", Version: "v1", }, }, }, }, nil } return &metav1.APIGroupList{}, nil } func (c *fakeCachedDiscoveryInterface) ServerResourcesForGroupVersion(groupVersion string) (*metav1.APIResourceList, error) { if c.enabledA && groupVersion == "a/v1" { return &metav1.APIResourceList{ GroupVersion: "a/v1", APIResources: []metav1.APIResource{ { Name: "foo", Kind: "Foo", Namespaced: false, }, }, }, nil } return nil, errors.NewNotFound(schema.GroupResource{}, "") } func (c *fakeCachedDiscoveryInterface) ServerResources() ([]*metav1.APIResourceList, error) { if c.enabledA { av1, _ := c.ServerResourcesForGroupVersion("a/v1") return []*metav1.APIResourceList{av1}, nil } return []*metav1.APIResourceList{}, nil } func (c *fakeCachedDiscoveryInterface) ServerPreferredResources() ([]*metav1.APIResourceList, error) { if c.enabledA { return []*metav1.APIResourceList{ { GroupVersion: "a/v1", APIResources: []metav1.APIResource{ { Name: "foo", Kind: "Foo", Verbs: []string{}, }, }, }, }, nil } return nil, nil } func (c *fakeCachedDiscoveryInterface) ServerPreferredNamespacedResources() ([]*metav1.APIResourceList, error) { return nil, nil } func (c *fakeCachedDiscoveryInterface) ServerVersion() (*version.Info, error) { return &version.Info{}, nil } func (c *fakeCachedDiscoveryInterface) OpenAPISchema() (*openapi_v2.Document, error) { return &openapi_v2.Document{}, nil }
10. regular-expression-match.py
'''
A: suffix solution
1. subproblems: define dp(i, j) = is_match(s[i:], p[j:]), suffix
2. guess,
    2.1 the current char in p is a '*'
        - use '*', repeat the char before it
        - do not use '*', skip to next char after '*'
    2.2 current char in s and p are a match, s[i] == p[j] or p[j] == '.'
3. relate subproblems: dp(i, j) = match(s[i:], p[j:])
    dp(i, j) =
        a. if j + 1 is in bound and p[j + 1] == '*', then dp(i, j + 2) or (s[i] = p[j] or p[j] = '.' and dp(i + 1, j))
        b. if s[i] == p[j] or p[j] == '.', then dp(i + 1, j + 1)
        c. else false

B: prefix solution
1. subproblems: define dp(i, j) = is_match(s[:i], p[:j]), prefix
2. guess,
    2.1 current char in s and p are a match, s[i] == p[j] or p[j] == '.'
    2.2 the current char in p is a '*'
        - use '*', repeat the char before it
        - do not use '*', skip to next char after '*'
3. relate subproblems: dp(i, j) = match(s[:i], p[:j])
    dp(i, j) =
        a. if s[i] == p[j] or p[j] == '.', then dp(i - 1, j - 1)
        b. if p[j] == '*', then dp(i, j - 2) or (s[i] = p[j - 1] or p[j - 1] = '.' and dp(i - 1, j))
        c. else false

reference:
1. https://www.youtube.com/watch?v=HAA8mgxlov8 (use * or no use)
2. https://www.youtube.com/watch?v=l3hda49XcDE (dp solution)
'''


class Solution:
    def isMatch(self, s: str, p: str) -> bool:
        # Sometimes there is still a match even when s is out of bound but p is still in bound (s: a, p: a*b*).
        # But if p is out of bound, then we must return false.
        # return self.dfs_suffix(s, p, 0, 0, {})
        # return self.dfs_prefix(s, p, len(s) - 1, len(p) - 1)
        # return self.dp_bottom_up_prefix(s, p)
        return self.dp_bottom_up_suffix(s, p)

    # top down, dfs + memoization, suffix
    def dfs_suffix(self, s, p, i, j, memo):
        # base case
        # if both i and j are out of bound, then we found our solution
        if (i, j) in memo:
            return memo[(i, j)]
        if i >= len(s) and j >= len(p):
            return True
        # if i is in bound, but j is out of bound, return false.
        if j >= len(p):
            return False
        # Note the order of the parentheses: only check for a match while i is within the bounds of s.
        match = i < len(s) and (s[i] == p[j] or p[j] == '.')
        # if the next character in p is a star (need to prevent j + 1 from going beyond the boundary)
        if j + 1 < len(p) and p[j + 1] == '*':
            # either repeating the current character in p and move to the next character in s
            # or no repeating in p and skip to next character in p
            memo[(i, j)] = (match and self.dfs_suffix(s, p, i + 1, j, memo)) or self.dfs_suffix(s, p, i, j + 2, memo)
            return memo[(i, j)]
        # if it is not a star but a match is found at the current index of s and p
        if match:
            memo[(i, j)] = self.dfs_suffix(s, p, i + 1, j + 1, memo)
            return memo[(i, j)]
        # if not a match and the next character is not a star
        memo[(i, j)] = False
        return False

    # bottom up solution, suffix.
    def dp_bottom_up_suffix(self, s, p):
        s_len = len(s)
        p_len = len(p)
        dp = [[False for col in range(p_len + 1)] for row in range(s_len + 1)]
        dp[s_len][p_len] = True
        # deal with cases like a*b*c* for the last row
        for j in range(p_len - 2, -1, -1):
            if p[j + 1] == '*':
                dp[s_len][j] = dp[s_len][j + 2]
        for i in range(s_len - 1, -1, -1):
            for j in range(p_len - 1, -1, -1):
                # for suffix, checking '*' goes first.
                if j <= p_len - 2 and p[j + 1] == '*':
                    if s[i] == p[j] or p[j] == '.':
                        dp[i][j] = dp[i + 1][j]
                    dp[i][j] = (dp[i][j] or dp[i][j + 2])
                    continue
                if s[i] == p[j] or p[j] == '.':
                    dp[i][j] = dp[i + 1][j + 1]
        for i in dp:
            print(i)
        print()
        return dp[0][0]

    # top down solution, start at (n, n)
    def dfs_prefix(self, s, p, i, j):
        # base case
        if i < 0 and j < 0:
            return True
        # if i is in bound, but j is out of bound, return false.
        if j < 0:
            return False
        # if the current char is a star
        if j >= 0 and p[j] == '*':
            # check if the current char in s matches the previous char in p (the one before '*')
            match = (i >= 0) and (s[i] == p[j - 1] or p[j - 1] == '.')
            # if the current chars match, then go to dp(i-1, j); otherwise check dp(i, j-2)
            return (match and self.dfs_prefix(s, p, i - 1, j)) or self.dfs_prefix(s, p, i, j - 2)
        # if there is a match of the current char in s and p
        if i >= 0 and (s[i] == p[j] or p[j] == '.'):
            return self.dfs_prefix(s, p, i - 1, j - 1)
        return False

    # bottom up algorithm, start from dp(0,0) -> dp(n, n)
    def dp_bottom_up_prefix(self, s, p):
        s_len, p_len = len(s), len(p)
        dp = [[False for col in range(p_len + 1)] for row in range(s_len + 1)]
        dp[0][0] = True
        # handle patterns like a*, a*b* or a*b*c* for the 0th row
        for j in range(1, p_len + 1):
            if p[j - 1] == '*':
                dp[0][j] = dp[0][j - 2]
        for i in range(1, s_len + 1):
            for j in range(1, p_len + 1):
                if s[i - 1] == p[j - 1] or p[j - 1] == '.':
                    dp[i][j] = dp[i - 1][j - 1]
                    continue
                if p[j - 1] == '*':
                    if s[i - 1] == p[j - 2] or p[j - 2] == '.':
                        dp[i][j] = dp[i - 1][j]
                    dp[i][j] = (dp[i][j] or dp[i][j - 2])
        for i in dp:
            print(i)
        print()
        return dp[s_len][p_len]


s = 'aab'
p = 'c*a*b'
# s = 'aaa'
# p = 'aaaa'
# s = "a"
# p = ".*..a*"
s = 'aa'
p = 'a*'
sol = Solution()
print(sol.isMatch(s, p))

x = 'abc'
print(x[1:1])
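# --- Added illustration (not part of the original file) ---
# The four strategies above implement the same recurrences described in the
# docstring. This small, hedged sanity check exercises them side by side on a
# few inputs and asserts they agree; the bottom-up variants also print their
# dp tables as a side effect, exactly as written above.
checker = Solution()
for s_in, p_in in [('aab', 'c*a*b'), ('aa', 'a*'), ('ab', '.*'), ('aa', 'a')]:
    answers = [
        checker.dfs_suffix(s_in, p_in, 0, 0, {}),
        checker.dfs_prefix(s_in, p_in, len(s_in) - 1, len(p_in) - 1),
        checker.dp_bottom_up_suffix(s_in, p_in),
        checker.dp_bottom_up_prefix(s_in, p_in),
    ]
    assert len(set(answers)) == 1, (s_in, p_in, answers)
    print(s_in, p_in, answers[0])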
_svm.py
from sklearn.svm import LinearSVC
import numpy as np


def apply_threshold(data, threshold):
    data[np.where(np.abs(data) < threshold)] = 0


def train_one(data, loss, C, verbose, max_iter, threshold, dual, tol):
    def _get_features(obj):
        # Index samples only if they are required
        # Helpful in reducing the memory footprint
        if obj['ind'] is None:
            return obj['data']
        else:
            return obj['data'].take(obj['ind'], axis=0)

    X, y = _get_features(data), data['Y']
    clf = LinearSVC(tol=tol,
                    loss=loss,
                    dual=dual,
                    C=C,
                    multi_class='ovr',
                    fit_intercept=True,
                    intercept_scaling=1,
                    class_weight=None,
                    verbose=verbose,
                    random_state=0,
                    max_iter=max_iter)
    try:
        clf.fit(X, y)
        weight, bias = clf.coef_, clf.intercept_
    except ValueError:
        # TODO Find a solution for this; choose randomly, maybe?
        weight, bias = np.zeros((1, X.shape[1]), dtype=np.float32), np.zeros(
            (1), dtype=np.float32)
    del clf
    apply_threshold(weight, threshold)
    return weight, bias
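# --- Added usage sketch (not part of the original module) ---
# Hedged example of how train_one() above can be called. The layout of the
# `data` dict ('data', 'ind', 'Y') is inferred from _get_features(); all
# hyper-parameter values below are illustrative assumptions, not defaults
# taken from the original project.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X = rng.randn(200, 50).astype(np.float32)   # toy feature matrix
    y = (X[:, 0] > 0).astype(np.int64)           # toy binary labels
    data = {'data': X, 'ind': None, 'Y': y}      # ind=None -> use every row
    w, b = train_one(data, loss='squared_hinge', C=1.0, verbose=0,
                     max_iter=100, threshold=0.01, dual=True, tol=1e-4)
    print(w.shape, b.shape)                      # weights sparsified by apply_threshold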
main_multi_gpu_distill.py
# Copyright (c) 2021 PPViT Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """PiT train and eval using multiple GPU without teacher model and distillation""" import sys import os import time import argparse import random import math import numpy as np import paddle from datasets import get_dataloader from datasets import get_dataset from config import get_config from config import update_config from utils import AverageMeter from utils import get_logger from utils import write_log from utils import all_reduce_mean from utils import skip_weight_decay_fn from mixup import Mixup from model_ema import ModelEma from losses import LabelSmoothingCrossEntropyLoss from losses import SoftTargetCrossEntropyLoss from losses import DistillationLoss from regnet import build_regnet as build_teacher_model from pit import build_pit as build_model def get_arguments(): """return argumeents, this will overwrite the config by (1) yaml file (2) argument values""" parser = argparse.ArgumentParser('PiT') parser.add_argument('-cfg', type=str, default=None) parser.add_argument('-dataset', type=str, default=None) parser.add_argument('-data_path', type=str, default=None) parser.add_argument('-output', type=str, default=None) parser.add_argument('-batch_size', type=int, default=None) parser.add_argument('-batch_size_eval', type=int, default=None) parser.add_argument('-image_size', type=int, default=None) parser.add_argument('-accum_iter', type=int, default=None) parser.add_argument('-pretrained', type=str, default=None) parser.add_argument('-teacher_model_path', type=str, default=None) parser.add_argument('-resume', type=str, default=None) parser.add_argument('-last_epoch', type=int, default=None) parser.add_argument('-eval', action='store_true') parser.add_argument('-amp', action='store_true') arguments = parser.parse_args() return arguments def train(dataloader, model, optimizer, criterion, epoch, total_epochs, total_batches, debug_steps=100, accum_iter=1, model_ema=None, mixup_fn=None, amp_grad_scaler=None, local_logger=None, master_logger=None): """Training for one epoch Args: dataloader: paddle.io.DataLoader, dataloader instance model: nn.Layer, a ViT model optimizer: nn.optimizer criterion: nn.XXLoss epoch: int, current epoch total_epochs: int, total num of epochs total_batches: int, total num of batches for one epoch debug_steps: int, num of iters to log info, default: 100 accum_iter: int, num of iters for accumulating gradients, default: 1 model_ema: ModelEma, model moving average instance mixup_fn: Mixup, mixup instance, default: None amp_grad_scaler: GradScaler, if not None pass the GradScaler and enable AMP, default: None local_logger: logger for local process/gpu, default: None master_logger: logger for main process, default: None Returns: train_loss_meter.avg: float, average loss on current process/gpu train_acc_meter.avg: float, average acc@1 on current process/gpu master_loss_meter.avg: float, average loss on all processes/gpus master_acc_meter.avg: float, average acc@1 on all 
processes/gpus train_time: float, training time """ time_st = time.time() train_loss_meter = AverageMeter() train_acc_meter = AverageMeter() master_loss_meter = AverageMeter() master_acc_meter = AverageMeter() model.train() optimizer.clear_grad() for batch_id, data in enumerate(dataloader): # get data images = data[0] label = data[1] label_orig = label.clone() batch_size = images.shape[0] if mixup_fn is not None: images, label = mixup_fn(images, label_orig) # forward with paddle.amp.auto_cast(amp_grad_scaler is not None): output = model(images) loss = criterion(images, output, label) loss_value = loss.item() if not math.isfinite(loss_value): print("Loss is {}, stopping training".format(loss_value)) sys.exit(1) loss = loss / accum_iter # backward and step if amp_grad_scaler is None: # fp32 loss.backward() if ((batch_id + 1) % accum_iter == 0) or (batch_id + 1 == len(dataloader)): optimizer.step() optimizer.clear_grad() else: # amp scaled_loss = amp_grad_scaler.scale(loss) scaled_loss.backward() if ((batch_id + 1) % accum_iter == 0) or (batch_id + 1 == len(dataloader)): # amp for param group reference: https://github.com/PaddlePaddle/Paddle/issues/37188 amp_grad_scaler.step(optimizer) amp_grad_scaler.update() optimizer.clear_grad() if model_ema is not None and paddle.distributed.get_rank() == 0: model_ema.update(model) # average of output and kd_output, same as eval mode pred = paddle.nn.functional.softmax((output[0] + output[1]) / 2) acc = paddle.metric.accuracy(pred, label_orig if mixup_fn else label_orig.unsqueeze(1)).item() # sync from other gpus for overall loss and acc
master_loss_meter.update(master_loss, master_batch_size) master_acc_meter.update(master_acc, master_batch_size) train_loss_meter.update(loss_value, batch_size) train_acc_meter.update(acc, batch_size) if batch_id % debug_steps == 0 or batch_id + 1 == len(dataloader): general_message = (f"Epoch[{epoch:03d}/{total_epochs:03d}], " f"Step[{batch_id:04d}/{total_batches:04d}], " f"Lr: {optimizer.get_lr():04f}, ") local_message = (general_message + f"Loss: {loss_value:.4f} ({train_loss_meter.avg:.4f}), " f"Avg Acc: {train_acc_meter.avg:.4f}") master_message = (general_message + f"Loss: {master_loss:.4f} ({master_loss_meter.avg:.4f}), " f"Avg Acc: {master_acc_meter.avg:.4f}") write_log(local_logger, master_logger, local_message, master_message) paddle.distributed.barrier() train_time = time.time() - time_st return (train_loss_meter.avg, train_acc_meter.avg, master_loss_meter.avg, master_acc_meter.avg, train_time) @paddle.no_grad() def validate(dataloader, model, criterion, total_batches, debug_steps=100, local_logger=None, master_logger=None): """Validation for the whole dataset Args: dataloader: paddle.io.DataLoader, dataloader instance model: nn.Layer, a ViT model total_batches: int, total num of batches for one epoch debug_steps: int, num of iters to log info, default: 100 local_logger: logger for local process/gpu, default: None master_logger: logger for main process, default: None Returns: val_loss_meter.avg: float, average loss on current process/gpu val_acc1_meter.avg: float, average top1 accuracy on current processes/gpus val_acc5_meter.avg: float, average top5 accuracy on current processes/gpus master_loss_meter.avg: float, average loss on all processes/gpus master_acc1_meter.avg: float, average top1 accuracy on all processes/gpus master_acc5_meter.avg: float, average top5 accuracy on all processes/gpus val_time: float, validation time """ model.eval() val_loss_meter = AverageMeter() val_acc1_meter = AverageMeter() val_acc5_meter = AverageMeter() master_loss_meter = AverageMeter() master_acc1_meter = AverageMeter() master_acc5_meter = AverageMeter() time_st = time.time() for batch_id, data in enumerate(dataloader): # get data images = data[0] label = data[1] batch_size = images.shape[0] output = model(images) loss = criterion(output, label) loss_value = loss.item() pred = paddle.nn.functional.softmax(output) acc1 = paddle.metric.accuracy(pred, label.unsqueeze(1)).item() acc5 = paddle.metric.accuracy(pred, label.unsqueeze(1), k=5).item() # sync from other gpus for overall loss and acc master_loss = all_reduce_mean(loss_value) master_acc1 = all_reduce_mean(acc1) master_acc5 = all_reduce_mean(acc5) master_batch_size = all_reduce_mean(batch_size) master_loss_meter.update(master_loss, master_batch_size) master_acc1_meter.update(master_acc1, master_batch_size) master_acc5_meter.update(master_acc5, master_batch_size) val_loss_meter.update(loss_value, batch_size) val_acc1_meter.update(acc1, batch_size) val_acc5_meter.update(acc5, batch_size) if batch_id % debug_steps == 0: local_message = (f"Step[{batch_id:04d}/{total_batches:04d}], " f"Avg Loss: {val_loss_meter.avg:.4f}, " f"Avg Acc@1: {val_acc1_meter.avg:.4f}, " f"Avg Acc@5: {val_acc5_meter.avg:.4f}") master_message = (f"Step[{batch_id:04d}/{total_batches:04d}], " f"Avg Loss: {master_loss_meter.avg:.4f}, " f"Avg Acc@1: {master_acc1_meter.avg:.4f}, " f"Avg Acc@5: {master_acc5_meter.avg:.4f}") write_log(local_logger, master_logger, local_message, master_message) paddle.distributed.barrier() val_time = time.time() - time_st return 
(val_loss_meter.avg, val_acc1_meter.avg, val_acc5_meter.avg, master_loss_meter.avg, master_acc1_meter.avg, master_acc5_meter.avg, val_time) def main_worker(*args): """main method for each process""" # STEP 0: Preparation paddle.device.set_device('gpu') paddle.distributed.init_parallel_env() world_size = paddle.distributed.get_world_size() local_rank = paddle.distributed.get_rank() config = args[0] last_epoch = config.TRAIN.LAST_EPOCH seed = config.SEED + local_rank paddle.seed(seed) np.random.seed(seed) random.seed(seed) local_logger, master_logger = get_logger(config.SAVE) message = (f'----- world_size = {world_size}, local_rank = {local_rank} \n' f'----- {config}') write_log(local_logger, master_logger, message) # STEP 1: Create model model = build_model(config) # define model ema model_ema = None if not config.EVAL and config.TRAIN.MODEL_EMA and local_rank == 0: model_ema = ModelEma(model, decay=config.TRAIN.MODEL_EMA_DECAY) if config.TRAIN.MODEL_EMA_FORCE_CPU: model_ema.to('cpu') # STEP 2: Create train and val dataloader if not config.EVAL: dataset_train = args[1] dataloader_train = get_dataloader(config, dataset_train, True, True) total_batch_train = len(dataloader_train) message = f'----- Total # of train batch (single gpu): {total_batch_train}' write_log(local_logger, master_logger, message) dataset_val = args[2] dataloader_val = get_dataloader(config, dataset_val, False, True) total_batch_val = len(dataloader_val) message = f'----- Total # of val batch (single gpu): {total_batch_val}' write_log(local_logger, master_logger, message) # STEP 3: (Optional) Define Mixup function mixup_fn = None if (config.TRAIN.MIXUP_PROB > 0 or config.TRAIN.CUTMIX_ALPHA > 0 or config.TRAIN.CUTMIX_MINMAX is not None): mixup_fn = Mixup(mixup_alpha=config.TRAIN.MIXUP_ALPHA, cutmix_alpha=config.TRAIN.CUTMIX_ALPHA, cutmix_minmax=config.TRAIN.CUTMIX_MINMAX, prob=config.TRAIN.MIXUP_PROB, switch_prob=config.TRAIN.MIXUP_SWITCH_PROB, mode=config.TRAIN.MIXUP_MODE, label_smoothing=config.TRAIN.SMOOTHING)# # STEP 4: Define loss/criterion if mixup_fn is not None: criterion = SoftTargetCrossEntropyLoss() elif config.TRAIN.SMOOTHING: criterion = LabelSmoothingCrossEntropyLoss() else: criterion = paddle.nn.CrossEntropyLoss() # Use CrossEntropyLoss for val criterion_val = paddle.nn.CrossEntropyLoss() # STEP 5: Create Teacher model and distill loss teacher_model = None if not config.EVAL: if config.TRAIN.DISTILLATION_TYPE != 'none': write_log(local_logger, master_logger, f'----- Load teacher model: {config.TRAIN.TEACHER_MODEL}') teacher_model = build_teacher_model() assert os.path.isfile(config.TRAIN.TEACHER_MODEL) teacher_model_state = paddle.load(config.TRAIN.TEACHER_MODEL) teacher_model.set_state_dict(teacher_model_state) teacher_model.eval() teacher_model = paddle.DataParallel(teacher_model) # wrap the criterion: criterion = DistillationLoss(criterion, teacher_model, config.TRAIN.DISTILLATION_TYPE, config.TRAIN.DISTILLATION_ALPHA, config.TRAIN.DISTILLATION_TAU) # STEP 5: Define optimizer and lr_scheduler if not config.EVAL: # set lr according to batch size and world size if config.TRAIN.LINEAR_SCALED_LR is not None: effective_batch_size = config.DATA.BATCH_SIZE * config.TRAIN.ACCUM_ITER * world_size config.TRAIN.BASE_LR = ( config.TRAIN.BASE_LR * effective_batch_size / config.TRAIN.LINEAR_SCALED_LR ) config.TRAIN.WARMUP_START_LR = ( config.TRAIN.WARMUP_START_LR* effective_batch_size / config.TRAIN.LINEAR_SCALED_LR ) config.TRAIN.END_LR = ( config.TRAIN.END_LR * effective_batch_size / config.TRAIN.LINEAR_SCALED_LR ) 
message = (f'Base lr is scaled to: {config.TRAIN.BASE_LR}, ' f'warmup start lr is scaled to: {config.TRAIN.WARMUP_START_LR}, ' f'end lr is scaled to: {config.TRAIN.BASE_LR}') write_log(local_logger, master_logger, message) # define scaler for amp training amp_grad_scaler = paddle.amp.GradScaler() if config.AMP else None # warmup + cosine lr scheduler if config.TRAIN.WARMUP_EPOCHS > 0: cosine_lr_scheduler = paddle.optimizer.lr.CosineAnnealingDecay( learning_rate=config.TRAIN.BASE_LR, T_max=config.TRAIN.NUM_EPOCHS - config.TRAIN.WARMUP_EPOCHS, eta_min=config.TRAIN.END_LR, last_epoch=-1) # do not set last epoch, handled in warmup sched get_lr() lr_scheduler = paddle.optimizer.lr.LinearWarmup( learning_rate=cosine_lr_scheduler, # use cosine lr sched after warmup warmup_steps=config.TRAIN.WARMUP_EPOCHS, # only support position integet start_lr=config.TRAIN.WARMUP_START_LR, end_lr=config.TRAIN.BASE_LR, last_epoch=config.TRAIN.LAST_EPOCH) else: lr_scheduler = paddle.optimizer.lr.CosineAnnealingDecay( learning_rate=config.TRAIN.BASE_LR, T_max=config.TRAIN.NUM_EPOCHS, eta_min=config.TRAIN.END_LR, last_epoch=config.TRAIN.LAST_EPOCH) # set gradient clip if config.TRAIN.GRAD_CLIP: clip = paddle.nn.ClipGradByGlobalNorm(config.TRAIN.GRAD_CLIP) else: clip = None # set optimizer optimizer = paddle.optimizer.AdamW( parameters=model.parameters(), learning_rate=lr_scheduler, # set to scheduler beta1=config.TRAIN.OPTIMIZER.BETAS[0], beta2=config.TRAIN.OPTIMIZER.BETAS[1], weight_decay=config.TRAIN.WEIGHT_DECAY, epsilon=config.TRAIN.OPTIMIZER.EPS, grad_clip=clip, apply_decay_param_fun=skip_weight_decay_fn( model, # skip bn and bias ['pos_embed', 'cls_token', 'dist_token']), # skip custom ops ) # STEP 6: (Optional) Load pretrained model weights for evaluation or finetuning if config.MODEL.PRETRAINED: assert os.path.isfile(config.MODEL.PRETRAINED) is True model_state = paddle.load(config.MODEL.PRETRAINED) if 'model' in model_state: # load state_dict with multi items: model, optimier, and epoch # pretrain only load model weight, opt and epoch are ignored if 'model_ema' in model_state: model_state = model_state['model_ema'] else: model_state = model_state['model'] model.set_state_dict(model_state) message = f"----- Pretrained: Load model state from {config.MODEL.PRETRAINED}" write_log(local_logger, master_logger, message) # STEP 7: (Optional) Load model weights and status for resume training if config.MODEL.RESUME: assert os.path.isfile(config.MODEL.RESUME) is True model_state = paddle.load(config.MODEL.RESUME) if 'model' in model_state: # load state_dict with multi items: model, optimier, and epoch model.set_state_dict(model_state['model']) if 'optimizer' in model_state: optimizer.set_state_dict(model_state['optimizer']) if 'epoch' in model_state: config.TRAIN.LAST_EPOCH = model_state['epoch'] last_epoch = model_state['epoch'] if 'lr_scheduler' in model_state: lr_scheduler.set_state_dict(model_state['lr_scheduler']) if 'amp_grad_scaler' in model_state and amp_grad_scaler is not None: amp_grad_scaler.load_state_dict(model_state['amp_grad_scaler']) if config.TRAIN.MODEL_EMA and local_rank == 0: model_ema.module.set_state_dict(model_state['model_ema']) lr_scheduler.step(last_epoch + 1) message = (f"----- Resume Training: Load model from {config.MODEL.RESUME}, w/t " f"opt = [{'optimizer' in model_state}], " f"lr_scheduler = [{'lr_scheduler' in model_state}], " f"model_ema = [{'model_ema' in model_state}], " f"epoch = [{model_state.get('epoch', -1)}], " f"amp_grad_scaler = [{'amp_grad_scaler' in model_state}]") 
write_log(local_logger, master_logger, message) else: # direct load pdparams without other items message = f"----- Resume Training: Load {config.MODEL.RESUME}, w/o opt/epoch/scaler" write_log(local_logger, master_logger, message, 'warning') model.set_state_dict(model_state) lr_scheduler.step(last_epoch + 1) # STEP 8: Enable model data parallelism on multi processes model = paddle.DataParallel(model) # STEP 9: (Optional) Run evaluation and return if config.EVAL: write_log(local_logger, master_logger, "----- Start Validation") val_loss, val_acc1, val_acc5, avg_loss, avg_acc1, avg_acc5, val_time = validate( dataloader=dataloader_val, model=model, criterion=criterion_val, total_batches=total_batch_val, debug_steps=config.REPORT_FREQ, local_logger=local_logger, master_logger=master_logger) local_message = ("----- Validation: " + f"Validation Loss: {val_loss:.4f}, " + f"Validation Acc@1: {val_acc1:.4f}, " + f"Validation Acc@5: {val_acc5:.4f}, " + f"time: {val_time:.2f}") master_message = ("----- Validation: " + f"Validation Loss: {avg_loss:.4f}, " + f"Validation Acc@1: {avg_acc1:.4f}, " + f"Validation Acc@5: {avg_acc5:.4f}, " + f"time: {val_time:.2f}") write_log(local_logger, master_logger, local_message, master_message) return # STEP 10: Run training write_log(local_logger, master_logger, f"----- Start training from epoch {last_epoch+1}.") for epoch in range(last_epoch + 1, config.TRAIN.NUM_EPOCHS + 1): # Train one epoch write_log(local_logger, master_logger, f"Train epoch {epoch}. LR={optimizer.get_lr():.6e}") train_loss, train_acc, avg_loss, avg_acc, train_time = train( dataloader=dataloader_train, model=model, optimizer=optimizer, criterion=criterion, epoch=epoch, total_epochs=config.TRAIN.NUM_EPOCHS, total_batches=total_batch_train, debug_steps=config.REPORT_FREQ, accum_iter=config.TRAIN.ACCUM_ITER, model_ema=model_ema, mixup_fn=mixup_fn, amp_grad_scaler=amp_grad_scaler, local_logger=local_logger, master_logger=master_logger) # update lr lr_scheduler.step() general_message = (f"----- Epoch[{epoch:03d}/{config.TRAIN.NUM_EPOCHS:03d}], " f"Lr: {optimizer.get_lr():.4f}, " f"time: {train_time:.2f}, ") local_message = (general_message + f"Train Loss: {train_loss:.4f}, " f"Train Acc: {train_acc:.4f}") master_message = (general_message + f"Train Loss: {avg_loss:.4f}, " f"Train Acc: {avg_acc:.4f}") write_log(local_logger, master_logger, local_message, master_message) # Evaluation (optional) if epoch % config.VALIDATE_FREQ == 0 or epoch == config.TRAIN.NUM_EPOCHS: write_log(local_logger, master_logger, f'----- Validation after Epoch: {epoch}') val_loss, val_acc1, val_acc5, avg_loss, avg_acc1, avg_acc5, val_time = validate( dataloader=dataloader_val, model=model, criterion=criterion_val, total_batches=total_batch_val, debug_steps=config.REPORT_FREQ, local_logger=local_logger, master_logger=master_logger) local_message = (f"----- Epoch[{epoch:03d}/{config.TRAIN.NUM_EPOCHS:03d}], " + f"Validation Loss: {val_loss:.4f}, " + f"Validation Acc@1: {val_acc1:.4f}, " + f"Validation Acc@5: {val_acc5:.4f}, " + f"time: {val_time:.2f}") master_message = (f"----- Epoch[{epoch:03d}/{config.TRAIN.NUM_EPOCHS:03d}], " + f"Validation Loss: {avg_loss:.4f}, " + f"Validation Acc@1: {avg_acc1:.4f}, " + f"Validation Acc@5: {avg_acc5:.4f}, " + f"time: {val_time:.2f}") write_log(local_logger, master_logger, local_message, master_message) # Save model weights and training status if local_rank == 0: if epoch % config.SAVE_FREQ == 0 or epoch == config.TRAIN.NUM_EPOCHS: model_path = os.path.join( config.SAVE, 
f"Epoch-{epoch}-Loss-{avg_loss}.pdparams") state_dict = dict() state_dict['model'] = model.state_dict() if model_ema is not None: state_dict['model_ema'] = model_ema.state_dict() state_dict['optimizer'] = optimizer.state_dict() state_dict['epoch'] = epoch if lr_scheduler is not None: state_dict['lr_scheduler'] = lr_scheduler.state_dict() if amp_grad_scaler is not None: state_dict['amp_grad_scaler'] = amp_grad_scaler.state_dict() paddle.save(state_dict, model_path) message = (f"----- Save model: {model_path}") write_log(local_logger, master_logger, message) def main(): # config is updated in order: (1) default in config.py, (2) yaml file, (3) arguments config = update_config(get_config(), get_arguments()) # set output folder config.SAVE = os.path.join(config.SAVE, f"{'eval' if config.EVAL else 'train'}-{time.strftime('%Y%m%d-%H-%M')}") if not os.path.exists(config.SAVE): os.makedirs(config.SAVE, exist_ok=True) # get train dataset if in train mode and val dataset dataset_train = get_dataset(config, is_train=True) if not config.EVAL else None dataset_val = get_dataset(config, is_train=False) # dist spawn lunch: use CUDA_VISIBLE_DEVICES to set available gpus paddle.distributed.spawn(main_worker, args=(config, dataset_train, dataset_val)) if __name__ == "__main__": main()
master_loss = all_reduce_mean(loss_value) master_acc = all_reduce_mean(acc) master_batch_size = all_reduce_mean(batch_size)
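# --- Added illustration (not part of the original file) ---
# The three lines above average per-GPU scalars with all_reduce_mean(), which
# is imported from utils.py and not shown in this snippet. The helper below is
# only a hedged sketch of what such a function typically looks like (sum via
# paddle.distributed.all_reduce, then divide by the world size); it is an
# assumption, not the project's actual implementation.
def all_reduce_mean_sketch(value):
    world_size = paddle.distributed.get_world_size()
    if world_size <= 1:
        return value
    tensor = paddle.to_tensor(value, dtype='float32')
    paddle.distributed.all_reduce(tensor)   # defaults to a SUM reduction
    return (tensor / world_size).item()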
expression_evaluator.rs
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use common_base::tokio; use common_datablocks::DataBlock; use common_datavalues::Column; use common_datavalues::DataField; use common_datavalues::DataValue; use common_datavalues::Int32Type; use common_datavalues::Int64Type; use common_datavalues::PrimitiveColumn; use common_datavalues::StringType; use common_exception::Result; use common_planners::Expression; use databend_query::common::ExpressionEvaluator; use crate::tests::create_query_context; #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_expression_evaluator() -> Result<()>
{ // A expression which contains Unary, Binary, ScalarFunction, Column, Literal and Cast: // // CAST(-POW(a + b, 2) AS STRING) let expr = Expression::Cast { expr: Box::new(Expression::UnaryExpression { op: "negate".to_string(), expr: Box::new(Expression::ScalarFunction { op: "pow".to_string(), args: vec![ Expression::BinaryExpression { left: Box::new(Expression::Column("a".to_string())), op: "+".to_string(), right: Box::new(Expression::Column("b".to_string())), }, Expression::Literal { value: DataValue::Int64(2), column_name: None, data_type: Int64Type::new_impl(), }, ], }), }), data_type: StringType::new_impl(), pg_style: false, }; // Block layout: // // a b // ------ // 1 1 // 2 2 // 3 3 let mut block = DataBlock::empty(); block = block.add_column( PrimitiveColumn::<i32>::new_from_vec(vec![1, 2, 3]).arc(), DataField::new("a", Int32Type::new_impl()), )?; block = block.add_column( PrimitiveColumn::<i32>::new_from_vec(vec![1, 2, 3]).arc(), DataField::new("b", Int32Type::new_impl()), )?; let func_ctx = create_query_context().await?.try_get_function_context()?; let result = ExpressionEvaluator::eval(func_ctx, &expr, &block)?; assert_eq!( result.get(0), DataValue::String("-4.0".to_string().into_bytes()) ); assert_eq!( result.get(1), DataValue::String("-16.0".to_string().into_bytes()) ); assert_eq!( result.get(2), DataValue::String("-36.0".to_string().into_bytes()) ); Ok(()) }
pull.go
// Copyright 2016 The Gitea Authors. All rights reserved. // Use of this source code is governed by a MIT-style // license that can be found in the LICENSE file. package repo import ( "fmt" "net/http" "strings" "time" "code.gitea.io/gitea/models" "code.gitea.io/gitea/modules/auth" "code.gitea.io/gitea/modules/context" "code.gitea.io/gitea/modules/git" "code.gitea.io/gitea/modules/log" api "code.gitea.io/gitea/modules/structs" "code.gitea.io/gitea/modules/timeutil" issue_service "code.gitea.io/gitea/services/issue" pull_service "code.gitea.io/gitea/services/pull" ) // ListPullRequests returns a list of all PRs func ListPullRequests(ctx *context.APIContext, form api.ListPullRequestsOptions) { // swagger:operation GET /repos/{owner}/{repo}/pulls repository repoListPullRequests // --- // summary: List a repo's pull requests // produces: // - application/json // parameters: // - name: owner // in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: page // in: query // description: Page number // type: integer // - name: state // in: query // description: "State of pull request: open or closed (optional)" // type: string // enum: [closed, open, all] // - name: sort // in: query // description: "Type of sort" // type: string // enum: [oldest, recentupdate, leastupdate, mostcomment, leastcomment, priority] // - name: milestone // in: query // description: "ID of the milestone" // type: integer // format: int64 // - name: labels // in: query // description: "Label IDs" // type: array // collectionFormat: multi // items: // type: integer // format: int64 // responses: // "200": // "$ref": "#/responses/PullRequestList" prs, maxResults, err := models.PullRequests(ctx.Repo.Repository.ID, &models.PullRequestsOptions{ Page: ctx.QueryInt("page"), State: ctx.QueryTrim("state"), SortType: ctx.QueryTrim("sort"), Labels: ctx.QueryStrings("labels"), MilestoneID: ctx.QueryInt64("milestone"), }) if err != nil { ctx.Error(http.StatusInternalServerError, "PullRequests", err) return } apiPrs := make([]*api.PullRequest, len(prs)) for i := range prs { if err = prs[i].LoadIssue(); err != nil { ctx.Error(http.StatusInternalServerError, "LoadIssue", err) return } if err = prs[i].LoadAttributes(); err != nil { ctx.Error(http.StatusInternalServerError, "LoadAttributes", err) return } apiPrs[i] = prs[i].APIFormat() } ctx.SetLinkHeader(int(maxResults), models.ItemsPerPage) ctx.JSON(http.StatusOK, &apiPrs) } // GetPullRequest returns a single PR based on index func GetPullRequest(ctx *context.APIContext) { // swagger:operation GET /repos/{owner}/{repo}/pulls/{index} repository repoGetPullRequest // --- // summary: Get a pull request // produces: // - application/json // parameters: // - name: owner // in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: index // in: path // description: index of the pull request to get // type: integer // format: int64 // required: true // responses: // "200": // "$ref": "#/responses/PullRequest" pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) } return } ctx.JSON(http.StatusOK, pr.APIFormat()) } // CreatePullRequest does what it says func 
CreatePullRequest(ctx *context.APIContext, form api.CreatePullRequestOption) { // swagger:operation POST /repos/{owner}/{repo}/pulls repository repoCreatePullRequest // --- // summary: Create a pull request // consumes: // - application/json // produces: // - application/json // parameters: // - name: owner // in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: body // in: body // schema: // "$ref": "#/definitions/CreatePullRequestOption" // responses: // "201": // "$ref": "#/responses/PullRequest" // "409": // "$ref": "#/responses/error" // "422": // "$ref": "#/responses/validationError" var ( repo = ctx.Repo.Repository labelIDs []int64 assigneeID int64 milestoneID int64 ) // Get repo/branch information _, headRepo, headGitRepo, compareInfo, baseBranch, headBranch := parseCompareInfo(ctx, form) if ctx.Written() { return } defer headGitRepo.Close() // Check if another PR exists with the same targets existingPr, err := models.GetUnmergedPullRequest(headRepo.ID, ctx.Repo.Repository.ID, headBranch, baseBranch) if err != nil { if !models.IsErrPullRequestNotExist(err) { ctx.Error(http.StatusInternalServerError, "GetUnmergedPullRequest", err) return } } else { err = models.ErrPullRequestAlreadyExists{ ID: existingPr.ID, IssueID: existingPr.Index, HeadRepoID: existingPr.HeadRepoID, BaseRepoID: existingPr.BaseRepoID, HeadBranch: existingPr.HeadBranch, BaseBranch: existingPr.BaseBranch, } ctx.Error(http.StatusConflict, "GetUnmergedPullRequest", err) return } if len(form.Labels) > 0 { labels, err := models.GetLabelsInRepoByIDs(ctx.Repo.Repository.ID, form.Labels) if err != nil { ctx.Error(http.StatusInternalServerError, "GetLabelsInRepoByIDs", err) return } labelIDs = make([]int64, len(labels)) for i := range labels { labelIDs[i] = labels[i].ID } } if form.Milestone > 0 { milestone, err := models.GetMilestoneByRepoID(ctx.Repo.Repository.ID, milestoneID) if err != nil { if models.IsErrMilestoneNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetMilestoneByRepoID", err) } return } milestoneID = milestone.ID } var deadlineUnix timeutil.TimeStamp if form.Deadline != nil { deadlineUnix = timeutil.TimeStamp(form.Deadline.Unix()) } prIssue := &models.Issue{ RepoID: repo.ID, Title: form.Title, PosterID: ctx.User.ID, Poster: ctx.User, MilestoneID: milestoneID, AssigneeID: assigneeID, IsPull: true, Content: form.Body, DeadlineUnix: deadlineUnix, } pr := &models.PullRequest{ HeadRepoID: headRepo.ID, BaseRepoID: repo.ID, HeadBranch: headBranch, BaseBranch: baseBranch, HeadRepo: headRepo, BaseRepo: repo, MergeBase: compareInfo.MergeBase, Type: models.PullRequestGitea, } // Get all assignee IDs assigneeIDs, err := models.MakeIDsFromAPIAssigneesToAdd(form.Assignee, form.Assignees) if err != nil { if models.IsErrUserNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", fmt.Sprintf("Assignee does not exist: [name: %s]", err)) } else { ctx.Error(http.StatusInternalServerError, "AddAssigneeByName", err) } return } // Check if the passed assignees is assignable for _, aID := range assigneeIDs { assignee, err := models.GetUserByID(aID) if err != nil { ctx.Error(http.StatusInternalServerError, "GetUserByID", err) return } valid, err := models.CanBeAssigned(assignee, repo, true) if err != nil { ctx.Error(http.StatusInternalServerError, "canBeAssigned", err) return } if !valid { ctx.Error(http.StatusUnprocessableEntity, "canBeAssigned", 
models.ErrUserDoesNotHaveAccessToRepo{UserID: aID, RepoName: repo.Name}) return } } if err := pull_service.NewPullRequest(repo, prIssue, labelIDs, []string{}, pr, assigneeIDs); err != nil { if models.IsErrUserDoesNotHaveAccessToRepo(err) { ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err) return } ctx.Error(http.StatusInternalServerError, "NewPullRequest", err) return } log.Trace("Pull request created: %d/%d", repo.ID, prIssue.ID) ctx.JSON(http.StatusCreated, pr.APIFormat()) } // EditPullRequest does what it says func EditPullRequest(ctx *context.APIContext, form api.EditPullRequestOption) { // swagger:operation PATCH /repos/{owner}/{repo}/pulls/{index} repository repoEditPullRequest // --- // summary: Update a pull request. If using deadline only the date will be taken into account, and time of day ignored. // consumes: // - application/json // produces: // - application/json // parameters: // - name: owner // in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: index // in: path // description: index of the pull request to edit // type: integer // format: int64 // required: true // - name: body // in: body // schema: // "$ref": "#/definitions/EditPullRequestOption" // responses: // "201": // "$ref": "#/responses/PullRequest" // "403": // "$ref": "#/responses/forbidden" // "412": // "$ref": "#/responses/error" // "422": // "$ref": "#/responses/validationError" pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) } return } err = pr.LoadIssue() if err != nil { ctx.Error(http.StatusInternalServerError, "LoadIssue", err) return } issue := pr.Issue issue.Repo = ctx.Repo.Repository if !issue.IsPoster(ctx.User.ID) && !ctx.Repo.CanWrite(models.UnitTypePullRequests) { ctx.Status(http.StatusForbidden) return } if len(form.Title) > 0 { issue.Title = form.Title } if len(form.Body) > 0 { issue.Content = form.Body } // Update or remove deadline if set if form.Deadline != nil || form.RemoveDeadline != nil { var deadlineUnix timeutil.TimeStamp if (form.RemoveDeadline == nil || !*form.RemoveDeadline) && !form.Deadline.IsZero() { deadline := time.Date(form.Deadline.Year(), form.Deadline.Month(), form.Deadline.Day(), 23, 59, 59, 0, form.Deadline.Location()) deadlineUnix = timeutil.TimeStamp(deadline.Unix()) } if err := models.UpdateIssueDeadline(issue, deadlineUnix, ctx.User); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err) return } issue.DeadlineUnix = deadlineUnix } // Add/delete assignees // Deleting is done the GitHub way (quote from their api documentation): // https://developer.github.com/v3/issues/#edit-an-issue // "assignees" (array): Logins for Users to assign to this issue. // Pass one or more user logins to replace the set of assignees on this Issue. // Send an empty array ([]) to clear all assignees from the Issue. 
if ctx.Repo.CanWrite(models.UnitTypePullRequests) && (form.Assignees != nil || len(form.Assignee) > 0) { err = issue_service.UpdateAssignees(issue, form.Assignee, form.Assignees, ctx.User) if err != nil { if models.IsErrUserNotExist(err) { ctx.Error(http.StatusUnprocessableEntity, "", fmt.Sprintf("Assignee does not exist: [name: %s]", err)) } else { ctx.Error(http.StatusInternalServerError, "UpdateAssignees", err) } return } } if ctx.Repo.CanWrite(models.UnitTypePullRequests) && form.Milestone != 0 && issue.MilestoneID != form.Milestone { oldMilestoneID := issue.MilestoneID issue.MilestoneID = form.Milestone if err = issue_service.ChangeMilestoneAssign(issue, ctx.User, oldMilestoneID); err != nil { ctx.Error(http.StatusInternalServerError, "ChangeMilestoneAssign", err) return } } if ctx.Repo.CanWrite(models.UnitTypePullRequests) && form.Labels != nil { labels, err := models.GetLabelsInRepoByIDs(ctx.Repo.Repository.ID, form.Labels) if err != nil { ctx.Error(http.StatusInternalServerError, "GetLabelsInRepoByIDsError", err) return } if err = issue.ReplaceLabels(labels, ctx.User); err != nil { ctx.Error(http.StatusInternalServerError, "ReplaceLabelsError", err) return } } if err = models.UpdateIssueByAPI(issue); err != nil { ctx.Error(http.StatusInternalServerError, "UpdateIssueByAPI", err) return } if form.State != nil { if err = issue_service.ChangeStatus(issue, ctx.User, api.StateClosed == api.StateType(*form.State)); err != nil { if models.IsErrDependenciesLeft(err) { ctx.Error(http.StatusPreconditionFailed, "DependenciesLeft", "cannot close this pull request because it still has open dependencies") return } ctx.Error(http.StatusInternalServerError, "ChangeStatus", err) return } } // Refetch from database pr, err = models.GetPullRequestByIndex(ctx.Repo.Repository.ID, pr.Index) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) } return } // TODO this should be 200, not 201 ctx.JSON(http.StatusCreated, pr.APIFormat()) } // IsPullRequestMerged checks if a PR exists given an index func IsPullRequestMerged(ctx *context.APIContext) { // swagger:operation GET /repos/{owner}/{repo}/pulls/{index}/merge repository repoPullRequestIsMerged // --- // summary: Check if a pull request has been merged // produces: // - application/json // parameters: // - name: owner // in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: index // in: path // description: index of the pull request // type: integer // format: int64 // required: true // responses: // "204": // description: pull request has been merged // "404": // description: pull request has not been merged pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound() } else { ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) } return } if pr.HasMerged { ctx.Status(http.StatusNoContent) } ctx.NotFound() } // MergePullRequest merges a PR given an index func MergePullRequest(ctx *context.APIContext, form auth.MergePullRequestForm) { // swagger:operation POST /repos/{owner}/{repo}/pulls/{index}/merge repository repoMergePullRequest // --- // summary: Merge a pull request // produces:
// in: path // description: owner of the repo // type: string // required: true // - name: repo // in: path // description: name of the repo // type: string // required: true // - name: index // in: path // description: index of the pull request to merge // type: integer // format: int64 // required: true // - name: body // in: body // schema: // $ref: "#/definitions/MergePullRequestOption" // responses: // "200": // "$ref": "#/responses/empty" // "405": // "$ref": "#/responses/empty" // "409": // "$ref": "#/responses/error" pr, err := models.GetPullRequestByIndex(ctx.Repo.Repository.ID, ctx.ParamsInt64(":index")) if err != nil { if models.IsErrPullRequestNotExist(err) { ctx.NotFound("GetPullRequestByIndex", err) } else { ctx.Error(http.StatusInternalServerError, "GetPullRequestByIndex", err) } return } if err = pr.LoadHeadRepo(); err != nil { ctx.ServerError("LoadHeadRepo", err) return } if pr.HeadRepo == nil { ctx.ServerError("LoadHeadRepo", models.ErrHeadRepoMissed{pr.HeadRepoID, pr.ID}) return } err = pr.LoadIssue() if err != nil { ctx.Error(http.StatusInternalServerError, "LoadIssue", err) return } pr.Issue.Repo = ctx.Repo.Repository if ctx.IsSigned { // Update issue-user. if err = pr.Issue.ReadBy(ctx.User.ID); err != nil { ctx.Error(http.StatusInternalServerError, "ReadBy", err) return } } if pr.Issue.IsClosed { ctx.NotFound() return } if !pr.CanAutoMerge() || pr.HasMerged || pr.IsWorkInProgress() { ctx.Status(http.StatusMethodNotAllowed) return } isPass, err := pull_service.IsPullCommitStatusPass(pr) if err != nil { ctx.Error(http.StatusInternalServerError, "IsPullCommitStatusPass", err) return } if !isPass && !ctx.IsUserRepoAdmin() { ctx.Status(http.StatusMethodNotAllowed) return } if len(form.Do) == 0 { form.Do = string(models.MergeStyleMerge) } message := strings.TrimSpace(form.MergeTitleField) if len(message) == 0 { if models.MergeStyle(form.Do) == models.MergeStyleMerge { message = pr.GetDefaultMergeMessage() } if models.MergeStyle(form.Do) == models.MergeStyleSquash { message = pr.GetDefaultSquashMessage() } } form.MergeMessageField = strings.TrimSpace(form.MergeMessageField) if len(form.MergeMessageField) > 0 { message += "\n\n" + form.MergeMessageField } if err := pull_service.Merge(pr, ctx.User, ctx.Repo.GitRepo, models.MergeStyle(form.Do), message); err != nil { if models.IsErrInvalidMergeStyle(err) { ctx.Status(http.StatusMethodNotAllowed) return } else if models.IsErrMergeConflicts(err) { conflictError := err.(models.ErrMergeConflicts) ctx.JSON(http.StatusConflict, conflictError) } else if models.IsErrRebaseConflicts(err) { conflictError := err.(models.ErrRebaseConflicts) ctx.JSON(http.StatusConflict, conflictError) } else if models.IsErrMergeUnrelatedHistories(err) { conflictError := err.(models.ErrMergeUnrelatedHistories) ctx.JSON(http.StatusConflict, conflictError) } else if models.IsErrMergePushOutOfDate(err) { ctx.Error(http.StatusConflict, "Merge", "merge push out of date") return } else if models.IsErrPushRejected(err) { errPushRej := err.(models.ErrPushRejected) if len(errPushRej.Message) == 0 { ctx.Error(http.StatusConflict, "Merge", "PushRejected without remote error message") return } ctx.Error(http.StatusConflict, "Merge", "PushRejected with remote message: "+errPushRej.Message) return } ctx.Error(http.StatusInternalServerError, "Merge", err) return } log.Trace("Pull request merged: %d", pr.ID) ctx.Status(http.StatusOK) } func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption) (*models.User, *models.Repository, *git.Repository, 
*git.CompareInfo, string, string) { baseRepo := ctx.Repo.Repository // Get compared branches information // format: <base branch>...[<head repo>:]<head branch> // base<-head: master...head:feature // same repo: master...feature // TODO: Validate form first? baseBranch := form.Base var ( headUser *models.User headBranch string isSameRepo bool err error ) // If there is no head repository, it means pull request between same repository. headInfos := strings.Split(form.Head, ":") if len(headInfos) == 1 { isSameRepo = true headUser = ctx.Repo.Owner headBranch = headInfos[0] } else if len(headInfos) == 2 { headUser, err = models.GetUserByName(headInfos[0]) if err != nil { if models.IsErrUserNotExist(err) { ctx.NotFound("GetUserByName") } else { ctx.ServerError("GetUserByName", err) } return nil, nil, nil, nil, "", "" } headBranch = headInfos[1] } else { ctx.NotFound() return nil, nil, nil, nil, "", "" } ctx.Repo.PullRequest.SameRepo = isSameRepo log.Info("Base branch: %s", baseBranch) log.Info("Repo path: %s", ctx.Repo.GitRepo.Path) // Check if base branch is valid. if !ctx.Repo.GitRepo.IsBranchExist(baseBranch) { ctx.NotFound("IsBranchExist") return nil, nil, nil, nil, "", "" } // Check if current user has fork of repository or in the same repository. headRepo, has := models.HasForkedRepo(headUser.ID, baseRepo.ID) if !has && !isSameRepo { log.Trace("parseCompareInfo[%d]: does not have fork or in same repository", baseRepo.ID) ctx.NotFound("HasForkedRepo") return nil, nil, nil, nil, "", "" } var headGitRepo *git.Repository if isSameRepo { headRepo = ctx.Repo.Repository headGitRepo = ctx.Repo.GitRepo } else { headGitRepo, err = git.OpenRepository(models.RepoPath(headUser.Name, headRepo.Name)) if err != nil { ctx.Error(http.StatusInternalServerError, "OpenRepository", err) return nil, nil, nil, nil, "", "" } } // user should have permission to read baseRepo's codes and pulls, NOT headRepo's permBase, err := models.GetUserRepoPermission(baseRepo, ctx.User) if err != nil { headGitRepo.Close() ctx.ServerError("GetUserRepoPermission", err) return nil, nil, nil, nil, "", "" } if !permBase.CanReadIssuesOrPulls(true) || !permBase.CanRead(models.UnitTypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User %-v cannot create/read pull requests or cannot read code in Repo %-v\nUser in baseRepo has Permissions: %-+v", ctx.User, baseRepo, permBase) } headGitRepo.Close() ctx.NotFound("Can't read pulls or can't read UnitTypeCode") return nil, nil, nil, nil, "", "" } // user should have permission to read headrepo's codes permHead, err := models.GetUserRepoPermission(headRepo, ctx.User) if err != nil { headGitRepo.Close() ctx.ServerError("GetUserRepoPermission", err) return nil, nil, nil, nil, "", "" } if !permHead.CanRead(models.UnitTypeCode) { if log.IsTrace() { log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v", ctx.User, headRepo, permHead) } headGitRepo.Close() ctx.NotFound("Can't read headRepo UnitTypeCode") return nil, nil, nil, nil, "", "" } // Check if head branch is valid. if !headGitRepo.IsBranchExist(headBranch) { headGitRepo.Close() ctx.NotFound() return nil, nil, nil, nil, "", "" } compareInfo, err := headGitRepo.GetCompareInfo(models.RepoPath(baseRepo.Owner.Name, baseRepo.Name), baseBranch, headBranch) if err != nil { headGitRepo.Close() ctx.Error(http.StatusInternalServerError, "GetCompareInfo", err) return nil, nil, nil, nil, "", "" } return headUser, headRepo, headGitRepo, compareInfo, baseBranch, headBranch }
// - application/json // parameters: // - name: owner
process.go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"fmt"
	"io/ioutil"
	"log"
	"math/big"
	"os"
	"strconv"
	"strings"

	"github.com/SSSaaS/sssa-golang"
)

// Create the temporary directory
func (s *commonSetting) makeTempDir() {
	s.tempDirPath = "temp"
	if _, err := os.Stat(s.tempDirPath); os.IsNotExist(err) {
		err = os.Mkdir(s.tempDirPath, 0755)
		if err != nil {
			log.Fatal(err)
		}
	}
}

// Apply secret sharing
func (s *uploadSetting) sssaCreate() {
	file, err := os.Open(s.comSet.readFilePath)
	if err != nil {
		log.Fatal(err)
	}
	defer file.Close()
	raw, err := ioutil.ReadAll(file)
	if err != nil {
		log.Fatal(err)
	}
	s.comSet.shares, err = sssa.Create(s.minNum, s.shareNum, string(raw))
	if err != nil {
		log.Fatal(err)
	}
}

// Encrypt
func (s *uploadSetting) encrypt() {
	var cipherShareNum int = 0
	// Managed shares
	for mIndex := 0; mIndex < len(s.managers); mIndex++ {
		// Encrypt each share with the common (symmetric) key
		comKey := genComKey()
		for sIndex := 0; sIndex < s.managers[mIndex].manageShareNum; sIndex++ {
			fmt.Printf("%d / %d\r", cipherShareNum+1, s.cipherShareNum)
			block, err := aes.NewCipher(comKey)
			if err != nil {
				log.Fatal(err)
			}
			gcm, err := cipher.NewGCM(block)
			if err != nil {
				log.Fatal(err)
			}
			nonce := make([]byte, gcm.NonceSize())
			_, err = rand.Read(nonce)
			if err != nil {
				log.Fatal(err)
			}
			content := []byte(s.comSet.shares[cipherShareNum])
			cipherContent := gcm.Seal(nil, nonce, content, nil)
			cipherContent = append(nonce, cipherContent...)
			index := strconv.Itoa(sIndex + 1)
			name := strings.Replace(s.managers[mIndex].keyfileName, ".", "_", -1)
			err = ioutil.WriteFile(s.comSet.tempDirPath+"/"+name+"_share"+index, cipherContent, 0755)
			if err != nil {
				log.Fatal(err)
			}
			cipherShareNum++
		}
		// Encrypt the common key with the manager's public key
		rng := rand.Reader
		encryptedComKey, err := rsa.EncryptOAEP(sha256.New(), rng, s.managers[mIndex].publicKey, comKey, []byte(""))
		if err != nil {
			log.Fatal(err)
		}
		s.managers[mIndex].config.EncryptedComKey = encryptedComKey
	}
	// Unmanaged shares
	for i := s.cipherShareNum; i < s.shareNum; i++ {
		index := strconv.Itoa(i - s.cipherShareNum + 1)
		err := ioutil.WriteFile(s.comSet.tempDirPath+"/un_managed_share"+index, []byte(s.comSet.shares[i]), 0755)
		if err != nil {
			log.Fatal(err)
		}
	}
	s.comSet.shares = nil
}

// Generate the common (symmetric) key
func genComKey() []byte {
	keyList := "abcdefghijklmnopqrstuvwxyzABCDEFHFGHIJKLMNOPQRSTUVWXYZ1234567890"
	size := 32 //256bit
	var key = make([
ha256.New(), rng, s.manager.privateKey, s.manager.config.EncryptedComKey, []byte(""))
	if err != nil {
		log.Fatal(err)
	}
	// Decrypt each share with the common key
	for i := 0; i < len(s.manager.config.ManagedShares); i++ {
		fmt.Printf("\r%d / %d", i+1, len(s.manager.config.ManagedShares))
		file, err := os.Open(s.comSet.tempDirPath + "/" + s.manager.config.ManagedShares[i])
		if err != nil {
			log.Fatal(err)
		}
		defer file.Close()
		raw, err := ioutil.ReadAll(file)
		if err != nil {
			log.Fatal(err)
		}
		block, err := aes.NewCipher(plainComKey)
		if err != nil {
			log.Fatal(err)
		}
		gcm, err := cipher.NewGCM(block)
		if err != nil {
			log.Fatal(err)
		}
		nonce := raw[:gcm.NonceSize()]
		plainByte, err := gcm.Open(nil, nonce, raw[gcm.NonceSize():], nil)
		if err != nil {
			log.Fatal(err)
		}
		s.comSet.shares = append(s.comSet.shares, string(plainByte))
	}
	// Unmanaged shares
	for i := 0; i < len(s.manager.config.UnmanagedShares); i++ {
		file, err := os.Open(s.comSet.tempDirPath + "/" + s.manager.config.UnmanagedShares[i])
		if err != nil {
			log.Fatal(err)
		}
		defer file.Close()
		raw, err := ioutil.ReadAll(file)
		if err != nil {
			log.Fatal(err)
		}
		s.comSet.shares = append(s.comSet.shares, string(raw))
	}
}

// Reconstruct the secret from the shares
func (s *downloadSetting) sssaCombine() {
	combined, err := sssa.Combine(s.comSet.shares)
	if err != nil {
		log.Fatal(err)
	}
	s.comSet.shares = nil
	err = ioutil.WriteFile(s.comSet.writeDirPath+"/content", []byte(combined), 0755)
	if err != nil {
		log.Fatal(err)
	}
}
]byte, 0, size)
	for i := 1; i <= size; i++ {
		res, _ := rand.Int(rand.Reader, big.NewInt(64))
		keyGen := keyList[res.Int64()]
		key = append(key, keyGen)
	}
	return key
}

// Decrypt.
func (s *downloadSetting) decrypt() {
	// Decrypt the common key with this manager's private key.
	rng := rand.Reader
	plainComKey, err := rsa.DecryptOAEP(s
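A minimal, self-contained sketch of the hybrid layout used by encrypt() and decrypt() above: each share is sealed with AES-256-GCM and the nonce is prepended to the ciphertext, while the per-manager common key is wrapped with RSA-OAEP. The sealShare/openShare helpers and the throwaway 2048-bit RSA key are illustrative only and not part of the original program.

package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"crypto/rsa"
	"crypto/sha256"
	"fmt"
	"log"
)

// sealShare encrypts one share with AES-256-GCM and prepends the nonce,
// mirroring the layout written by encrypt() above.
func sealShare(comKey, share []byte) []byte {
	block, err := aes.NewCipher(comKey)
	if err != nil {
		log.Fatal(err)
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		log.Fatal(err)
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		log.Fatal(err)
	}
	return append(nonce, gcm.Seal(nil, nonce, share, nil)...)
}

// openShare reverses sealShare: split off the nonce, then decrypt.
func openShare(comKey, sealed []byte) []byte {
	block, err := aes.NewCipher(comKey)
	if err != nil {
		log.Fatal(err)
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		log.Fatal(err)
	}
	nonce, ct := sealed[:gcm.NonceSize()], sealed[gcm.NonceSize():]
	plain, err := gcm.Open(nil, nonce, ct, nil)
	if err != nil {
		log.Fatal(err)
	}
	return plain
}

func main() {
	// Random 256-bit common key (the original derives it from a fixed alphabet instead).
	comKey := make([]byte, 32)
	if _, err := rand.Read(comKey); err != nil {
		log.Fatal(err)
	}

	// Wrap the common key for one manager with RSA-OAEP, as encrypt() does...
	priv, err := rsa.GenerateKey(rand.Reader, 2048)
	if err != nil {
		log.Fatal(err)
	}
	wrapped, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, &priv.PublicKey, comKey, []byte(""))
	if err != nil {
		log.Fatal(err)
	}
	// ...and unwrap it with the private key, as decrypt() does.
	unwrapped, err := rsa.DecryptOAEP(sha256.New(), rand.Reader, priv, wrapped, []byte(""))
	if err != nil {
		log.Fatal(err)
	}

	sealed := sealShare(unwrapped, []byte("share-1"))
	fmt.Printf("round trip: %s\n", openShare(unwrapped, sealed))
}

The prepended-nonce layout is what lets decrypt() recover the nonce with raw[:gcm.NonceSize()] without storing it in a separate file.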
generateArtiTest.py
#!/usr/bin/python3 import os import argparse import shutil import subprocess
import pprint #import oyaml as yaml from collections import OrderedDict import glob import numpy as np import datetime import time ts = time.time() class UnsortableList(list): def sort(self, *args, **kwargs): pass class UnsortableOrderedDict(OrderedDict): def items(self, *args, **kwargs): return UnsortableList(OrderedDict.items(self, *args, **kwargs)) yaml.add_representer(UnsortableOrderedDict, yaml.representer.SafeRepresenter.represent_dict) def genOrdererConfig(domainName, orderersCount): config = [] for ordcounter in range(orderersCount): tempConfig = {} tempConfig["Name"] = "Orderer{}".format(ordcounter+1) tempConfig["Domain"] = "orderer{}.{}".format(ordcounter+1, domainName) tempConfig["Specs"] = {"- Hostname": 'fabric-ord{}'.format(ordcounter+1)} config.append(tempConfig) #config.append({ # "Name": "Orderer", # "Domain": domainName, # "Specs": [{"Hostname": "orderer{}".format(e)} for e in range(orderersCount)], #}) return config def getHosteNames(org, peerCounts): config = [] for x in range(peerCounts[org]): config.append('Hostname : fabric-peer{}'.format(x+1)) #print config return config def genPeerConfig(domainName, orgsCount, peerCounts): config = [] for org in range(orgsCount): tempConfig = { "Name": "Org{}".format(org+1), "Domain": "org{}.{}".format(org+1, domainName), "Users": {"Count": 0}, #"Template" : { # "Count": peerCounts[org], # "Hostname": 'fabric-peer1'},#"Hostname": '{{{{ include "fabric.name" . }}}}-peer{}'.format(org+1)#the hostname in template is only duplicating the first hostname in specs to overwrite eachother "Specs" : getHosteNames(org, peerCounts) } config.append(tempConfig) return config """ def NewgenOrdererConfig(domainName, orderersCount): config = [] for ordcounter in range(orderersCount): tempConfig = {} tempConfig["Name"] = "Orderer{}".format(ordcounter+1) tempConfig["Domain"] = "orderer{}.{}".format(ordcounter+1, domainName) tempConfig["Specs"] = {"Hostname": 'fabric-ord{}'.format(ordcounter+1)} config.append(tempConfig) print(tempConfig,type(tempConfig),type(tempConfig["Name"]),type(tempConfig["Domain"]),type(tempConfig["Specs"])) print("************************tempConfig") pprint.pprint(tempConfig) return config def NewgenPeerConfig(domainName, orgsCount, peerCounts): config = [] for org in range(orgsCount): nodeConfig = { "Hostname": 'fabric-peer{}'.format(org+1), "Count": peerCounts[org] } tempConfig = { "Name": "Org{}".format(org+1), "Domain": "org{}.{}".format(org+1, domainName), "Template": nodeConfig, "EnableNodeOUs": True, "Users" : { "Count": peerCounts[org] }, } config.append(tempConfig) return config """ def genCrypto(domainName, orgsCount, orderersCount, peerCounts): config = {} config["OrdererOrgs"] = genOrdererConfig(domainName, orderersCount) config["PeerOrgs"] = genPeerConfig(domainName, orgsCount, peerCounts) #print(config) fHandle = open("crypto-config.yaml", "w") #d = UnsortableOrderedDict(config) stream = yaml.dump(config, default_flow_style = False, sort_keys=True) #stream2 = stream.replace("'", "") fHandle.write(stream.replace("'", "")) #fHandle.write(yaml.dump(config, default_flow_style=False)) fHandle.close() def genZookeeperService(imageName, networkName, domainName, zooKeepersCount, index): serviceName = "zookeeper{}".format(index) serviceConfig = { "hostname": "zookeeper{}.{}".format(index, domainName), "image": imageName, "networks": { networkName: { "aliases": [ "zookeeper{}.{}".format(index, domainName), ], } }, "environment": [ "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE={}".format(networkName), 
"ZOO_MY_ID={}".format(index+1), "ZOO_SERVERS={}".format(" ".join(["server.{}=zookeeper{}:2888:3888".format(e+1, e) for e in range(zooKeepersCount)])) ], } return { serviceName : serviceConfig } def genKafkaService(imageName, networkName, domainName, zooKeepersCount, index): serviceName = "kafka{}".format(index) serviceConfig = { "hostname": "kafka{}.{}".format(index, domainName), "image": imageName, "networks": { networkName: { "aliases": [ "kafka{}.{}".format(index, domainName), ], } }, "environment": [ "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE={}".format(domainName), "KAFKA_MESSAGE_MAX_BYTES={}".format(15728640), "KAFKA_REPLICA_FETCH_MAX_BYTES={}".format(15728640), "KAFKA_UNCLEAN_LEADER_ELECTION_ENABLE={}".format(False), "KAFKA_DEFAULT_REPLICATION_FACTOR={}".format(3), "KAFKA_MIN_INSYNC_REPLICAS={}".format(2), "KAFKA_ZOOKEEPER_CONNECT={}".format(" ".join(["zookeeper{}.{}:2181".format(e, domainName) for e in range(zooKeepersCount)])), "KAFKA_BROKER_ID={}".format(index), "KAFKA_LOG_RETENTIONMS={}".format(-1), ], } return { serviceName : serviceConfig } def genOrdererService(imageName, networkName, domainName, loggingLevel, index, kafka=False): serviceName = "orderer{}".format(index) serviceConfig = { "hostname": "orderer{}.{}".format(index, domainName), "image": imageName, "environment": [ "ORDERER_GENERAL_LOGLEVEL={}".format(loggingLevel), "ORDERER_GENERAL_LISTENADDRESS=0.0.0.0", "ORDERER_GENERAL_GENESISMETHOD=file", "ORDERER_GENERAL_GENESISFILE=/var/hyperledger/orderer/orderer.genesis.block", "ORDERER_GENERAL_LOCALMSPID=OrdererMSP", "ORDERER_GENERAL_LOCALMSPDIR=/var/hyperledger/orderer/msp", "ORDERER_GENERAL_TLS_ENABLED=true", "ORDERER_GENERAL_TLS_PRIVATEKEY=/var/hyperledger/orderer/tls/server.key", "ORDERER_GENERAL_TLS_CERTIFICATE=/var/hyperledger/orderer/tls/server.crt", "ORDERER_GENERAL_TLS_ROOTCAS=[/var/hyperledger/orderer/tls/ca.crt]", ], "working_dir": "/opt/gopath/src/github.com/hyperledger/fabric", "command": "orderer", "volumes": [ "/shared/channel-artifacts/genesis.block:/var/hyperledger/orderer/orderer.genesis.block", "/shared/crypto-config/ordererOrganizations/example.com/orderers/orderer0.example.com/msp:/var/hyperledger/orderer/msp", "/shared/crypto-config/ordererOrganizations/example.com/orderers/orderer0.example.com/tls/:/var/hyperledger/orderer/tls", ], "networks": { networkName: { "aliases": [ "orderer{}.{}".format(index, domainName) ], } } } if kafka: serviceConfig["environment"].append("ORDERER_KAFKA_RETRY_SHORTINTERVAL=1s") serviceConfig["environment"].append("ORDERER_KAFKA_RETRY_SHORTTOTAL=30s") serviceConfig["environment"].append("ORDERER_KAFKA_VERBOSE=true") return { serviceName : serviceConfig } def genPeerService(imageName, networkName, domainName, orgIndex, peerIndex, loggingLevel): serviceName = "peer{}_org{}".format(peerIndex, orgIndex) serviceConfig = { "hostname": "peer{}.org{}.{}".format(peerIndex, orgIndex, domainName), "image": imageName, "environment": [ "CORE_PEER_ID=peer{}.org{}.{}".format(peerIndex, orgIndex, domainName), "CORE_PEER_ADDRESS=peer{}.org{}.{}:7051".format(peerIndex, orgIndex, domainName), "CORE_PEER_GOSSIP_BOOTSTRAP=peer{}.org{}.{}:7051".format(0 if peerIndex!=0 else 1, orgIndex, domainName), "CORE_PEER_GOSSIP_EXTERNALENDPOINT=peer{}.org{}.{}:7051".format(peerIndex, orgIndex, domainName), "CORE_PEER_LOCALMSPID=Org{}MSP".format(orgIndex), "CORE_VM_ENDPOINT=unix:///host/var/run/docker.sock", "CORE_VM_DOCKER_HOSTCONFIG_NETWORKMODE=hyperledger-ov", "CORE_LOGGING_LEVEL={}".format(loggingLevel), "CORE_PEER_TLS_ENABLED=true", 
"CORE_PEER_GOSSIP_USELEADERELECTION=true", "CORE_PEER_GOSSIP_ORGLEADER=false", "CORE_PEER_PROFILE_ENABLED=true", "CORE_PEER_TLS_CERT_FILE=/etc/hyperledger/fabric/tls/server.crt", "CORE_PEER_TLS_KEY_FILE=/etc/hyperledger/fabric/tls/server.key", "CORE_PEER_TLS_ROOTCERT_FILE=/etc/hyperledger/fabric/tls/ca.crt", ], "working_dir": "/opt/gopath/src/github.com/hyperledger/fabric/peer", "command": "peer node start", "deploy": { "resources": { "reservations": { "cpus": "1", "memory": "1g", } } }, "volumes": [ "/var/run/:/host/var/run/", "/shared/crypto-config/peerOrganizations/org{org}.{domain}/peers/peer{peer}.org{org}.{domain}/msp:/etc/hyperledger/fabric/msp".format(peer=peerIndex, org=orgIndex, domain=domainName), "/shared/crypto-config/peerOrganizations/org{org}.{domain}/peers/peer{peer}.org{org}.{domain}/tls:/etc/hyperledger/fabric/tls".format(peer=peerIndex, org=orgIndex, domain=domainName) ], "networks": { networkName: { "aliases": [ "peer{}.org{}.{}".format(peerIndex, orgIndex, domainName), ], } }, } return { serviceName : serviceConfig } def genCliService(imageName, networkName, domainName, loggingLevel): serviceName = "cli" serviceConfig = { "image": imageName, "environment": [ "GOPATH=/opt/gopath", "CORE_VM_ENDPOINT=unix:///host/var/run/docker.sock", "CORE_LOGGING_LEVEL={}".format(loggingLevel), "CORE_PEER_ID=cli", "CORE_PEER_ADDRESS=peer0.org1.example.com:7051", "CORE_PEER_LOCALMSPID=Org1MSP", "CORE_PEER_TLS_ENABLED=true", "CORE_PEER_TLS_CERT_FILE=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.crt", "CORE_PEER_TLS_KEY_FILE=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.key", "CORE_PEER_TLS_ROOTCERT_FILE=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/ca.crt", "CORE_PEER_MSPCONFIGPATH=/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/peerOrganizations/org1.example.com/users/[email protected]/msp", ], "working_dir": "/opt/gopath/src/github.com/hyperledger/fabric/peer", "command": "sleep 1d", "volumes": [ "/var/run/:/host/var/run/", "/shared/chaincode/:/opt/gopath/src/github.com/hyperledger/fabric/examples/chaincode/go", "/shared/crypto-config:/opt/gopath/src/github.com/hyperledger/fabric/peer/crypto/", "/shared/scripts:/opt/gopath/src/github.com/hyperledger/fabric/peer/scripts/", "/shared/channel-artifacts:/opt/gopath/src/github.com/hyperledger/fabric/peer/channel-artifacts", ], "networks": { networkName: { "aliases": [ "cli", ], } }, } return { serviceName : serviceConfig } def generateDocker(repoOwner, networkName, domainName, orgsCount, orderersCount, peerCounts, zooKeepersCount, kafkasCount, loggingLevel): config = { "version": '3', "networks": { networkName: { "external": True, }, }, "services": {} } for orderer in range(orderersCount): config["services"].update(genOrdererService("{}/fabric-orderer:latest".format(repoOwner), networkName, domainName, loggingLevel, orderer, kafkasCount>0)) for org in range(orgsCount): for peer in range(peerCounts[org]): config["services"].update(genPeerService("berendeanicolae/fabric-peer:latest".format(repoOwner), networkName, domainName, org+1, peer, loggingLevel)) for zooKeeper in range(zooKeepersCount): config["services"].update(genZookeeperService("{}/fabric-zookeeper:latest".format(repoOwner), networkName, domainName, zooKeepersCount, zooKeeper)) for kafka in range(kafkasCount): 
config["services"].update(genKafkaService("{}/fabric-kafka:latest".format(repoOwner), networkName, domainName, zooKeepersCount, kafka)) config["services"].update(genCliService("berendeanicolae/fabric-tools:latest".format(repoOwner), networkName, domainName, loggingLevel)) fHandle = open("docker-compose-cli.yaml", "w") fHandle.write(yaml.dump(config, default_flow_style=False)) fHandle.close() def genNetworkOrgs(domainName, orgsCount, orderersCount, peerCounts): config = [] for ordcounter in range(orderersCount): ordererConfig = {} ordererConfig["Name"] = "Orderer{}Org".format(ordcounter+1) ordererConfig["ID"] = "Orderer{}MSP".format(ordcounter+1) ordererConfig["MSPDir"] = "./crypto-config/ordererOrganizations/orderer{}.svc.cluster.local/msp".format(ordcounter+1) ordererConfig["AdminPrincipal"] = "Role.MEMBER" config.append(ordererConfig) for org in range(orgsCount): orgConfig = {} orgConfig["Name"] = "Org{}MSP".format(org+1) orgConfig["ID"] = "Org{}MSP".format(org+1) orgConfig["MSPDir"] = "crypto-config/peerOrganizations/org{}.svc.cluster.local/msp".format(org+1) orgConfig["AdminPrincipal"] = "Role.MEMBER" orgConfig["AnchorPeers"] = [] for peerPerOrg in range(peerCounts[org]): orgConfig["AnchorPeers"].append({ "Host": "fabric-peer{}org{}".format(peerPerOrg+1, org+1), "Port": 7051, }) config.append(orgConfig) return config def genNetworkCapabilities(): config = {} config["Global"] = {"V1_1": True} config["Orderer"] = {"V1_1": True} config["Application"] = {"V1_2": True} return config def genNetworkApplication(): config = {} config["Organizations"] = None return config def genNetworkOrderer(domainName, orderersCount): config = {} config["OrdererType"] = "kafka" config["Addresses"] = ['fabric-ord{}:7050'.format(ordCount+1, domainName) for ordCount in range(orderersCount)] config["BatchTimeout"] = "2s" config["BatchSize"] = { "MaxMessageCount": 10, "AbsoluteMaxBytes": 10485760, "PreferredMaxBytes": 524288, } config["Kafka"] = { "Brokers": ['fabric-kafka-hlf:9092'] } config["Organizations"] = None #if kafkasCount>0: # config["OrdererType"] = "kafka" # config["Kafka"]["Brokers"] = ["kafka{}.{}".format(e, domainName) for e in range(kafkasCount)] return config def setNetworkProfiles(config, domainName): config["Profiles"] = {} config["Profiles"]["OrdererGenesis"] = { "Capabilities": { "V1_1": True, }, "Orderer": { "OrdererType": config["Orderer"]["OrdererType"], "Addresses": config["Orderer"]["Addresses"], "BatchTimeout": config["Orderer"]["BatchTimeout"], "BatchSize": config["Orderer"]["BatchSize"], "Kafka": config["Orderer"]["Kafka"], "Organizations": [org for org in config["Organizations"] if "Orderer" in org["ID"]], "Capabilities": { "V1_1": True } }, "Consortiums": { "MyConsortium": { "Organizations": [org for org in config["Organizations"] if "Org" in org["ID"]] }, }, } config["Profiles"]["MyChannel"] = { "Consortium": "MyConsortium", "Application": { "Organizations": [org for org in config["Organizations"] if "Org" in org["ID"]], "Capabilities": { "V1_2": True, } } } def genNetwork(domainName, orgsCount, orderersCount, peerCounts): config = {} config["Organizations"] = genNetworkOrgs(domainName, orgsCount, orderersCount, peerCounts) config["Capabilities"] = genNetworkCapabilities() config["Application"] = genNetworkApplication() config["Orderer"] = genNetworkOrderer(domainName, orderersCount) setNetworkProfiles(config, domainName) fHandle = open("configtx.yaml", "w") #fHandle.write(yaml.dump(config, default_flow_style=False)) stream = yaml.dump(config, default_flow_style = False, 
sort_keys=True) fHandle.write(stream.replace("'", "")) fHandle.close() #config["BatchTimeout"] = "2s" #config["BatchSize"] = { # "MaxMessageCount": 10, # "AbsoluteMaxBytes": 10485760, # "PreferredMaxBytes": 524288, #} #config["Kafka"] = { # "Brokers": [] #} #config["Organizations"] = None def NewgetKafkaBrokers(domainName, kafkasCount): tempKafka= [] if kafkasCount>0: for e in range(kafkasCount): tempKafka.append("kafka{}-hlf.{}:9092".format(e, domainName)) else: tempKafka.append("kafka-hlf.orderers.svc.cluster.local:9092") return tempKafka def NewsetNetworkProfiles(config, domainName): config["Profiles"] = {} config["Profiles"]["OrdererGenesis"] = { "Orderer": { '<<' : '*OrdererDefaults', "Organizations": [org for org in config["Organizations"] if "Orderer" in org["ID"]], }, "Consortiums": { "MyConsortium": { "Organizations": [org for org in config["Organizations"] if "Org" in org["ID"]] }, }, } config["Profiles"]["MyChannel"] = { "Consortium": "MyConsortium", "Application": { '<<' : '*ApplicationDefaults', "Organizations": [org for org in config["Organizations"] if "Org" in org["ID"]], } } def NewgenNetwork(domainName, orgsCount, orderersCount, kafkasCount): config = {} config["Organizations"] = genNetworkOrgs(domainName, orgsCount, orderersCount) config["Application"] = '&ApplicationDefaults' #config["Application"]["Organizations"] = None config["Orderer"] = {"OrdererType" : "kafka","Addresses" : ['fabric-ord{}:7050'.format(e, domainName) for e in range(orderersCount)], "BatchSize" : { "MaxMessageCount": 10, "AbsoluteMaxBytes": 10485760, "PreferredMaxBytes": 524288, }, "Kafka" : {'Brokers' : ['{{ include "fabric.name" . }}-kafka-hlf:9092'] , "Organizations": None}} #config["Profiles"] = setNetworkProfiles(config, domainName, orgsCount, orderersCount) setNetworkProfiles(config, domainName) fHandle = open("configtx.yaml", "w") stream = yaml.dump(config, default_flow_style = False) #stream2 = stream.replace("'<<'", "<<") stream2 = stream.replace("null", "") #stream4 = stream3.replace("'&ApplicationDefaults'", "&ApplicationDefaults") #stream5 = stream4.replace("'*OrdererDefaults'", "*OrdererDefaults") #fHandle.write(stream5.replace("'*ApplicationDefaults'", "*ApplicationDefaults")) fHandle.write(stream2.replace("'", "")) #fHandle.write(yaml.dump(config, default_flow_style=False)) fHandle.close() def getKeyFilesInFolder(org, peerPerOrg, domainName): print "crypto-config/peerOrganizations/org{}.{}/peers/fabric-peer{}-org{}.{}/msp/keystore/*".format(org+1, domainName, peerPerOrg+1, org+1,domainName) return glob.glob("crypto-config/peerOrganizations/org{}.{}/peers/fabric-peer{}-org{}.{}/msp/keystore/*".format(org+1, domainName, peerPerOrg+1, org+1,domainName)) def genCaliperClients(domainName, orgsCount, orderersCount, peerCounts): clConfig = {} for org in range(orgsCount): for peerPerOrg in range(peerCounts[org]): clConfig["client{}.org{}.{}".format(peerPerOrg+1, org+1, domainName)] = { "client" : { "organization" : "Org{}".format(org+1), "credentialStore" : { "path" : "/tmp/hfc-kvs/org{}".format(org+1), "cryptoStore" : { "path" : "/tmp/hfc-kvs/org{}".format(org+1),}}, #"affiliation": "aff{}".format(peerPerOrg+1)}} "clientPrivateKey" : { "path" : "crypto-config/peerOrganizations/org{}.{}/peers/fabric-peer{}.org{}.{}/msp/keystore/key.pem".format(org+1, domainName, peerPerOrg+1, org+1,domainName)}, "clientSignedCert" : { "path" : "crypto-config/peerOrganizations/org{}.{}/peers/fabric-peer{}.org{}.{}/msp/signcerts/fabric-peer{}.org{}.{}-cert.pem".format(org+1, domainName, peerPerOrg+1, org+1, 
domainName, peerPerOrg+1,org+1,domainName)}}} #clConfig["client{}.org{}.{}".format(peerPerOrg+1, org+1, domainName)]["client"]["clientSignedCert"]["path"] = getKeyFilesInFolder(org, peerPerOrg, domainName) return clConfig def ordListInsideChannels(orderersCount): config = [] for ordcounter in range(orderersCount): config.append("ord{}-hlf-ord".format(ordcounter+1)) return config def peerListInsideChannels(orgsCount, peerCounts): clConfig = {} for org in range(orgsCount): for peerPerOrg in range(peerCounts[org]): clConfig["fabric-peer{}org{}".format(peerPerOrg+1, org+1)] = {"eventSource" : "true"} return clConfig def MSPList(orgNames): mspConfig='' for x in range(len(orgNames)): if x !=0: mspConfig = mspConfig + ', "{}MSP"'.format(orgNames[x]) else: mspConfig = mspConfig + '"{}MSP"'.format(orgNames[x]) #mspConfig.append('{}MSP'.format(x)) return '[{}]'.format(mspConfig) def contractsIdentitiesList(orgNames): mspConfig=[] for x in orgNames: clConfig = {'role': {'name': 'member', 'mspId': '{}MSP'.format(x)}} mspConfig.append(clConfig) return mspConfig #'create': {'buildTransaction': {'version': 0, 'consortium': 'SupplyChainConsortium', 'capabilities': [], 'msps': ['carrierMSP', 'manufacturerMSP']}}, 'channelName': 'allchannel'}]} def genCaliperChannels(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames, chaincode_init_arguments, chaincode_created): chConfig = {} tempList=[] chConfig["channelName"] = "allchannel" chConfig["create"] = {'buildTransaction': {'version': 0, 'consortium': '"SupplyChainConsortium"', 'capabilities': []}} chConfig["create"]["buildTransaction"]["msps"] = MSPList(orgNames) #chConfig["contracts"] = contractsListInsideChannels(orgsCount, peerCounts) #tempList.append({"id" : chaincodeName, # "contractID" : chaincodeName, # "install": { # "version" : chaincodeversion, # "language" : chaincode_lang, # "path" : chaincode_path,}, # 'instantiate': {'initFunction': chaincode_init_function}, # "initArguments" : chaincode_init_arguments, # "created" : chaincode_created}) chConfig["contracts"] = [{"id" : chaincodeName, "contractID" : chaincodeName, "install": { "version" : '"v{}"'.format(chaincodeversion), "language" : chaincode_lang, "path" : chaincode_path,}, 'instantiate': {'initFunction': chaincode_init_function}, "initArguments" : chaincode_init_arguments, "created" : chaincode_created, "endorsementPolicy" : {'policy': {'{}-of'.format(len(orgNames)): [{'signed-by': x} for x in range(len(orgNames))]}, "identities" : contractsIdentitiesList(orgNames)}}] #caliperNetwork # can use here endorsersList #{'policy': {'{}}-of'.format(len(orgNames)): [{'signed-by': x} for x in range(len(orgNames))]}} #chConfig["contracts"]["endorsementPolicy"] = {'policy': {'{}-of'.format(len(orgNames)): [{'signed-by': x} for x in range(len(orgNames))]}} #chConfig["contracts"]["identities"] = contractsIdentitiesList(orgNames) return chConfig def genCaliperOrganizations(domainName, orgsCount, orderersCount, peerCounts, orgNames): config = [] for org in orgNames: orgConfig = {} orgConfig["mspid"] = "{}MSP".format(org) orgConfig["identities"] = {'certificates': [{'admin': True, 'clientPrivateKey': {'path': 'secret/{}/tls/admin.pem'.format(org)}, 'clientSignedCert': {'path': 'secret/{}/tls/admin.cert'.format(org)}, 'name': '"admin"'}]} orgConfig["connectionProfile"] = {'path': '"./{}ConnectionProfile.yaml"'.format(org), 'discover': "True"} config.append(orgConfig) return config def 
getCaliperNetworkConfig(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames, chaincode_init_arguments, chaincode_created, endorsersList): caliperConfig = {} caliperConfig["channels"] = [genCaliperChannels(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, endorsersList, chaincode_init_arguments, chaincode_created)] #streamChannel = yaml.dump(caliperConfig, default_flow_style = None, sort_keys=False) #fHandle.write(stream.replace("'", "")) #fHandle.write(streamChannel) caliperConfig["info"] = {"Version" : "2.2.0", "Size" : "3 Orgs", "Orderer" : "Kafka", "Distribution" : "Single Host", "StateDB" : "CouchDB"} caliperConfig["caliper"] = {'sutOptions': {'mutualTls': False}, 'blockchain': 'fabric', 'fabric': {'gateway': {'usegateway': True, 'discovery': True}}} caliperConfig["version"] ='"2.0.0"' caliperConfig["name"] = "Fabric" caliperConfig["organizations"] = genCaliperOrganizations(domainName, orgsCount, orderersCount, peerCounts, endorsersList) fHandle = open("caliperNetworkConfig.yaml", "w") stream = yaml.dump(caliperConfig, default_flow_style = False, sort_keys=False) fHandle.write(stream.replace("'", "")) #fHandle.write(stream) fHandle.close() ''' caliperConfig["peers"] = genCaliperPeers(domainName, orgsCount, orderersCount, peerCounts) caliperConfig["orderers"] = genCaliperOrderers(domainName, orgsCount, orderersCount) caliperConfig["clients"] = genCaliperClients(domainName, orgsCount, orderersCount, peerCounts) def peersListInsideOrganizations(org, peerCounts): config = [] #for peerPerOrg in range(sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org]): for peerPerOrg in range(peerCounts[org]): #print peerCounts, org, peerPerOrg, sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org] config.append("fabric-peer{}org{}".format(peerPerOrg+1,org+1)) return config def genCaliperOrderers(domainName, orgsCount, orderersCount): ordererConfig = {} for ordcounter in range(orderersCount): ordererConfig["ord{}-hlf-ord".format(ordcounter+1)] = {"url" : "grpc://fabric-ord{}:7050".format(ordcounter+1), "grpcOptions" : {"ssl-target-name-override" : "fabric-ord{}".format(ordcounter+1)}} return ordererConfig def genCaliperPeers(domainName, orgsCount, orderersCount, peerCounts): clConfig = {} for org in range(orgsCount): #for peerPerOrg in range(sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org]): for peerPerOrg in range(peerCounts[org]): clConfig["fabric-peer{}org{}".format(peerPerOrg+1,org+1)] = {"url" : "grpc://fabric-peer{}org{}:7051".format(peerPerOrg+1, org+1), "grpcOptions" : {"ssl-target-name-override" : "fabric-peer{}org{}".format(peerPerOrg+1,org+1), "grpc.keepalive_time_ms" : "600000"}} #print peerCounts, org, peerPerOrg, sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org] return clConfig ''' """ def gendependencies(orgsCount, orderersCount, peerCounts): config = [] for ordcounter in range(orderersCount): ordererConfig = {} ordererConfig["name"] = "hlf-ord" ordererConfig["version"] = "1.3.0" ordererConfig["repository"] = "https://kubernetes-charts.storage.googleapis.com/" ordererConfig["alias"] = "ord{}".format(ordcounter+1) config.append(ordererConfig) for org in range(orgsCount): for peerNum in range(peerCounts[org]): orgConfig = {} orgConfig["name"] = "hlf-peer" orgConfig["version"] = "1.3.0" orgConfig["repository"] = "https://kubernetes-charts.storage.googleapis.com/" 
orgConfig["alias"] = "peer{}org{}".format(peerNum+1,org+1) config.append(orgConfig) for cdb in range(orgsCount): for peerNum in range(peerCounts[cdb]): #print(peerCounts[cdb]) cdbConfig = {} cdbConfig["name"] = "hlf-couchdb" cdbConfig["version"] = "1.0.7" cdbConfig["repository"] = "https://kubernetes-charts.storage.googleapis.com/" cdbConfig["alias"] = "cdb-peer{}org{}".format(peerNum+1,cdb+1) config.append(cdbConfig) kafkaConfig = {} kafkaConfig["name"] = "kafka" kafkaConfig["version"] = "0.20.8" kafkaConfig["repository"] = "https://kubernetes-charts-incubator.storage.googleapis.com/" kafkaConfig["alias"] = "kafka-hlf" config.append(kafkaConfig) return config """ """ def genFabricrequirements(orgsCount, orderersCount, peerCounts): reqConfig = {} reqConfig["dependencies"] = gendependencies(orgsCount, orderersCount, peerCounts) fHandle = open("requirements.yaml", "w") stream = yaml.dump(reqConfig, default_flow_style = False) stream2 = stream.replace(" ", " ") fHandle.write(stream2.replace("- ", " - ")) #fHandle.write(yaml.dump(reqConfig, default_flow_style=False)) fHandle.close() """ """ def genFabricValues(orgsCount, orderersCount, peerCounts): config = {} for cdb in range(orgsCount): for peerNum in range(peerCounts[cdb]): #config = {} config["cdb-peer{}org{}".format(peerNum+1,cdb+1)] = { "fullnameOverride" : "cdb-peer{}org{}-hlf-couchdb".format(peerNum+1,cdb+1), "image" : {"tag" : "0.4.10"}, "persistence" : {"size" : "1Gi", "storageClass" : "local-path"}, } #config.append(cdbConfig) for org in range(orgsCount):# must add another for for peerCount[org+1] for peerNum in range(peerCounts[org]): #orgConfig = {} config["peer{}org{}".format(peerNum+1,org+1)] = { "image" : {"tag" : "1.4.6"}, "persistence" : {"accessMode" : "ReadWriteOnce", "size" : "1Gi", "storageClass" : "local-path"}, "peer" : {"databaseType" : "CouchDB", "couchdbInstance" : "cdb-peer{}org{}".format(peerNum+1,org+1), "mspID" : "Org{}MSP".format(org+1)}, "secrets" : {"channels" : '[hlf--channel]', "adminCert" : "hlf--peer{}-admincert".format(org+1), "adminKey" : "hlf--peer{}-adminkey".format(org+1), "peer" : { "cert" : "hlf--peer{}--org{}-idcert".format(peerNum+1, org+1),#### here ### modify to be peer{peerCount[org]+1}{org+1} "key" : "hlf--peer{}--org{}-idkey".format(peerNum+1, org+1),### here ### "caCert" : "hlf--peer{}-cacert".format(org+1)}}, "nodeSelector" : { "peer" : "peer{}org{}".format(peerNum+1,org+1)} } #config.append(orgConfig) for ordcounter in range(orderersCount): #ordererConfig = {} config["ord{}".format(ordcounter+1)] = { "image" : {"tag" : "1.4.6"}, "persistence" : {"enabled" : "true", "accessMode" : "ReadWriteOnce", "size" : "1Gi", "storageClass" : "local-path"}, "ord" : {"type" : "kafka", "mspID" : "Orderer{}MSP".format(ordcounter+1)}, "secrets" : {"ord" : { "cert" : "hlf--ord{}-idcert".format(ordcounter+1), "key" : "hlf--ord{}-idkey".format(ordcounter+1), "caCert" : "hlf--ord{}-cacert".format(ordcounter+1)}, "genesis" : "hlf--genesis", "adminCert" : "hlf--ord{}-admincert".format(ordcounter+1)} } #config.append(ordererConfig) #kafkaConfig = {} config["kafka-hlf"] = { "persistence" : {"enabled" : "true", "accessMode" : "ReadWriteOnce", "size" : "1Gi", "storageClass" : "local-path"}} #config.append(kafkaConfig) #imageConfig = {} config["image"] = { "repository" : "nginx", "tag" : "stable", "pullPolicy" : "IfNotPresent"} #config.append(imageConfig) config["persistence"] = { "enabled" : "true", "annotations" : "{}", "storageClass" : "local-path", "accessMode" : "ReadWriteOnce", "size" : "1Gi"} 
#serviceAccountConfig = {} config["serviceAccount"] = { "create" : "true", "name" : '""'} #config.append(serviceAccountConfig) #servicConfig = {} config["service"] = { "type" : "ClusterIP", "port" : "80"} #config.append(servicConfig) #ingressConfig = {} config["ingress"] = { "enabled" : "false", "annotations" : "{}", "hosts" : {"host" : "chart-example.local" , "paths" : "[]"}, "tls" : "[]"} #config.append(ingressConfig) config["replicaCount"] = "1" config["imagePullSecrets"] = "[]" config["nameOverride"] = '""' config["fullnameOverride"] = '""' config["resources"] = '{}' config["nodeSelector"] = '{}' config["tolerations"] = '[]' config["affinity"] = '{}' fHandle = open("values.yaml", "w") stream = yaml.dump(config, default_flow_style = False) #stream2 = stream.replace("- ", "") #stream3 = stream2.replace(" ", " ") fHandle.write(stream.replace("'", "")) #fHandle.write(yaml.dump(config, default_flow_style=False)) fHandle.close() """ ''' def genCaliperOrderers(domainName, orgsCount, orderersCount): ordererConfig = {} for ordcounter in range(orderersCount): ordererConfig["ord{}-hlf-ord".format(ordcounter+1)] = {"url" : "grpc://fabric-ord{}:7050".format(ordcounter+1), "grpcOptions" : {"ssl-target-name-override" : "fabric-ord{}".format(ordcounter+1)}} return ordererConfig def genCaliperPeers(domainName, orgsCount, orderersCount, peerCounts): clConfig = {} for org in range(orgsCount): #for peerPerOrg in range(sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org]): for peerPerOrg in range(peerCounts[org]): clConfig["fabric-peer{}org{}".format(peerPerOrg+1,org+1)] = {"url" : "grpc://fabric-peer{}org{}:7051".format(peerPerOrg+1, org+1), "grpcOptions" : {"ssl-target-name-override" : "fabric-peer{}org{}".format(peerPerOrg+1,org+1), "grpc.keepalive_time_ms" : "600000"}} #print peerCounts, org, peerPerOrg, sum(peerCounts[0:org]), sum(peerCounts[0:org])+peerCounts[org] return clConfig ''' def getCaliperConnectionProfile(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, org, ordererOrg, ordererIndexCounter): caliperConnectionProfile = {} #connectionProfile = {'organizations': {'carrier-net{var OrgName}': {'peers': ['peer0.carrier-net'], 'orderers': ['orderer1.supplychain-net'], 'mspid': 'carrierMSP', 'certificateAuthorities': ['ca.carrier-net']}}, 'peers': {'peer0.carrier-net': {'url': 'grpcs://peer0.carrier-net:7051', 'tlsCACerts': {'path': 'secret/msp/tlscacerts/tlsca.pem'}}}, 'orderers': {'orderer1.supplychain-net': {'url': 'grpcs://orderer1.supplychain-net:7050', 'tlsCACerts': {'path': 'secret/msp/tlscacerts/orderer-tlsca.pem'}}}, 'name': 'test-network-carrier-net', 'certificateAuthorities': {'ca.carrier-net': {'url': 'https://ca.carrier-net:7054', 'httpOptions': {'verify': False}, 'tlsCACerts': {'path': 'secret/msp/tlscacerts/tlsca.pem'}, 'caName': 'ca.carrier-net'}}, 'channels': {'allchannel': {'peers': {'peer0.carrier-net': {'endorsingPeer': True, 'chaincodeQuery': True, 'eventSource': True, 'discover': True, 'ledgerQuery': True}}, 'orderers': ['orderer1.supplychain-net']}}, 'client': {'organization': 'carrier-net', 'connection': {'timeout': {'peer': {'endorser': '300', 'eventHub': '300', 'eventReg': '300'}, 'orderer': '300'}}}, 'version': '1.0.0'} caliperConnectionProfile["channels"] = {'allchannel': {'peers': {'peer0.{}-net'.format(org): {'endorsingPeer': True, 'chaincodeQuery': True, 'eventSource': True, 'discover': True, 'ledgerQuery': True}}, 'orderers': 
['orderer{}.{}-net'.format(ordererIndexCounter, ordererOrg)]}} caliperConnectionProfile["name"] = "test-network-carrier-net" caliperConnectionProfile["version"] = "1.0.0" caliperConnectionProfile["client"] = {'organization': '{}-net'.format(org), 'connection': {'timeout': {'peer': {'endorser': '"300"', 'eventHub': '"300"', 'eventReg': '"300"'}, 'orderer': '"300"'}}} caliperConnectionProfile["peers"] = {'peer0.{}-net'.format(org): {'url': 'grpcs://peer0.{}-net:7051'.format(org), 'tlsCACerts': {'path': 'secret/{}/msp/tlscacerts/tlsca.pem'.format(org)}}} ###########" get the ordere org of this ogr here for fabric config" caliperConnectionProfile["orderers"] = {'orderer{}.{}-net'.format(ordererIndexCounter, ordererOrg): {'url': 'grpcs://orderer{}.{}-net:7050'.format(ordererIndexCounter,ordererOrg), 'tlsCACerts': {'path': 'secret/{}/msp/tlscacerts/orderer-tlsca.pem'.format(org)}}} caliperConnectionProfile["organizations"] = {'{}-net'.format(org): {'peers': ['peer0.{}-net'.format(org)], 'orderers': ['orderer{}.{}-net'.format(ordererIndexCounter, ordererOrg)], 'mspid': '{}MSP'.format(org), 'certificateAuthorities': ['ca.{}-net'.format(org)]}} caliperConnectionProfile["certificateAuthorities"] = {'ca.{}-net'.format(org): {'url': 'https://ca.{}-net:7054'.format(org), 'httpOptions': {'verify': False}, 'tlsCACerts': {'path': 'secret/{}/msp/tlscacerts/tlsca.pem'.format(org)}, 'caName': 'ca.{}-net'.format(org)}} #caliperConfig["clients"] = genCaliperClients(domainName, orgsCount, orderersCount, peerCounts) fHandle = open("{}ConnectionProfile.yaml".format(org), "w") stream = yaml.dump(caliperConnectionProfile, default_flow_style = False, sort_keys=False) fHandle.write(stream.replace("'", "")) fHandle.close() def genBAFChannels(domainName, orgsCount, orderersCount, peerCounts, orgNames): config = [] for org in orgNames: orgConfig = {} orgConfig["mspid"] = "{}MSP".format(org) orgConfig["identities"] = {'certificates': [{'admin': True, 'clientPrivateKey': {'path': "secret/{}/tls/admin.pem".format(org)}, 'clientSignedCert': {'path': "secret/{}/tls/admin.cert".format(org)}, 'name': 'admin'}]} orgConfig["connectionProfile"] = {'path': './{}ConnectionProfile.yaml'.format(org), 'discover': "True"} config.append(orgConfig) return config def genBAForganizationsPeers(org, peerCounts, orgNames, pathToBAF, chaincodeversion, chaincodeName, BAFgitusername, BAFgit_url, BAFgitpassword, BAFgitbranch, BAFChaincodePath): config = [] for peerNb in range(peerCounts[org]): peerConfig={'name': 'peer{}'.format(peerNb), 'certificate': '{}/build/{}/ca.crt'.format(pathToBAF, orgNames[org]), 'peerAddress': 'peer{}.{}-net:7051'.format(peerNb, orgNames[org]), 'grpc': {'port': 7051}, 'restserver': {'targetPort': 20001, 'port': 20001}, 'couchdb': {'port': 5984}, 'gossippeeraddress': 'peer{}.{}-net:7051'.format((peerNb+1) % peerCounts[org], orgNames[org]), 'chaincode': {'version': '"{}"'.format(chaincodeversion), 'name': '"{}"'.format(chaincodeName), 'repository': {'username': '"{}"'.format(BAFgitusername), 'url': '"{}"'.format(BAFgit_url), 'password': '"{}"'.format(BAFgitpassword), 'branch': '{}'.format(BAFgitbranch), 'path': '"{}"'.format(BAFChaincodePath)}, 'endorsements': '""', 'maindirectory': '"."', 'arguments': '\\"init\\",\\"\\"'}, 'peer': '', 'type': 'anchor', 'events': {'port': 7053}, 'expressapi': {'targetPort': 3000, 'port': 3000}, 'cli': 'disabled'} config.append(peerConfig) return config def genBAFOrganizations(domainName, orgsCount, orderersCount, peerCounts, orgNames, BAFgit_protocol, BAFgit_url, BAFgitbranch, 
BAFgitrelease_dir, BAFgitchart_source, BAFgit_repo, BAFgitusername, BAFgitpassword, BAFgitemail, BAFgitprivate_key, BAFk8sContext, BAFk8sConfig_file, VAULT_ADDR, VAULT_TOKEN, endorsersList, ordererOwnershipList, pathToBAF, chaincodeversion, chaincodeName, BAFChaincodePath, cloud_provider): config = [] for org in range(len(orgNames)): if orgNames[org] in endorsersList: orgConfig = {'ca_data': {'url': 'ca.{}-net:7054'.format(orgNames[org]), 'certificate': 'file/server.crt'}, 'org_status': 'new', 'external_url_suffix': 'develop.local.com', 'organization': '', 'services': {'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '/C=CH/ST=Zurich/L=Zurich/O={}/CN=ca.{}-net'.format(orgNames[org], orgNames[org])}, 'peers': genBAForganizationsPeers(org, peerCounts, orgNames, pathToBAF, chaincodeversion, chaincodeName, BAFgitusername, BAFgit_url, BAFgitpassword, BAFgitbranch, BAFChaincodePath)}, 'k8s': {'region': '"cluster_region"', 'config_file': '"{}"'.format(BAFk8sConfig_file), 'context': '"{}"'.format(BAFk8sContext)}, 'cli': 'enabled', 'gitops': {'username': '"{}"'.format(BAFgitusername), 'private_key': '"{}/build/gitops"'.format(pathToBAF), 'git_protocol': '"{}"'.format(BAFgit_protocol), 'chart_source': '"{}"'.format(BAFgitchart_source), 'git_url': '"{}://{}"'.format(BAFgit_protocol, BAFgit_url), 'branch': '"{}"'.format(BAFgitbranch), 'release_dir': '"{}"'.format(BAFgitrelease_dir), 'password': '"{}"'.format(BAFgitpassword), 'email': '"{}"'.format(BAFgitemail), 'git_repo': '"{}"'.format(BAFgit_repo)}, 'cloud_provider': '{}'.format(cloud_provider), 'name': orgNames[org], 'country': 'CH', 'state': 'Zurich', 'location': 'Zurich', 'vault': {'url': '"{}"'.format(VAULT_ADDR), 'root_token': '"{}"'.format(VAULT_TOKEN), 'secret_path': '"secret"'}, 'subject': 'O={},OU={},L=47.38/8.54/Zurich,C=CH'.format(orgNames[org], orgNames[org]), 'type': 'peer'} else: orgConfig = {'ca_data': {'url': 'ca.{}-net:7054'.format(orgNames[org]), 'certificate': 'file/server.crt'}, 'org_status': 'new', 'external_url_suffix': 'develop.local.com', 'organization': '', 'services': {'consensus': {'grpc': {'port': 9092}, 'type': 'broker', 'name': 'kafka', 'replicas': 3}, 'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '"/C=GB/ST=London/L=London/O=Orderer/CN=ca.{}-net"'.format(orgNames[org], orgNames[org])}, 'orderers': [{'consensus': 'kafka', 'grpc': {'port': 7050}, 'orderer': '', 'type': 'orderer', 'name': 'orderer1'}]}, 'k8s': {'region': '"cluster_region"', 'config_file': '"{}"'.format(BAFk8sConfig_file), 'context': '"{}"'.format(BAFk8sContext)}, 'cli': 'disabled', 'gitops': {'username': '"{}"'.format(BAFgitusername), 'private_key': '"{}/build/gitops"'.format(pathToBAF), 'git_protocol': '"{}"'.format(BAFgit_protocol), 'chart_source': '"{}"'.format(BAFgitchart_source), 'git_url': '"{}://{}"'.format(BAFgit_protocol, BAFgit_url), 'branch': '"{}"'.format(BAFgitbranch), 'release_dir': '"{}"'.format(BAFgitrelease_dir), 'password': '"{}"'.format(BAFgitpassword), 'email': '"{}"'.format(BAFgitemail), 'git_repo': '"{}"'.format(BAFgit_repo)}, 'cloud_provider': '{}'.format(cloud_provider), 'name': orgNames[org], 'country': 'CH', 'state': 'Zurich', 'location': 'Zurich', 'vault': {'url': '"{}"'.format(VAULT_ADDR), 'root_token': '"{}"'.format(VAULT_TOKEN), 'secret_path': '"secret"'}, 'subject': '"O=Orderer,L=51.50/-0.13/London,C=GB"', 'type': 'orderer'} #orgConfig = {'cloud_provider': 'minikube', 'name': org, 'country': 'GB', 'state': 'London', 'location': 'London', 'subject': 
'O=Carrier,OU=Carrier,L=51.50/-0.13/London,C=GB', 'type': 'peer', 'version': '2.2.0'} #orgConfig["organization"] = None #orgConfig["services"] = {'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '/C=GB/ST=London/L=London/O=Carrier/CN=ca.carrier-net'}} # TAKE peer count per org into cconsideration here ******* and in caliper config **************** #orgConfig["peers"] = None #orgConfig["vault"] = {'url': vaultUrl, 'root_token': vaultRootToken, 'secret_path': 'secret'} config.append(orgConfig) return config def genBAFOrderers(orgNames, pathToBAF): config = [] used = [] for org in orgNames: if org not in used: ordererConfig={'org_name': org, 'orderer': '', 'name': 'orderer1', 'certificate': '{}/build/{}/orderer1.crt'.format(pathToBAF, org), 'type': 'orderer', 'uri': 'orderer1.{}-net:7050'.format(org)} config.append(ordererConfig) used.append(org) return config def TOUSEEEEchannelParticipantsList(orgNames): mspConfig='' for x in range(len(orgNames)): if x !=0: mspConfig = mspConfig + ', "{}MSP"'.format(orgNames[x]) else: mspConfig = mspConfig + '"{}MSP"'.format(orgNames[x]) #mspConfig.append('{}MSP'.format(x)) return '[{}]'.format(mspConfig) def channelParticipantsList(orgNames, ordererOwnershipList, peerCounts, endorsersList): config = [] for org in range(len(orgNames)): if orgNames[org] in endorsersList: a = [1,2,3] orgConfig = {'ordererAddress': 'orderer1.{}-net:7050'.format(ordererOwnershipList[org]), 'peers': [{'peer': '', 'gossipAddress': 'peer{}.{}-net:7051'.format((x+1) % peerCounts[org], orgNames[org]), 'name': 'peer{}'.format(x), 'peerAddress': 'peer{}.{}-net:7051'.format(x , orgNames[org])} for x in range(peerCounts[org])], 'name': orgNames[org], 'organization': '', 'org_status': 'new'} #for x in range(peerCounts[org]): # print("********************* HERE ************ peer{}.{}-net:7051.format(x+1 % peerCounts[org]", org, x, x+1, peerCounts[org], (x+1) % peerCounts[org]) if org == 0: orgConfig["type"] = "creator" else: orgConfig["type"] = "joiner" config.append(orgConfig) return config def getBAFnetwork(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames, BAFgit_protocol, BAFgit_url, BAFgitbranch, BAFgitrelease_dir, BAFgitchart_source, BAFgit_repo, BAFgitusername, BAFgitpassword, BAFgitemail, BAFgitprivate_key, BAFk8sContext, BAFk8sConfig_file, vaultUrl, vaultRootToken, endorsersList, ordererOwnershipList, pathToBAF, BAFChaincodePath, cloud_provider): bafNetwork = {'network': {'channels': [{'channel_name': 'AllChannel', 'orderer': {'name': 'supplychain'}, 'participants': channelParticipantsList(orgNames, ordererOwnershipList, peerCounts, endorsersList), 'endorsers': {'corepeerAddress': ['peer0.{}-net:7051'.format(x) for x in endorsersList], 'name': endorsersList}, 'consortium': 'SupplyChainConsortium', 'channel': '', 'genesis': {'name': 'OrdererGenesis'}}], 'organizations': genBAFOrganizations(domainName, orgsCount, orderersCount, peerCounts, orgNames, BAFgit_protocol, BAFgit_url, BAFgitbranch, BAFgitrelease_dir, BAFgitchart_source, BAFgit_repo, BAFgitusername, BAFgitpassword, BAFgitemail, BAFgitprivate_key, BAFk8sContext, BAFk8sConfig_file, vaultUrl, vaultRootToken, endorsersList, ordererOwnershipList, pathToBAF, chaincodeversion, chaincodeName, BAFChaincodePath, cloud_provider), 'version': '2.2.0', 'orderers': genBAFOrderers(ordererOwnershipList, pathToBAF), 'env': {'retry_count': 50, 'type': '"local"', 'proxy': 'none', 'ambassadorPorts': '15010,15020', 
'external_dns': 'disabled'}, 'docker': {'url': '"index.docker.io/hyperledgerlabs"', 'username': '"docker_username"', 'password': '"docker_password"'}, 'type': 'fabric'}} #bafNetwork = {} #bafNetwork["network"] = {'version': '2.2.0' ,'type': 'fabric'} #bafNetwork["network"]["env"] ={'env': {'retry_count': 50, 'type': 'local', 'proxy': 'none', 'ambassadorPorts': '15010,15020', 'external_dns': 'disabled'}, 'docker': {'url': 'index.docker.io/hyperledgerlabs', 'username': 'docker_username', 'password': 'docker_password'}} ##bafNetwork["network"]["type"] = "fabric" ##bafNetwork["network"]["version"] = {"2.2.0"} #bafNetwork["network"]["channels"] = genBAFChannels(domainName, orgsCount, orderersCount, peerCounts, orgNames) #bafNetwork["network"]["orderers"] = genBAFOrderers(domainName, orgsCount, orderersCount) #bafNetwork["network"]["organizations"] = [genBAFOrganizations(domainName, orgsCount, orderersCount, peerCounts, orgNames, BAFgit_protocol, BAFgit_url, BAFgitbranch, BAFgitrelease_dir, BAFgitchart_source, BAFgit_repo, BAFgitusername, BAFgitpassword, BAFgitemail, BAFgitprivate_key, BAFk8sContext, BAFk8sConfig_file, vaultUrl, vaultRootToken)] ##{'network': {'channels': [{'channel_name': 'AllChannel', 'orderer': {'name': 'supplychain'}, 'participants': [{'ordererAddress': 'orderer1.supplychain-net:7050', 'peers': [{'peer': None, 'gossipAddress': 'peer0.carrier-net:7051', 'name': 'peer0', 'peerAddress': 'peer0.carrier-net:7051'}], 'name': 'carrier', 'organization': None, 'type': 'creator', 'org_status': 'new'}, {'ordererAddress': 'orderer1.supplychain-net:7050', 'peers': [{'peer': None, 'gossipAddress': 'peer0.manufacturer-net:7051', 'name': 'peer0', 'peerAddress': 'peer0.manufacturer-net:7051'}], 'name': 'manufacturer', 'organization': None, 'type': 'joiner', 'org_status': 'new'}], 'endorsers': {'corepeerAddress': ['peer0.carrier-net:7051', 'peer0.manufacturer-net:7051'], 'name': ['carrier', 'manufacturer']}, 'consortium': 'SupplyChainConsortium', 'channel': None, 'genesis': {'name': 'OrdererGenesis'}}], 'organizations': [{'ca_data': {'url': 'ca.supplychain-net:7054', 'certificate': 'file/server.crt'}, 'org_status': 'new', 'external_url_suffix': 'develop.local.com', 'organization': None, 'services': {'consensus': {'grpc': {'port': 9092}, 'type': 'broker', 'name': 'kafka', 'replicas': 3}, 'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '/C=GB/ST=London/L=London/O=Orderer/CN=ca.supplychain-net'}, 'orderers': [{'consensus': 'kafka', 'grpc': {'port': 7050}, 'orderer': None, 'type': 'orderer', 'name': 'orderer1'}]}, 'k8s': {'region': 'cluster_region', 'config_file': '~/.kube/config', 'context': 'local'}, 'cli': 'disabled', 'gitops': {'username': 'ayhamkassab', 'private_key': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/gitops', 'git_protocol': 'https', 'chart_source': 'platforms/hyperledger-fabric/charts', 'git_url': 'https://github.com/ayhamkassab/blockchain-automation-framework.git', 'branch': 'master', 'release_dir': 'platforms/hyperledger-fabric/releases/dev', 'password': '**Ak47**', 'email': 'github_email', 'git_repo': 'github.com/ayhamkassab/blockchain-automation-framework.git'}, 'cloud_provider': 'minikube', 'name': 'supplychain', 'country': 'UK', 'state': 'London', 'location': 'London', 'vault': {'url': 'http://130.104.229.21:30000', 'root_token': 's.32JqFMwzbrqbeFl8bKI9R2QC', 'secret_path': 'secret'}, 'subject': 'O=Orderer,L=51.50/-0.13/London,C=GB', 'type': 'orderer'}, {'ca_data': {'url': 'ca.manufacturer-net:7054', 'certificate': 
'file/server.crt'}, 'org_status': 'new', 'external_url_suffix': 'develop.local.com', 'organization': None, 'services': {'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '/C=CH/ST=Zurich/L=Zurich/O=Manufacturer/CN=ca.manufacturer-net'}, 'peers': [{'name': 'peer0', 'certificate': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/manufacturer/ca.crt', 'peerAddress': 'peer0.manufacturer-net:7051', 'grpc': {'port': 7051}, 'restserver': {'targetPort': 20001, 'port': 20001}, 'couchdb': {'port': 5984}, 'gossippeeraddress': 'peer0.manufacturer-net:7051', 'chaincode': {'version': '1', 'name': 'simpleauction', 'repository': {'username': 'ayhamkassab', 'url': 'github.com/ayhamkassab/blockchain-automation-framework.git', 'password': '**Ak47**', 'branch': 'master', 'path': 'examples/supplychain-app/fabric/chaincode_rest_server/chaincode/'}, 'endorsements': '', 'maindirectory': '.', 'arguments': '\\"init\\",\\"\\"'}, 'peer': None, 'type': 'anchor', 'events': {'port': 7053}, 'expressapi': {'targetPort': 3000, 'port': 3000}, 'cli': 'disabled'}]}, 'k8s': {'region': 'cluster_region', 'config_file': '~/.kube/config', 'context': 'local'}, 'cli': 'enabled', 'gitops': {'username': 'ayhamkassab', 'private_key': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/gitops', 'git_protocol': 'https', 'chart_source': 'platforms/hyperledger-fabric/charts', 'git_url': 'https://github.com/ayhamkassab/blockchain-automation-framework.git', 'branch': 'master', 'release_dir': 'platforms/hyperledger-fabric/releases/dev', 'password': '**Ak47**', 'email': 'github_email', 'git_repo': 'github.com/ayhamkassab/blockchain-automation-framework.git'}, 'cloud_provider': 'minikube', 'name': 'manufacturer', 'country': 'CH', 'state': 'Zurich', 'location': 'Zurich', 'vault': {'url': 'http://130.104.229.21:30000', 'root_token': 's.32JqFMwzbrqbeFl8bKI9R2QC', 'secret_path': 'secret'}, 'subject': 'O=Manufacturer,OU=Manufacturer,L=47.38/8.54/Zurich,C=CH', 'type': 'peer'}, {'ca_data': {'url': 'ca.carrier-net:7054', 'certificate': 'file/server.crt'}, 'org_status': 'new', 'external_url_suffix': 'develop.local.com', 'organization': None, 'services': {'ca': {'grpc': {'port': 7054}, 'type': 'ca', 'name': 'ca', 'subject': '/C=GB/ST=London/L=London/O=Carrier/CN=ca.carrier-net'}, 'peers': [{'name': 'peer0', 'certificate': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/carrier/ca.crt', 'peerAddress': 'peer0.carrier-net:7051', 'grpc': {'port': 7051}, 'restserver': {'targetPort': 20001, 'port': 20001}, 'couchdb': {'port': 5984}, 'gossippeeraddress': 'peer0.carrier-net:7051', 'chaincode': {'version': '1', 'name': 'simpleauction', 'repository': {'username': 'ayhamkassab', 'url': 'github.com/ayhamkassab/blockchain-automation-framework.git', 'password': '**Ak47**', 'branch': 'master', 'path': 'examples/supplychain-app/fabric/chaincode_rest_server/chaincode/'}, 'endorsements': '', 'maindirectory': '.', 'arguments': '\\"init\\",\\"\\"'}, 'peer': None, 'type': 'anchor', 'events': {'port': 7053}, 'expressapi': {'targetPort': 3000, 'port': 3000}, 'cli': 'disabled'}]}, 'k8s': {'region': 'cluster_region', 'config_file': '~/.kube/config', 'context': 'local'}, 'cli': 'enabled', 'gitops': {'username': 'ayhamkassab', 'private_key': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/gitops', 'git_protocol': 'https', 'chart_source': 'platforms/hyperledger-fabric/charts', 'git_url': 'https://github.com/ayhamkassab/blockchain-automation-framework.git', 'branch': 'master', 
'release_dir': 'platforms/hyperledger-fabric/releases/dev', 'password': '**Ak47**', 'email': 'github_email', 'git_repo': 'github.com/ayhamkassab/blockchain-automation-framework.git'}, 'cloud_provider': 'minikube', 'name': 'carrier', 'country': 'GB', 'state': 'London', 'location': 'London', 'vault': {'url': 'http://130.104.229.21:30000', 'root_token': 's.32JqFMwzbrqbeFl8bKI9R2QC', 'secret_path': 'secret'}, 'subject': 'O=Carrier,OU=Carrier,L=51.50/-0.13/London,C=GB', 'type': 'peer'}], 'version': '2.2.0', 'orderers': [{'org_name': 'supplychain', 'orderer': None, 'name': 'orderer1', 'certificate': '/root/hyperledger-labs/myFork/blockchain-automation-framework/build/orderer1.crt', 'type': 'orderer', 'uri': 'orderer1.supplychain-net:7050'}], 'env': {'retry_count': 50, 'type': 'local', 'proxy': 'none', 'ambassadorPorts': '15010,15020', 'external_dns': 'disabled'}, 'docker': {'url': 'index.docker.io/hyperledgerlabs', 'username': 'docker_username', 'password': 'docker_password'}, 'type': 'fabric'}} fHandle = open("bafNetwork.yaml", "w") stream = yaml.dump(bafNetwork, default_flow_style = False, sort_keys=False) fHandle.write(stream.replace("'", "")) fHandle.close() def caliperValuesEndorsersList(orgNames): mspConfig='' for x in range(len(orgNames)): if x !=0: mspConfig = mspConfig + ' {}'.format(orgNames[x]) else: mspConfig = mspConfig + '{}'.format(orgNames[x]) #mspConfig.append('{}MSP'.format(x)) return '"{}"'.format(mspConfig) def getCaliperValues(replicaMasterCount, replicaWorkersCount, repository, vaultUrl, vaultRootToken, endorsersList, peerCounts, orderersCount): caliperValues = {'ingress': {'tls': [], 'enabled': False, 'hosts': [{'paths': [], 'host': 'chart-example.local'}], 'annotations': {}}, 'replicaWorkersCount': replicaWorkersCount, 'image': {'pullPolicy': 'Always', 'tag': 'b2c2', 'repository': repository}, 'args': {'second': 'manager', 'first': 'launch'}, 'replicaMasterCount': replicaMasterCount, 'VAULT_TOKEN': vaultRootToken, 'VAULT_ADDR': vaultUrl, 'fullnameOverride': '', 'securityContext': {}, 'service': {'type': 'ClusterIP', 'port': 80}, 'serviceAccount': {'create': True, 'name': ''}, 'ENDORSING_ORG_NAMES': caliperValuesEndorsersList(endorsersList), 'podSecurityContext': {}, 'nodeSelector': {}, 'affinity': {}, 'nameOverride': '', 'tolerations': [], 'imagePullSecrets': [], 'resources': {}, 'resultsFoldername':'{}Orgs-{}Peers-{}Orderers{}'.format(len(endorsersList), '-'.join(str(i) for i in peerCounts), orderersCount, datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d%H:%M:%S'))} fHandle = open("values.yaml", "w") stream = yaml.dump(caliperValues, default_flow_style = False, sort_keys=False) fHandle.write(stream.replace("'", "")) fHandle.close() def generate(): with open("fabricConfig.yaml", 'r') as stream: try: fabricConfig = yaml.safe_load(stream) endorsersBooleanList= [] ordererOwnershipList= [] ordererOwners= [] peerCounts= [] orgNames= [] listOfOrgs = fabricConfig["organizations"] orgsCount= len(listOfOrgs) for org in range(orgsCount): ordererOwnershipList.append(listOfOrgs[org]["orderer"]) orgNames.append(listOfOrgs[org]["name"]) endorsersBooleanList.append(listOfOrgs[org]["endorser"]) if listOfOrgs[org]["endorser"]: peerCounts.append(listOfOrgs[org]["numberOfPeers"]) else: peerCounts.append(0) #orderersCount=(len([idx for idx in range(len(ordererOwnershipList)) if ordererOwnershipList[idx] == True])) orderersCount = len(np.unique(ordererOwnershipList)) print(np.unique(ordererOwnershipList), orderersCount) domainName = fabricConfig["domain_name"]#"svc.cluster.local" 
endorsersList=([orgNames[idx] for idx in range(len(endorsersBooleanList)) if endorsersBooleanList[idx] == True]) print(endorsersList, endorsersBooleanList, "peerCounts", peerCounts) BAFk8sContext = fabricConfig["BAFk8s"]["context"] BAFk8sConfig_file = fabricConfig["BAFk8s"]["config_file"] vaultUrl = fabricConfig["vault"]["url"] vaultRootToken = fabricConfig["vault"]["root_token"] pathToBAF = fabricConfig["pathToBAF"] cloud_provider = fabricConfig["cloud_provider"] BAFChaincodePath = fabricConfig["BAFChaincodePath"] except yaml.YAMLError as exc: print(exc) with open("gitops.yaml", 'r') as stream: try: gitopsConfig = yaml.safe_load(stream) BAFgit_protocol = gitopsConfig["BAFgitops"]["git_protocol"] BAFgit_url = gitopsConfig["BAFgitops"]["git_url"] BAFgitbranch = gitopsConfig["BAFgitops"]["branch"] BAFgitrelease_dir = gitopsConfig["BAFgitops"]["release_dir"] BAFgitchart_source = gitopsConfig["BAFgitops"]["chart_source"] BAFgit_repo = gitopsConfig["BAFgitops"]["git_repo"] BAFgitusername = gitopsConfig["BAFgitops"]["username"] BAFgitpassword = gitopsConfig["BAFgitops"]["password"] BAFgitemail = gitopsConfig["BAFgitops"]["email"] BAFgitprivate_key = gitopsConfig["BAFgitops"]["private_key"] except yaml.YAMLError as exc: print(exc) kafka=True #orgsCount = int(sys.argv[1]) #orderersCount = int(sys.argv[2]) zooKeepersCount = 3 if kafka else 0 kafkasCount = 3 if kafka else 0 #peerCounts = list(sys.argv[3])#[2,1,3,1,1]21311 #for x in range(len(peerCounts)): # peerCounts[x]= int(peerCounts[x]) with open("caliperConfig.yaml", 'r') as stream: try: caliperConfig = yaml.safe_load(stream) #chaincodeName = sys.argv[4]#simple# still not done in bench mark config yaml (so far only tps is modifyble there) chaincodeName = caliperConfig["chaincode_name"] chaincodeversion = caliperConfig["chaincode_version"] chaincode_lang = caliperConfig["chaincode_lang"] chaincode_init_function = caliperConfig["chaincode_init_function"] chaincode_path = caliperConfig["chaincode_path"] chaincode_init_arguments = caliperConfig["initArguments"] chaincode_created = caliperConfig["created"] replicaMasterCount = caliperConfig["replicaMasterCount"] replicaWorkersCount = caliperConfig["replicaWorkersCount"] caliperImageRepository = caliperConfig["caliperImageRepository"] #print("chaincode", chaincodeName) #print("chaincodeversion", chaincodeversion) #print("domain_name",domainName) #print("orgNames",orgNames) #print("orgsCount",orgsCount) #print("orderersCount",orderersCount) #print("peerCounts",peerCounts, type(peerCounts[0])) except yaml.YAMLError as exc: print(exc) #with open("microBenchMarkconfig.yaml", 'r') as stream: # try: # benchmarkConfig = yaml.safe_load(stream) # print benchmarkConfig["test"]["rounds"][0]["txDuration"] # print "****************************" # print "****************************" # print "****************************" # benchmarkConfig["test"]["rounds"][0]["txDuration"] = caliperConfig["txDuration"]#now rewrite the file # print benchmarkConfig["test"]["rounds"][0]["txDuration"] # with open('microBenchMarkconfig.yaml', 'w') as outfile: # yaml.dump(benchmarkConfig, outfile, default_flow_style=False) # except yaml.YAMLError as exc: # print(exc) #genFabricValues(orgsCount, orderersCount, peerCounts) #genFabricrequirements(orgsCount, orderersCount, peerCounts) getCaliperNetworkConfig(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames, chaincode_init_arguments, chaincode_created, endorsersList) #for org in orgNames: 
ordererIndexCounter = 0 for org in range(len(orgNames)): if orgNames[org] in endorsersList: getCaliperConnectionProfile(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames[org], ordererOwnershipList[org], ordererIndexCounter) ordererIndexCounter += 1 #for org in range(len(endorsersList)): # getCaliperConnectionProfile(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, endorsersList[org], ordererOwnershipList[org]) getBAFnetwork(domainName, orgsCount, orderersCount, chaincodeName, peerCounts, chaincodeversion, chaincode_lang, chaincode_init_function, chaincode_path, orgNames, BAFgit_protocol, BAFgit_url, BAFgitbranch, BAFgitrelease_dir, BAFgitchart_source, BAFgit_repo, BAFgitusername, BAFgitpassword, BAFgitemail, BAFgitprivate_key, BAFk8sContext, BAFk8sConfig_file, vaultUrl, vaultRootToken, endorsersList, ordererOwnershipList, pathToBAF, BAFChaincodePath, cloud_provider) getCaliperValues(replicaMasterCount, replicaWorkersCount, caliperImageRepository, vaultUrl, vaultRootToken, endorsersList, peerCounts, orderersCount) #genNetwork(domainName, orgsCount, orderersCount, peerCounts) #genCrypto(domainName, orgsCount, orderersCount, peerCounts) #generateCerts() #replacePrivateKey(orgsCount, domainName) #generateChannelArtifacts(orgsCount) #generateHighThroughput(domainName, orgsCount, peerCounts) #generateDocker("hyperledger", "hyperledger-ov", domainName, orgsCount, orderersCount, peerCounts, zooKeepersCount, kafkasCount, "INFO") def copytree(src, dst): if os.path.isdir(dst): shutil.rmtree(dst) shutil.copytree(src, dst) def deploy(): if os.path.isdir("/export"): copytree("./../high-throughput/scripts", "/export/scripts") copytree("./crypto-config", "/export/crypto-config") copytree("./channel-artifacts", "/export/channel-artifacts") # copytree("./../high-throughput/chaincode", "/export/chaincode") copytree("./../chaincode/thesis_chaincode", "/export/chaincode") subprocess.Popen(["chmod -R 777 /export"], shell=True).wait() def main(): #parser = argparse.ArgumentParser(os.path.basename(__file__)) #args = parser.parse_args() generate() deploy() if __name__ == "__main__": main()
import yaml
import sys
model_incidents.go
/* Cortex XSOAR API This is the public REST API to integrate with the Cortex XSOAR server. HTTP request can be sent using any HTTP-client. For an example dedicated client take a look at: https://github.com/demisto/demisto-py. Requests must include API-key that can be generated in the Cortex XSOAR web client under 'Settings' -> 'Integrations' -> 'API keys' Optimistic Locking and Versioning\\: When using Cortex XSOAR REST API, you will need to make sure to work on the latest version of the item (incident, entry, etc.), otherwise, you will get a DB version error (which not allow you to override a newer item). In addition, you can pass 'version\\: -1' to force data override (make sure that other users data might be lost). Assume that Alice and Bob both read the same data from Cortex XSOAR server, then they both changed the data, and then both tried to write the new versions back to the server. Whose changes should be saved? Alice’s? Bob’s? To solve this, each data item in Cortex XSOAR has a numeric incremental version. If Alice saved an item with version 4 and Bob trying to save the same item with version 3, Cortex XSOAR will rollback Bob request and returns a DB version conflict error. Bob will need to get the latest item and work on it so Alice work will not get lost. Example request using 'curl'\\: ``` curl 'https://hostname:443/incidents/search' -H 'content-type: application/json' -H 'accept: application/json' -H 'Authorization: <API Key goes here>' --data-binary '{\"filter\":{\"query\":\"-status:closed -category:job\",\"period\":{\"by\":\"day\",\"fromValue\":7}}}' --compressed ``` API version: 2.0.0 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. package openapi import ( "encoding/json" ) // Incidents Incidents is a list of incident entities type Incidents struct { Items []map[string]interface{} } // NewIncidents instantiates a new Incidents object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewIncidents() *Incidents { t
NewIncidentsWithDefaults instantiates a new Incidents object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewIncidentsWithDefaults() *Incidents { this := Incidents{} return &this } func (o Incidents) MarshalJSON() ([]byte, error) { toSerialize := make([]interface{}, len(o.Items)) for i, item := range o.Items { toSerialize[i] = item } return json.Marshal(toSerialize) } func (o *Incidents) UnmarshalJSON(bytes []byte) (err error) { return json.Unmarshal(bytes, &o.Items) } type NullableIncidents struct { value *Incidents isSet bool } func (v NullableIncidents) Get() *Incidents { return v.value } func (v *NullableIncidents) Set(val *Incidents) { v.value = val v.isSet = true } func (v NullableIncidents) IsSet() bool { return v.isSet } func (v *NullableIncidents) Unset() { v.value = nil v.isSet = false } func NewNullableIncidents(val *Incidents) *NullableIncidents { return &NullableIncidents{value: val, isSet: true} } func (v NullableIncidents) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } func (v *NullableIncidents) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) }
his := Incidents{} return &this } //
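The Incidents model above is a thin wrapper around []map[string]interface{} whose JSON form is a bare array rather than an object with an "Items" key. A minimal, self-contained sketch of that round-trip follows; the type is re-declared locally so the snippet compiles on its own, and the id/name/version fields in the sample payload are illustrative only (the API description above states only that each item carries a numeric incremental version used for optimistic locking).

package main

import (
	"encoding/json"
	"fmt"
)

// Incidents is re-declared here so the sketch is self-contained; it mirrors
// the generated model: the JSON representation is a bare array, not an object.
type Incidents struct {
	Items []map[string]interface{}
}

// MarshalJSON writes the wrapped items directly as a JSON array.
func (o Incidents) MarshalJSON() ([]byte, error) {
	return json.Marshal(o.Items)
}

// UnmarshalJSON reads a JSON array back into the wrapper.
func (o *Incidents) UnmarshalJSON(b []byte) error {
	return json.Unmarshal(b, &o.Items)
}

func main() {
	// Sample payload; the field names are made up for illustration. The
	// "version" field echoes the optimistic-locking note in the API comment.
	in := []byte(`[{"id":"1","name":"test incident","version":1}]`)

	var incs Incidents
	if err := json.Unmarshal(in, &incs); err != nil {
		panic(err)
	}
	fmt.Println(len(incs.Items), incs.Items[0]["name"]) // 1 test incident

	out, err := json.Marshal(incs)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // [{"id":"1","name":"test incident","version":1}]
}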
get_test.go
// These tests verify that the parameters to the resolver are properly extracted from a GraphQL query. package local_get import ( "github.com/creativesoftwarefdn/weaviate/database/schema/kind" "github.com/creativesoftwarefdn/weaviate/graphqlapi/common_resolver" test_helper "github.com/creativesoftwarefdn/weaviate/graphqlapi/test/helper" "testing" ) func TestSimpleFieldParamsOK(t *testing.T) { t.Parallel() resolver := newMockResolver() expectedParams := &LocalGetClassParams{ Kind: kind.ACTION_KIND, ClassName: "SomeAction", Properties: []SelectProperty{{Name: "intField", IsPrimitive: true}}, } resolver.On("LocalGetClass", expectedParams). Return(test_helper.EmptyListThunk(), nil).Once() resolver.AssertResolve(t, "{ Get { Actions { SomeAction { intField } } } }") } func
(t *testing.T) { t.Parallel() resolver := newMockResolver() expectedParams := &LocalGetClassParams{ Kind: kind.ACTION_KIND, ClassName: "SomeAction", Properties: []SelectProperty{{Name: "intField", IsPrimitive: true}}, } resolver.On("LocalGetClass", expectedParams). Return(test_helper.EmptyListThunk(), nil).Once() query := "{ Get { Actions { SomeAction { intField } } } }" resolver.AssertResolve(t, query) } func TestExtractPagination(t *testing.T) { t.Parallel() resolver := newMockResolver() expectedParams := &LocalGetClassParams{ Kind: kind.ACTION_KIND, ClassName: "SomeAction", Properties: []SelectProperty{{Name: "intField", IsPrimitive: true}}, Pagination: &common_resolver.Pagination{ First: 10, After: 20, }, } resolver.On("LocalGetClass", expectedParams). Return(test_helper.EmptyListThunk(), nil).Once() query := "{ Get { Actions { SomeAction(first:10, after: 20) { intField } } } }" resolver.AssertResolve(t, query) } func TestGetRelation(t *testing.T) { t.Parallel() resolver := newMockResolver() expectedParams := &LocalGetClassParams{ Kind: kind.ACTION_KIND, ClassName: "SomeAction", Properties: []SelectProperty{ { Name: "HasAction", IsPrimitive: false, Refs: []SelectClass{ { ClassName: "SomeAction", RefProperties: []SelectProperty{ { Name: "intField", IsPrimitive: true, }, { Name: "HasAction", IsPrimitive: false, Refs: []SelectClass{ { ClassName: "SomeAction", RefProperties: []SelectProperty{ { Name: "intField", IsPrimitive: true, }, }, }, }, }, }, }, }, }, }, } resolver.On("LocalGetClass", expectedParams). Return(test_helper.EmptyListThunk(), nil).Once() query := "{ Get { Actions { SomeAction { HasAction { ... on SomeAction { intField, HasAction { ... on SomeAction { intField } } } } } } } }" resolver.AssertResolve(t, query) }
TestExtractIntField
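The cases above exercise pagination and nested references separately; the sketch below combines them in one query, under the assumption that pagination arguments may appear on a class that also selects a reference property. It reuses only types and helpers already present in this test file (LocalGetClassParams, SelectProperty, SelectClass, common_resolver.Pagination, newMockResolver, test_helper.EmptyListThunk), so it is meant to sit alongside the tests above rather than stand alone.

func TestExtractPaginationWithRelation(t *testing.T) {
	t.Parallel()

	resolver := newMockResolver()

	// Expected extraction for a paginated selection of a reference property.
	expectedParams := &LocalGetClassParams{
		Kind:      kind.ACTION_KIND,
		ClassName: "SomeAction",
		Properties: []SelectProperty{
			{
				Name:        "HasAction",
				IsPrimitive: false,
				Refs: []SelectClass{
					{
						ClassName: "SomeAction",
						RefProperties: []SelectProperty{
							{Name: "intField", IsPrimitive: true},
						},
					},
				},
			},
		},
		Pagination: &common_resolver.Pagination{First: 5, After: 0},
	}

	resolver.On("LocalGetClass", expectedParams).
		Return(test_helper.EmptyListThunk(), nil).Once()

	query := "{ Get { Actions { SomeAction(first:5, after: 0) { HasAction { ... on SomeAction { intField } } } } } }"
	resolver.AssertResolve(t, query)
}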
index.d.ts
// FILE GENERATED BY `[email protected]` // https://github.com/Swatinem/rollup-plugin-dts /** * ASN1 type */ interface IEmptyConstructor<T> { new (): T; } /** * Allows to convert ASN.1 object to JS value and back */ interface IAsnConverter<T = any, AsnType = any> { /** * Returns JS value from ASN.1 object * @param value ASN.1 object from asn1js module */ fromASN(value: AsnType): T; /** * Returns ASN.1 object from JS value * @param value JS value */ toASN(value: T): AsnType; } declare type IntegerConverterType = string | number; declare type AnyConverterType = ArrayBuffer | null; /** * Allows an object to control its own ASN.1 serialization and deserialization */ interface IAsnConvertible<T = any> { fromASN(asn: T): this; toASN(): T; } /** * NOTE: Converter MUST have name Asn<Asn1PropType.name>Converter. * Asn1Prop decorator link custom converters by name of the Asn1PropType */ /** * ASN.1 ANY converter */ declare const AsnAnyConverter: IAsnConverter<AnyConverterType>; /** * ASN.1 INTEGER to Number/String converter */ declare const AsnIntegerConverter: IAsnConverter<IntegerConverterType>; /** * ASN.1 ENUMERATED converter */ declare const AsnEnumeratedConverter: IAsnConverter<number>; /** * ASN.1 INTEGER to ArrayBuffer converter */ declare const AsnIntegerArrayBufferConverter: IAsnConverter<ArrayBuffer>; /** * ASN.1 BIT STRING converter */ declare const AsnBitStringConverter: IAsnConverter<ArrayBuffer>; /** * ASN.1 OBJECT IDENTIFIER converter */ declare const AsnObjectIdentifierConverter: IAsnConverter<string>; /** * ASN.1 BOOLEAN converter */ declare const AsnBooleanConverter: IAsnConverter<boolean>; /** * ASN.1 OCTET_STRING converter */ declare const AsnOctetStringConverter: IAsnConverter<ArrayBuffer>; /** * ASN.1 UTF8_STRING converter */ declare const AsnUtf8StringConverter: IAsnConverter<string, any>; /** * ASN.1 BPM STRING converter */ declare const AsnBmpStringConverter: IAsnConverter<string, any>; /** * ASN.1 UNIVERSAL STRING converter */ declare const AsnUniversalStringConverter: IAsnConverter<string, any>; /** * ASN.1 NUMERIC STRING converter */ declare const AsnNumericStringConverter: IAsnConverter<string, any>; /** * ASN.1 PRINTABLE STRING converter */ declare const AsnPrintableStringConverter: IAsnConverter<string, any>; /** * ASN.1 TELETEX STRING converter */ declare const AsnTeletexStringConverter: IAsnConverter<string, any>; /** * ASN.1 VIDEOTEX STRING converter */ declare const AsnVideotexStringConverter: IAsnConverter<string, any>; /** * ASN.1 IA5 STRING converter */ declare const AsnIA5StringConverter: IAsnConverter<string, any>; /** * ASN.1 GRAPHIC STRING converter */ declare const AsnGraphicStringConverter: IAsnConverter<string, any>; /** * ASN.1 VISIBLE STRING converter */ declare const AsnVisibleStringConverter: IAsnConverter<string, any>; /** * ASN.1 GENERAL STRING converter */ declare const AsnGeneralStringConverter: IAsnConverter<string, any>; /** * ASN.1 CHARACTER STRING converter */ declare const AsnCharacterStringConverter: IAsnConverter<string, any>; /** * ASN.1 UTCTime converter */ declare const AsnUTCTimeConverter: IAsnConverter<Date>; /** * ASN.1 GeneralizedTime converter */ declare const AsnGeneralizedTimeConverter: IAsnConverter<Date>; /** * ASN.1 types for classes */ declare enum AsnTypeTypes { Sequence = 0, Set = 1, Choice = 2 } /** * ASN.1 types for properties */ declare enum AsnPropTypes { Any = 0, Boolean = 1, OctetString = 2, BitString = 3, Integer = 4, Enumerated = 5, ObjectIdentifier = 6, Utf8String = 7, BmpString = 8, UniversalString = 9, 
NumericString = 10, PrintableString = 11, TeletexString = 12, VideotexString = 13, IA5String = 14, GraphicString = 15, VisibleString = 16, GeneralString = 17, CharacterString = 18, UTCTime = 19, GeneralizedTime = 20, DATE = 21, TimeOfDay = 22, DateTime = 23, Duration = 24, TIME = 25, Null = 26 } interface IAsn1TypeOptions { type: AsnTypeTypes; } interface IAsn1PropOptions { type: AsnPropTypes | IEmptyConstructor<any>; optional?: boolean; defaultValue?: any; context?: number; implicit?: boolean; converter?: IAsnConverter; repeated?: boolean; } declare const AsnType: (options: IAsn1TypeOptions) => (target: object) => void; declare const AsnProp: (options: IAsn1PropOptions) => (target: object, propertyKey: string) => void; /** * Deserializes objects from ASN.1 encoded data */ declare class
{ /** * Deserializes an object from the ASN.1 encoded buffer * @param data ASN.1 encoded buffer * @param target Target schema for object deserialization */ static parse<T>(data: BufferSource, target: IEmptyConstructor<T>): T; /** * Deserializes an object from the asn1js object * @param asn1Schema asn1js object * @param target Target schema for object deserialization */ static fromASN<T>(asn1Schema: any, target: IEmptyConstructor<T>): any; } /** * Serializes objects into ASN.1 encoded data */ declare class AsnSerializer { /** * Serializes an object to the ASN.1 encoded buffer * @param obj The object to serialize */ static serialize(obj: any): ArrayBuffer; /** * Serialize an object to the asn1js object * @param obj The object to serialize */ static toASN(obj: any): any; } export { AsnAnyConverter, AsnBitStringConverter, AsnBmpStringConverter, AsnBooleanConverter, AsnCharacterStringConverter, AsnEnumeratedConverter, AsnGeneralStringConverter, AsnGeneralizedTimeConverter, AsnGraphicStringConverter, AsnIA5StringConverter, AsnIntegerArrayBufferConverter, AsnIntegerConverter, AsnNumericStringConverter, AsnObjectIdentifierConverter, AsnOctetStringConverter, AsnParser, AsnPrintableStringConverter, AsnProp, AsnPropTypes, AsnSerializer, AsnTeletexStringConverter, AsnType, AsnTypeTypes, AsnUTCTimeConverter, AsnUniversalStringConverter, AsnUtf8StringConverter, AsnVideotexStringConverter, AsnVisibleStringConverter, IAsnConverter, IAsnConvertible };
AsnParser
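The declarations above describe schema-driven ASN.1 serialization: @AsnType and @AsnProp attach a schema to a class, and AsnParser.parse / AsnSerializer.serialize convert between encoded bytes and instances. As a rough analogue only, not this library's API, the following sketch shows the same round-trip idea using Go's standard encoding/asn1 package, where struct tags play the role of the property decorators.

package main

import (
	"encoding/asn1"
	"fmt"
)

// Person plays the role of a decorated class: asn1.Marshal encodes the struct
// as an ASN.1 SEQUENCE, and the "utf8" tag marks Name as a UTF8String,
// loosely analogous to AsnProp({ type: AsnPropTypes.Utf8String }).
type Person struct {
	Name string `asn1:"utf8"`
	Age  int
}

func main() {
	der, err := asn1.Marshal(Person{Name: "alice", Age: 30})
	if err != nil {
		panic(err)
	}
	fmt.Printf("DER: % x\n", der)

	var p Person
	if _, err := asn1.Unmarshal(der, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name, p.Age) // alice 30
}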
layout_cmdline_token.rs
/* automatically generated by rust-bindgen */ #![allow(non_snake_case)] /** * Stores a pointer to the ops struct, and the offset: the place to * write the parsed result in the destination structure. */ #[repr(C)] #[derive(Debug, Copy)] pub struct cmdline_token_hdr { pub ops: *mut cmdline_token_ops, pub offset: ::std::os::raw::c_uint, } #[test] fn bindgen_test_layout_cmdline_token_hdr() { assert_eq!(::std::mem::size_of::<cmdline_token_hdr>() , 16usize , concat ! ( "Size of: " , stringify ! ( cmdline_token_hdr ) )); assert_eq! (::std::mem::align_of::<cmdline_token_hdr>() , 8usize , concat ! ( "Alignment of " , stringify ! ( cmdline_token_hdr ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_hdr ) ) . ops as * const _ as usize } , 0usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_hdr ) , "::" , stringify ! ( ops ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_hdr ) ) . offset as * const _ as usize } , 8usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_hdr ) , "::" , stringify ! ( offset ) )); } impl Clone for cmdline_token_hdr { fn clone(&self) -> Self { *self } } impl Default for cmdline_token_hdr { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } pub type cmdline_parse_token_hdr_t = cmdline_token_hdr; /** * A token is defined by this structure. * * parse() takes the token as first argument, then the source buffer * starting at the token we want to parse. The 3rd arg is a pointer * where we store the parsed data (as binary). It returns the number of * parsed chars on success and a negative value on error. * * complete_get_nb() returns the number of possible values for this * token if completion is possible. If it is NULL or if it returns 0, * no completion is possible. * * complete_get_elt() copy in dstbuf (the size is specified in the * parameter) the i-th possible completion for this token. returns 0 * on success or and a negative value on error. * * get_help() fills the dstbuf with the help for the token. It returns * -1 on error and 0 on success. */ #[repr(C)] #[derive(Debug, Copy)] pub struct cmdline_token_ops { /** parse(token ptr, buf, res pts, buf len) */ pub parse: ::std::option::Option<unsafe extern "C" fn(arg1: *mut cmdline_parse_token_hdr_t, arg2: *const ::std::os::raw::c_char, arg3: *mut ::std::os::raw::c_void, arg4: ::std::os::raw::c_uint) -> ::std::os::raw::c_int>, /** return the num of possible choices for this token */ pub complete_get_nb: ::std::option::Option<unsafe extern "C" fn(arg1: *mut cmdline_parse_token_hdr_t) -> ::std::os::raw::c_int>, /** return the elt x for this token (token, idx, dstbuf, size) */ pub complete_get_elt: ::std::option::Option<unsafe extern "C" fn(arg1: *mut cmdline_parse_token_hdr_t, arg2: ::std::os::raw::c_int, arg3: *mut ::std::os::raw::c_char, arg4: ::std::os::raw::c_uint) -> ::std::os::raw::c_int>, /** get help for this token (token, dstbuf, size) */ pub get_help: ::std::option::Option<unsafe extern "C" fn(arg1: *mut cmdline_parse_token_hdr_t, arg2: *mut ::std::os::raw::c_char, arg3: ::std::os::raw::c_uint) -> ::std::os::raw::c_int>, } #[test] fn bindgen_test_layout_cmdline_token_ops() { assert_eq!(::std::mem::size_of::<cmdline_token_ops>() , 32usize , concat ! ( "Size of: " , stringify ! ( cmdline_token_ops ) )); assert_eq! (::std::mem::align_of::<cmdline_token_ops>() , 8usize , concat ! ( "Alignment of " , stringify ! ( cmdline_token_ops ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_ops ) ) . parse as * const _ as usize } , 0usize , concat ! 
( "Alignment of field: " , stringify ! ( cmdline_token_ops ) , "::" , stringify ! ( parse ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_ops ) ) . complete_get_nb as * const _ as usize } , 8usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_ops ) , "::" , stringify ! ( complete_get_nb ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_ops ) ) . complete_get_elt as * const _ as usize } , 16usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_ops ) , "::" , stringify ! ( complete_get_elt ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_ops ) ) . get_help as * const _ as usize } , 24usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_ops ) , "::" , stringify ! ( get_help ) )); } impl Clone for cmdline_token_ops { fn clone(&self) -> Self { *self } } impl Default for cmdline_token_ops { fn
() -> Self { unsafe { ::std::mem::zeroed() } } } #[repr(u32)] #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum cmdline_numtype { UINT8 = 0, UINT16 = 1, UINT32 = 2, UINT64 = 3, INT8 = 4, INT16 = 5, INT32 = 6, INT64 = 7, } #[repr(C)] #[derive(Debug, Copy)] pub struct cmdline_token_num_data { pub type_: cmdline_numtype, } #[test] fn bindgen_test_layout_cmdline_token_num_data() { assert_eq!(::std::mem::size_of::<cmdline_token_num_data>() , 4usize , concat ! ( "Size of: " , stringify ! ( cmdline_token_num_data ) )); assert_eq! (::std::mem::align_of::<cmdline_token_num_data>() , 4usize , concat ! ( "Alignment of " , stringify ! ( cmdline_token_num_data ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_num_data ) ) . type_ as * const _ as usize } , 0usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_num_data ) , "::" , stringify ! ( type_ ) )); } impl Clone for cmdline_token_num_data { fn clone(&self) -> Self { *self } } impl Default for cmdline_token_num_data { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } #[repr(C)] #[derive(Debug, Copy)] pub struct cmdline_token_num { pub hdr: cmdline_token_hdr, pub num_data: cmdline_token_num_data, } #[test] fn bindgen_test_layout_cmdline_token_num() { assert_eq!(::std::mem::size_of::<cmdline_token_num>() , 24usize , concat ! ( "Size of: " , stringify ! ( cmdline_token_num ) )); assert_eq! (::std::mem::align_of::<cmdline_token_num>() , 8usize , concat ! ( "Alignment of " , stringify ! ( cmdline_token_num ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_num ) ) . hdr as * const _ as usize } , 0usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_num ) , "::" , stringify ! ( hdr ) )); assert_eq! (unsafe { & ( * ( 0 as * const cmdline_token_num ) ) . num_data as * const _ as usize } , 16usize , concat ! ( "Alignment of field: " , stringify ! ( cmdline_token_num ) , "::" , stringify ! ( num_data ) )); } impl Clone for cmdline_token_num { fn clone(&self) -> Self { *self } } impl Default for cmdline_token_num { fn default() -> Self { unsafe { ::std::mem::zeroed() } } } pub type cmdline_parse_token_num_t = cmdline_token_num;
default
autocomplete.component.ts
import { Component, Input, Output, EventEmitter, TemplateRef, ViewChild, HostListener, ElementRef } from '@angular/core'; import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms'; import { Observable, Subject, noop, from } from 'rxjs'; import { IonSearchbar } from '@ionic/angular'; // searchbar default options const defaultOpts = { cancelButtonText: 'Cancel', showCancelButton: false, debounce: 250, placeholder: 'Search', autocomplete: 'off', autocorrect: 'off', spellcheck: 'off', type: 'search', value: '', noItems: '', clearOnEdit: false, clearInput: false }; @Component({ selector: 'ion-auto-complete', template: ` <ion-input #inputElem (keyup)="getItems($event)" (click)="handleTap($event)" [(ngModel)]="keyword" (ngModelChange)="updateModel()" [placeholder]="options.placeholder == null ? defaultOpts.placeholder : options.placeholder" [type]="options.type == null ? defaultOpts.type : options.type" [clearOnEdit]="options.clearOnEdit == null ? defaultOpts.clearOnEdit : options.clearOnEdit" [clearInput]="options.clearInput == null ? defaultOpts.clearInput : options.clearInput" [disabled]="disabled" [ngClass]="{ hidden: !useIonInput }" (ionFocus)="onFocus()" (ionBlur)="onBlur()" > </ion-input> <ion-searchbar #searchbarElem (ionInput)="getItems($event)" (click)="handleTap($event)" [(ngModel)]="keyword" (ngModelChange)="updateModel()" [cancelButtonText]="options.cancelButtonText == null ? defaultOpts.cancelButtonText : options.cancelButtonText" [showCancelButton]="options.showCancelButton == null ? defaultOpts.showCancelButton : options.showCancelButton" [debounce]="options.debounce == null ? defaultOpts.debounce : options.debounce" [placeholder]="options.placeholder == null ? defaultOpts.placeholder : options.placeholder" [autocomplete]="options.autocomplete == null ? defaultOpts.autocomplete : options.autocomplete" [autocorrect]="options.autocorrect == null ? defaultOpts.autocorrect : options.autocorrect" [spellcheck]="options.spellcheck == null ? defaultOpts.spellcheck : options.spellcheck" [type]="options.type == null ? 
defaultOpts.type : options.type" [disabled]="disabled" [ngClass]="{ hidden: useIonInput }" (ionClear)="clearValue(true)" (ionFocus)="onFocus()" (ionBlur)="onBlur()" > </ion-searchbar> <ng-template #defaultTemplate let-attrs="attrs"> <span [innerHTML]="attrs.label | boldprefix: attrs.keyword"></span> </ng-template> <ul *ngIf="!disabled && suggestions.length > 0 && showList"> <li *ngFor="let suggestion of suggestions" (click)="handleSelectTap($event, suggestion)"> <ng-template [ngTemplateOutlet]="template || defaultTemplate" [ngTemplateOutletContext]="{ attrs: { data: suggestion, label: getLabel(suggestion), keyword: keyword, formValue: getFormValue(suggestion), labelAttribute: dataProvider.labelAttribute, formValueAttribute: dataProvider.formValueAttribute } }" ></ng-template> </li> </ul> <p *ngIf="suggestions.length == 0 && showList && options.noItems"> {{ options.noItems }} </p> `, providers: [ { provide: NG_VALUE_ACCESSOR, useExisting: AutoCompleteComponent, multi: true } ] }) export class AutoCompleteComponent implements ControlValueAccessor { @Input() public dataProvider: any; @Input() public options: any; @Input() public disabled: any; @Input() public keyword: string; @Input() public showResultsFirst: boolean; @Input() public alwaysShowList: boolean; @Input() public hideListOnSelection: boolean = true; @Input() public template: TemplateRef<any>; @Input() public useIonInput: boolean; @Output() public autoFocus: EventEmitter<any>; @Output() public autoBlur: EventEmitter<any>; @Output() public itemSelected: EventEmitter<any>; @Output() public itemsShown: EventEmitter<any>; @Output() public itemsHidden: EventEmitter<any>; @Output() public ionAutoInput: EventEmitter<string>; @ViewChild('searchbarElem', { read: ElementRef }) searchbarElem: ElementRef<IonSearchbar>; @ViewChild('inputElem') inputElem; private onTouchedCallback: () => void = noop; private onChangeCallback: (_: any) => void = noop; public defaultOpts: any; public suggestions: any[]; public formValue: any; private wasClickedInside = false; public get showList(): boolean { return this._showList; } public set showList(value: boolean) { if (this._showList === value) { return; } this._showList = value; this.showListChanged = true; } private _showList: boolean; private selection: any; private showListChanged: boolean = false; /** * create a new instace */ public constructor() { this.keyword = '';
this.itemsHidden = new EventEmitter<any>(); this.ionAutoInput = new EventEmitter<string>(); this.autoFocus = new EventEmitter<any>(); this.autoBlur = new EventEmitter<any>(); this.options = {}; // set default options this.defaultOpts = defaultOpts; } /** * handle tap * @param event */ public handleTap(event) { if (this.showResultsFirst || this.keyword.length > 0) { this.getItems(); } } public handleSelectTap($event, suggestion): boolean { this.select(suggestion); $event.stopPropagation(); $event.preventDefault(); return false; } public writeValue(value: any) { if (value !== this.selection) { this.selection = value || null; this.formValue = this.getFormValue(this.selection); this.keyword = this.getLabel(this.selection); } } public registerOnChange(fn: any) { this.onChangeCallback = fn; } public registerOnTouched(fn: any) { this.onTouchedCallback = fn; } public updateModel() { this.onChangeCallback(this.formValue); } ngAfterViewChecked() { if (this.showListChanged) { this.showListChanged = false; this.showList ? this.itemsShown.emit() : this.itemsHidden.emit(); } } /** * get items for auto-complete */ public getItems(e?: Event) { let result; if (this.showResultsFirst && this.keyword.trim() === '') { this.keyword = ''; } else if (this.keyword.trim() === '') { this.suggestions = []; return; } if (typeof this.dataProvider === 'function') { result = this.dataProvider(this.keyword); } else { result = this.dataProvider.getResults(this.keyword); } // if result is instanceof Subject, use it asObservable if (result instanceof Subject) { result = result.asObservable(); } if (result instanceof Promise) { result = from(result); } // if query is async if (result instanceof Observable) { result.subscribe( (results: any[]) => { this.suggestions = results; this.showItemList(); }, (error: any) => console.error(error) ); } else { this.suggestions = result; this.showItemList(); } // emit event this.ionAutoInput.emit(this.keyword); } /** * show item list */ public showItemList(): void { this.showList = true; } /** * hide item list */ public hideItemList(): void { this.showList = this.alwaysShowList; } /** * select item from list * * @param event * @param selection **/ public select(selection: any): void { this.keyword = this.getLabel(selection); this.formValue = this.getFormValue(selection); this.hideItemList(); // emit selection event this.updateModel(); if (this.hideListOnSelection) { this.hideItemList(); } // emit selection event this.itemSelected.emit(selection); this.selection = selection; } /** * get current selection * @returns */ public getSelection(): any { return this.selection; } /** * get current input value * @returns */ public getValue() { return this.formValue; } /** * set current input value */ public setValue(selection: any) { this.formValue = this.getFormValue(selection); this.keyword = this.getLabel(selection); return; } /** /** * clear current input value */ public clearValue(hideItemList: boolean = false) { this.keyword = ''; this.selection = null; this.formValue = null; if (hideItemList) { this.hideItemList(); } return; } /** * set focus of searchbar */ public setFocus() { if (this.searchbarElem) { this.searchbarElem.nativeElement.setFocus(); } } /** * fired when the input focused */ onFocus() { this.autoFocus.emit(); } /** * fired when the input focused */ onBlur() { this.autoBlur.emit(); } /** * handle document click * @param event */ @HostListener('click') clickInside() { this.wasClickedInside = true; } @HostListener('document:click') clickout() { if (!this.wasClickedInside) { 
this.hideItemList(); } this.wasClickedInside = false; } private getFormValue(selection: any): any { if (selection == null) { return null; } let attr = this.dataProvider.formValueAttribute == null ? this.dataProvider.labelAttribute : this.dataProvider.formValueAttribute; if (typeof selection === 'object' && attr) { return selection[attr]; } return selection; } private getLabel(selection: any): string { if (selection == null) { return ''; } let attr = this.dataProvider.labelAttribute; let value = selection; if (this.dataProvider.getItemLabel) { value = this.dataProvider.getItemLabel(value); } if (typeof value === 'object' && attr) { return value[attr] || ''; } return value || ''; } }
this.suggestions = []; this._showList = false; this.itemSelected = new EventEmitter<any>(); this.itemsShown = new EventEmitter<any>();
chart_downloader_test.go
/* Copyright The Helm Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package downloader import ( "os" "path/filepath" "testing" "helm.sh/helm/v3/internal/test/ensure" "helm.sh/helm/v3/pkg/cli" "helm.sh/helm/v3/pkg/getter" "helm.sh/helm/v3/pkg/repo" "helm.sh/helm/v3/pkg/repo/repotest" ) const ( repoConfig = "testdata/repositories.yaml" repoCache = "testdata/repository" ) func TestResolveChartRef(t *testing.T) { tests := []struct { name, ref, expect, version string fail bool }{ {name: "full URL", ref: "http://example.com/foo-1.2.3.tgz", expect: "http://example.com/foo-1.2.3.tgz"}, {name: "full URL, HTTPS", ref: "https://example.com/foo-1.2.3.tgz", expect: "https://example.com/foo-1.2.3.tgz"}, {name: "full URL, with authentication", ref: "http://username:[email protected]/foo-1.2.3.tgz", expect: "http://username:[email protected]/foo-1.2.3.tgz"}, {name: "reference, testing repo", ref: "testing/alpine", expect: "http://example.com/alpine-1.2.3.tgz"}, {name: "reference, version, testing repo", ref: "testing/alpine", version: "0.2.0", expect: "http://example.com/alpine-0.2.0.tgz"}, {name: "reference, version, malformed repo", ref: "malformed/alpine", version: "1.2.3", expect: "http://dl.example.com/alpine-1.2.3.tgz"}, {name: "reference, querystring repo", ref: "testing-querystring/alpine", expect: "http://example.com/alpine-1.2.3.tgz?key=value"}, {name: "reference, testing-relative repo", ref: "testing-relative/foo", expect: "http://example.com/helm/charts/foo-1.2.3.tgz"}, {name: "reference, testing-relative repo", ref: "testing-relative/bar", expect: "http://example.com/helm/bar-1.2.3.tgz"}, {name: "reference, testing-relative-trailing-slash repo", ref: "testing-relative-trailing-slash/foo", expect: "http://example.com/helm/charts/foo-1.2.3.tgz"}, {name: "reference, testing-relative-trailing-slash repo", ref: "testing-relative-trailing-slash/bar", expect: "http://example.com/helm/bar-1.2.3.tgz"}, {name: "full URL, HTTPS, irrelevant version", ref: "https://example.com/foo-1.2.3.tgz", version: "0.1.0", expect: "https://example.com/foo-1.2.3.tgz", fail: true}, {name: "full URL, file", ref: "file:///foo-1.2.3.tgz", fail: true}, {name: "invalid", ref: "invalid-1.2.3", fail: true}, {name: "not found", ref: "nosuchthing/invalid-1.2.3", fail: true}, } c := ChartDownloader{ Out: os.Stderr, RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, RepositoryCache: repoCache, }), } for _, tt := range tests { u, err := c.ResolveChartVersion(tt.ref, tt.version) if err != nil { if tt.fail { continue } t.Errorf("%s: failed with error %q", tt.name, err) continue } if got := u.String(); got != tt.expect { t.Errorf("%s: expected %s, got %s", tt.name, tt.expect, got) } } } func
(t *testing.T) { tests := []struct { name, ref, version string expect []getter.Option }{ { name: "repo with CA-file", ref: "testing-ca-file/foo", expect: []getter.Option{ getter.WithURL("https://example.com/foo-1.2.3.tgz"), getter.WithTLSClientConfig("cert", "key", "ca"), }, }, } c := ChartDownloader{ Out: os.Stderr, RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, RepositoryCache: repoCache, }), } // snapshot options snapshotOpts := c.Options for _, tt := range tests { // reset chart downloader options for each test case c.Options = snapshotOpts expect, err := getter.NewHTTPGetter(tt.expect...) if err != nil { t.Errorf("%s: failed to setup http client: %s", tt.name, err) continue } u, err := c.ResolveChartVersion(tt.ref, tt.version) if err != nil { t.Errorf("%s: failed with error %s", tt.name, err) continue } got, err := getter.NewHTTPGetter( append( c.Options, getter.WithURL(u.String()), )..., ) if err != nil { t.Errorf("%s: failed to create http client: %s", tt.name, err) continue } if *(got.(*getter.HTTPGetter)) != *(expect.(*getter.HTTPGetter)) { t.Errorf("%s: expected %s, got %s", tt.name, expect, got) } } } func TestVerifyChart(t *testing.T) { v, err := VerifyChart("testdata/signtest-0.1.0.tgz", "testdata/helm-test-key.pub") if err != nil { t.Fatal(err) } // The verification is tested at length in the provenance package. Here, // we just want a quick sanity check that the v is not empty. if len(v.FileHash) == 0 { t.Error("Digest missing") } } func TestIsTar(t *testing.T) { tests := map[string]bool{ "foo.tgz": true, "foo/bar/baz.tgz": true, "foo-1.2.3.4.5.tgz": true, "foo.tar.gz": false, // for our purposes "foo.tgz.1": false, "footgz": false, } for src, expect := range tests { if isTar(src) != expect { t.Errorf("%q should be %t", src, expect) } } } func TestDownloadTo(t *testing.T) { srv := repotest.NewTempServerWithCleanupAndBasicAuth(t, "testdata/*.tgz*") defer srv.Stop() if err := srv.CreateIndex(); err != nil { t.Fatal(err) } if err := srv.LinkIndices(); err != nil { t.Fatal(err) } c := ChartDownloader{ Out: os.Stderr, Verify: VerifyAlways, Keyring: "testdata/helm-test-key.pub", RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, RepositoryCache: repoCache, }), Options: []getter.Option{ getter.WithBasicAuth("username", "password"), getter.WithPassCredentialsAll(false), }, } cname := "/signtest-0.1.0.tgz" dest := srv.Root() where, v, err := c.DownloadTo(srv.URL()+cname, "", dest) if err != nil { t.Fatal(err) } if expect := filepath.Join(dest, cname); where != expect { t.Errorf("Expected download to %s, got %s", expect, where) } if v.FileHash == "" { t.Error("File hash was empty, but verification is required.") } if _, err := os.Stat(filepath.Join(dest, cname)); err != nil { t.Error(err) } } func TestDownloadTo_TLS(t *testing.T) { // Set up mock server w/ tls enabled srv, err := repotest.NewTempServerWithCleanup(t, "testdata/*.tgz*") srv.Stop() if err != nil { t.Fatal(err) } srv.StartTLS() defer srv.Stop() if err := srv.CreateIndex(); err != nil { t.Fatal(err) } if err := srv.LinkIndices(); err != nil { t.Fatal(err) } repoConfig := filepath.Join(srv.Root(), "repositories.yaml") repoCache := srv.Root() c := ChartDownloader{ Out: os.Stderr, Verify: VerifyAlways, Keyring: "testdata/helm-test-key.pub", RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, 
RepositoryCache: repoCache, }), Options: []getter.Option{}, } cname := "test/signtest" dest := srv.Root() where, v, err := c.DownloadTo(cname, "", dest) if err != nil { t.Fatal(err) } target := filepath.Join(dest, "signtest-0.1.0.tgz") if expect := target; where != expect { t.Errorf("Expected download to %s, got %s", expect, where) } if v.FileHash == "" { t.Error("File hash was empty, but verification is required.") } if _, err := os.Stat(target); err != nil { t.Error(err) } } func TestDownloadTo_VerifyLater(t *testing.T) { defer ensure.HelmHome(t)() dest := ensure.TempDir(t) defer os.RemoveAll(dest) // Set up a fake repo srv, err := repotest.NewTempServerWithCleanup(t, "testdata/*.tgz*") if err != nil { t.Fatal(err) } defer srv.Stop() if err := srv.LinkIndices(); err != nil { t.Fatal(err) } c := ChartDownloader{ Out: os.Stderr, Verify: VerifyLater, RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, RepositoryCache: repoCache, }), } cname := "/signtest-0.1.0.tgz" where, _, err := c.DownloadTo(srv.URL()+cname, "", dest) if err != nil { t.Fatal(err) } if expect := filepath.Join(dest, cname); where != expect { t.Errorf("Expected download to %s, got %s", expect, where) } if _, err := os.Stat(filepath.Join(dest, cname)); err != nil { t.Fatal(err) } if _, err := os.Stat(filepath.Join(dest, cname+".prov")); err != nil { t.Fatal(err) } } func TestScanReposForURL(t *testing.T) { c := ChartDownloader{ Out: os.Stderr, Verify: VerifyLater, RepositoryConfig: repoConfig, RepositoryCache: repoCache, Getters: getter.All(&cli.EnvSettings{ RepositoryConfig: repoConfig, RepositoryCache: repoCache, }), } u := "http://example.com/alpine-0.2.0.tgz" rf, err := repo.LoadFile(repoConfig) if err != nil { t.Fatal(err) } entry, err := c.scanReposForURL(u, rf) if err != nil { t.Fatal(err) } if entry.Name != "testing" { t.Errorf("Unexpected repo %q for URL %q", entry.Name, u) } // A lookup failure should produce an ErrNoOwnerRepo u = "https://no.such.repo/foo/bar-1.23.4.tgz" if _, err = c.scanReposForURL(u, rf); err != ErrNoOwnerRepo { t.Fatalf("expected ErrNoOwnerRepo, got %v", err) } }
TestResolveChartOpts
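Distilled from the tests above, here is a minimal usage sketch of ChartDownloader: resolve a repo/chart reference to a concrete URL, then download the archive into a directory. It assumes the package is imported as helm.sh/helm/v3/pkg/downloader; the repository config path, cache path, chart reference, and destination directory are placeholders modelled on the test fixtures, and verification is left at its default rather than VerifyAlways with a keyring.

package main

import (
	"fmt"
	"os"

	"helm.sh/helm/v3/pkg/cli"
	"helm.sh/helm/v3/pkg/downloader"
	"helm.sh/helm/v3/pkg/getter"
)

func main() {
	// Placeholder settings: repositories.yaml must list the referenced repo.
	settings := &cli.EnvSettings{
		RepositoryConfig: "repositories.yaml",
		RepositoryCache:  "cache",
	}

	c := downloader.ChartDownloader{
		Out:              os.Stderr,
		RepositoryConfig: settings.RepositoryConfig,
		RepositoryCache:  settings.RepositoryCache,
		Getters:          getter.All(settings),
	}

	// Resolve "repo/chart" plus an optional version to a download URL.
	u, err := c.ResolveChartVersion("testing/alpine", "0.2.0")
	if err != nil {
		panic(err)
	}
	fmt.Println("resolved to:", u.String())

	// Download the chart archive into ./charts.
	if err := os.MkdirAll("./charts", 0o755); err != nil {
		panic(err)
	}
	where, _, err := c.DownloadTo(u.String(), "", "./charts")
	if err != nil {
		panic(err)
	}
	fmt.Println("saved to:", where)
}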
consoleTools.js
consoleToolsRequire=(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({"D:\\Catalin\\Munca\\privatesky\\builds\\tmp\\consoleTools_intermediar.js":[function(require,module,exports){ (function (global){ global.consoleToolsLoadModules = function(){ if(typeof $$.__runtimeModules["source-map-support"] === "undefined"){ $$.__runtimeModules["source-map-support"] = require("source-map-support"); } if(typeof $$.__runtimeModules["source-map"] === "undefined"){ $$.__runtimeModules["source-map"] = require("source-map"); } if(typeof $$.__runtimeModules["buffer-from"] === "undefined"){ $$.__runtimeModules["buffer-from"] = require("buffer-from"); } if(typeof $$.__runtimeModules["pskwallet"] === "undefined"){ $$.__runtimeModules["pskwallet"] = require("pskwallet"); } if(typeof $$.__runtimeModules["buffer-crc32"] === "undefined"){ $$.__runtimeModules["buffer-crc32"] = require("buffer-crc32"); } if(typeof $$.__runtimeModules["node-fd-slicer"] === "undefined"){ $$.__runtimeModules["node-fd-slicer"] = require("node-fd-slicer"); } if(typeof $$.__runtimeModules["csb-wizard"] === "undefined"){ $$.__runtimeModules["csb-wizard"] = require("csb-wizard"); } }; if (false) { consoleToolsLoadModules(); } global.consoleToolsRequire = require; if (typeof $$ !== "undefined") { $$.requireBundle("consoleTools"); } require('source-map-support').install({}); }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? 
window : {}) },{"buffer-crc32":"buffer-crc32","buffer-from":"buffer-from","csb-wizard":"csb-wizard","node-fd-slicer":"node-fd-slicer","pskwallet":"pskwallet","source-map":"source-map","source-map-support":"source-map-support"}],"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\CSBWizard.js":[function(require,module,exports){ (function (__dirname){ const path = require('path'); const fs = require('fs'); const VirtualMQ = require('virtualmq'); const httpWrapper = VirtualMQ.getHttpWrapper(); const httpUtils = httpWrapper.httpUtils; const Server = httpWrapper.Server; const crypto = require('pskcrypto'); const interact = require('interact'); const serverCommands = require('./utils/serverCommands'); const executioner = require('./utils/executioner'); const url = require('url'); function CSBWizard({listeningPort, rootFolder, sslConfig}, callback) { const port = listeningPort || 8081; const server = new Server(sslConfig).listen(port); const randSize = 32; rootFolder = path.join(rootFolder, 'CSB_TMP'); console.log("Listening on port:", port); fs.mkdir(rootFolder, {recursive: true}, (err) => { if(err) { throw err; } console.log("Local folder:", rootFolder); registerEndpoints(); if(typeof callback === 'function') { return callback(); } }); function registerEndpoints() { server.use((req, res, next) => { res.setHeader('Access-Control-Allow-Origin', '*'); res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, DELETE'); res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Access-Control-Allow-Origin'); res.setHeader('Access-Control-Allow-Credentials', true); next(); }); server.post('/beginCSB', (req, res) => { const transactionId = crypto.randomBytes(randSize).toString('hex'); fs.mkdir(path.join(rootFolder, transactionId), {recursive: true}, (err) => { if (err) { res.statusCode = 500; res.end(); return; } res.end(transactionId); }); }); server.post('/attachFile', (req, res) => { res.statusCode = 400; res.end('Illegal url, missing transaction id'); }); server.post('/attachFile/:transactionId/:fileAlias', (req, res) => { const transactionId = req.params.transactionId; const fileObj = { fileName: req.params.fileAlias, stream: req }; serverCommands.attachFile(path.join(rootFolder, transactionId), fileObj, (err) => { if(err) { if(err.code === 'EEXIST') { res.statusCode = 409; } else { res.statusCode = 500; } } res.end(); }); }); server.post('/addBackup', (req, res) => { res.statusCode = 400; res.end('Illegal url, missing transaction id'); }); server.post('/addBackup/:transactionId', httpUtils.bodyParser); server.post('/addBackup/:transactionId', (req, res) => { const transactionId = req.params.transactionId; const backupObj = { endpoint: req.body }; serverCommands.addBackup(path.join(rootFolder, transactionId), backupObj, (err) => { if(err) { res.statusCode = 500; } res.end(); }); }); server.post('/buildCSB', (req, res) => { res.statusCode = 400; res.end('Illegal url, missing transaction id'); }); server.post('/buildCSB/:transactionId', httpUtils.bodyParser); server.post('/buildCSB/:transactionId', (req, res) => { const transactionId = req.params.transactionId; executioner.executioner(path.join(rootFolder, transactionId), (err, seed) => { if(err) { res.statusCode = 500; console.log("Error", err); res.end(); return; } const body = JSON.parse(req.body); if(body.url !== '' && body.channel !== '') { const endpoint = new url.URL(body.url).origin; const channel = body.channel; const ris = interact.createRemoteInteractionSpace('remote', endpoint, channel); ris.startSwarm('notifier', 'init', 
seed.toString()); } res.end(seed.toString()); }); }); server.use('/web', (req, res) => { res.statusCode = 303; let redirectLocation = 'index.html'; if(!req.url.endsWith('/')) { redirectLocation = '/web/' + redirectLocation; } res.setHeader("Location", redirectLocation); res.end(); }); server.use('/web/*', httpUtils.serveStaticFile(path.join(__dirname, 'web'), '/web')); server.use((req, res) => { res.statusCode = 404; res.end(); }); } } module.exports = CSBWizard; }).call(this,"/modules/csb-wizard") },{"./utils/executioner":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\executioner.js","./utils/serverCommands":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\serverCommands.js","fs":false,"interact":false,"path":false,"pskcrypto":false,"url":false,"virtualmq":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\CommandsAssistant.js":[function(require,module,exports){ const fs = require('fs'); const path = require('path'); function CommandsAssistant(localFolder) { const filePath = path.join(localFolder, 'commands.json'); function loadCommands(callback) { fs.mkdir(localFolder, {recursive: true}, (err) => { if (err) { return callback(err); } fs.readFile(filePath, (err, commands) => { if (err) { return callback(undefined, []); } callback(undefined, JSON.parse(commands.toString())); }); }); } function saveCommands(commandsArr, callback) { fs.mkdir(localFolder, {recursive: true}, (err) => { if (err) { return callback(err); } fs.writeFile(filePath, JSON.stringify(commandsArr), callback); }); } function addCommand(command, callback) { loadCommands((err, commandsArr) => { if (err) { return callback(err); } commandsArr.push(command); saveCommands(commandsArr, callback); }); } return { addCommand, loadCommands }; } module.exports = CommandsAssistant; },{"fs":false,"path":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\csbInteractions.js":[function(require,module,exports){ const path = require('path'); const is = require("interact").createInteractionSpace(); function createCSB(workingDir, backups, callback) { let savedSeed; is.startSwarm("createCsb", "withoutPin", "", backups, workingDir, undefined, false).on({ printSensitiveInfo: function (seed, defaultPin) { savedSeed = seed; }, handleError: function (err) { callback(err); }, __return__: function () { callback(undefined, savedSeed); } }); } function attachFile(workingDir, fileName, seed, callback) { is.startSwarm("attachFile", "withCSBIdentifier", seed, fileName, path.join(workingDir, fileName), workingDir).on({ handleError: function (err) { callback(err); }, __return__: function () { callback(); } }); } function saveBackup(workingDir, seed, callback) { is.startSwarm("saveBackup", "withCSBIdentifier", seed, workingDir).on({ handleError: function (err) { callback(err); }, csbBackupReport: function (result) { callback(result.errors, result.successes); } }); } module.exports = { attachFile, createCSB, saveBackup }; },{"interact":false,"path":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\executioner.js":[function(require,module,exports){ const csbInteraction = require('./csbInteractions'); const CommandsAssistant = require('./CommandsAssistant'); function executioner(workingDir, callback) { const filteredCommands = []; const backups = []; const commandsAssistant = new CommandsAssistant(workingDir); commandsAssistant.loadCommands((err, commands) => { if (err) { console.log(); } for (let i = 0; i < commands.length; ++i) { if (commands[i].name === 'addBackup') { 
backups.push(commands[i].params.endpoint); continue; } filteredCommands.push(commands[i]); } csbInteraction.createCSB(workingDir, backups, (err, seed) => { if (err) { return callback(err); } executeCommand(filteredCommands, seed, workingDir, 0, (err) => { if (err) { return callback(err); } csbInteraction.saveBackup(workingDir, seed, (errors, successes) => { if (errors) { return callback(errors); } callback(undefined, seed); }); }); }); }); } function executeCommand(commands, seed, workingDir, index = 0, callback) { if (index === commands.length) { return callback(); } const match = judge(commands[index], seed, workingDir, (err) => { if (err) { return callback(err); } executeCommand(commands, seed, workingDir, ++index, callback); }); if (!match) { return callback(new Error('No match for command found' + commands[index].name)); } } function judge(command, seed, workingDir, callback) { switch (command.name) { case 'attachFile': csbInteraction.attachFile(workingDir, command.params.fileName, seed, callback); break; default: return false; } return true; } module.exports = { executioner }; },{"./CommandsAssistant":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\CommandsAssistant.js","./csbInteractions":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\csbInteractions.js"}],"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\serverCommands.js":[function(require,module,exports){ const fs = require("fs"); const path = require("path"); const url = require('url'); const CommandsAssistant = require("./CommandsAssistant"); function attachFile(workingDir, FileObj, callback) { const cmd = { name: 'attachFile', params: { fileName: FileObj.fileName } }; const commandsAssistant = new CommandsAssistant(workingDir); const filePath = path.join(workingDir, FileObj.fileName); fs.access(filePath, (err) => { if (!err) { const e = new Error('File already exists'); e.code = 'EEXIST'; return callback(e); } const file = fs.createWriteStream(filePath); file.on('close', () => { commandsAssistant.addCommand(cmd, callback); }); FileObj.stream.pipe(file); }); } function addBackup(workingDir, backupObj, callback) { try { const endpoint = new url.URL(backupObj.endpoint).origin; const cmd = { name: 'addBackup', params: { endpoint: endpoint } }; const commandAssistant = new CommandsAssistant(workingDir); commandAssistant.addCommand(cmd, callback); } catch (e) { return callback(e); } } module.exports = { attachFile, addBackup }; },{"./CommandsAssistant":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\utils\\CommandsAssistant.js","fs":false,"path":false,"url":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\node-fd-slicer\\modules\\node-pend\\index.js":[function(require,module,exports){ module.exports = Pend; function Pend() { this.pending = 0; this.max = Infinity; this.listeners = []; this.waiting = []; this.error = null; } Pend.prototype.go = function(fn) { if (this.pending < this.max) { pendGo(this, fn); } else { this.waiting.push(fn); } }; Pend.prototype.wait = function(cb) { if (this.pending === 0) { cb(this.error); } else { this.listeners.push(cb); } }; Pend.prototype.hold = function() { return pendHold(this); }; function pendHold(self) { self.pending += 1; var called = false; return onCb; function onCb(err) { if (called) throw new Error("callback called twice"); called = true; self.error = self.error || err; self.pending -= 1; if (self.waiting.length > 0 && self.pending < self.max) { pendGo(self, self.waiting.shift()); } else if (self.pending === 0) { var listeners = self.listeners; 
self.listeners = []; listeners.forEach(cbListener); } } function cbListener(listener) { listener(self.error); } } function pendGo(self, fn) { fn(pendHold(self)); } },{}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\bar.js":[function(require,module,exports){ const utils = require("../utils/utils"); function listFiles(alseed, folderPath) { if (arguments.length === 1) { folderPath = alseed; utils.loadWallet(undefined, (err, wallet) => { if (err) { throw err; } wallet.listFiles(folderPath, (err, files) => { if (err) { throw err; } console.log("Files:", files); }); }); } else { if (utils.isAlias(alseed)) { utils.loadArchiveWithAlias(alseed, (err, bar) => { if (err) { throw err; } bar.listFiles(folderPath, (err, fileList) => { if (err) { throw err; } console.log("Files:", fileList); process.exit(0); }); }); } else { utils.getEDFS(alseed, (err, edfs) => { if (err) { throw err; } const bar = edfs.loadBar(alseed); bar.listFiles(folderPath, (err, fileList) => { if (err) { throw err; } console.log("Files:", fileList); }); }); } } } function extractFolder(alseed, barPath, fsFolderPath) { if (utils.isAlias(alseed)) { utils.loadArchiveWithAlias(alseed, (err, bar) => { if (err) { throw err; } bar.extractFolder(fsFolderPath, barPath, (err) => { if (err) { throw err; } console.log("Extracted folder."); process.exit(0); }); }); } else { utils.getEDFS(alseed, (err, edfs) => { if (err) { throw err; } const bar = edfs.loadBar(alseed); bar.extractFolder(fsFolderPath, barPath, (err) => { if (err) { throw err; } console.log("Extracted folder."); }); }); } } function extractFile(alseed, barPath, fsFilePath) { if (utils.isAlias(alseed)) { utils.loadArchiveWithAlias(alseed, (err, bar) => { if (err) { throw err; } bar.extractFile(fsFilePath, barPath, (err) => { if (err) { throw err; } console.log("Extracted file."); process.exit(0); }); }); } else { utils.getEDFS(alseed, (err, edfs) => { if (err) { throw err; } const bar = edfs.loadBar(alseed); bar.extractFile(fsFilePath, barPath, (err) => { if (err) { throw err; } console.log("Extracted file."); }); }); } } addCommand("list", "files", listFiles, " <archiveSeed>/<alias> <folderPath> \t\t\t\t |prints the list of all files stored at path <folderPath> inside the archive whose SEED is <archiveSeed>. 
If an alias is specified then the CSB's SEED is searched from the wallet."); addCommand("extract", "folder", extractFolder, " <archiveSeed> <archivePath> <fsFolderPath> \t\t |extracts the folder stored at <archivePath> inside the archive whose SEED is <archiveSeed> and writes all the extracted file on disk at path <fsFolderPath>"); addCommand("extract", "file", extractFile, " <archiveSeed> <archivePath> <fsFilePath> \t\t |extracts the folder stored at <archivePath> inside the archive whose SEED is <archiveSeed> and writes all the extracted file on disk at path <fsFilePath>"); },{"../utils/utils":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\utils.js"}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\csb.js":[function(require,module,exports){ const utils = require("../utils/utils"); const AGENT_IDENTITY = require("../utils/utils").getOwnIdentity(); function createCSB(domainName, constitutionPath, noSave) { const pth = "path"; const path = require(pth); const EDFS = require("edfs"); if (noSave === "nosave") { const edfs = utils.getInitializedEDFS(); edfs.createBarWithConstitution(path.resolve(constitutionPath), (err, archive) => { if (err) { throw err; } archive.writeFile(EDFS.constants.CSB.DOMAIN_IDENTITY_FILE, domainName, () => { if (err) { throw err; } console.log("The CSB was created. Its SEED is the following."); console.log("SEED", archive.getSeed()); }); }); } else { getPin((err, pin) => { if (err) { throw err; } EDFS.attachWithPin(pin, (err, edfs) => { if (err) { throw err; } edfs.loadWallet(undefined, pin, true, (err, wallet) => { if (err) { throw err; } const dossier = require("dossier"); dossier.load(wallet.getSeed(), AGENT_IDENTITY, (err, csb) => { if (err) { console.error(err); process.exit(1); } csb.startTransaction("StandardCSBTransactions", "domainLookup", domainName).onReturn((err, domain) => { if (err) { console.log(err); process.exit(1); } if (domain) { console.log(`Domain ${domainName} already exists!`); process.exit(1); } edfs.createBarWithConstitution(path.resolve(constitutionPath), (err, archive) => { if (err) { throw err; } csb.startTransaction("StandardCSBTransactions", "addFileAnchor", domainName, "csb", archive.getSeed(), wallet.getMapDigest()).onReturn((err, res) => { if (err) { console.error(err); process.exit(1); } console.log("The CSB was created and a reference to it has been added to the wallet."); console.log("Its SEED is:", archive.getSeed()); process.exit(0); }); }); }); }); }); }); }); } } function setApp(alseed, appPath) { if (!alseed) { throw new Error('Missing first argument, the archive seed or alais'); } if (!appPath) { throw new Error('Missing the second argument, the app path'); } const EDFS = require("edfs"); if (utils.isAlias(alseed)) { utils.loadArchiveWithAlias(alseed, (err, bar) => { if (err) { throw err; } bar.addFolder(appPath, EDFS.constants.CSB.APP_FOLDER, (err) => { if (err) { throw err; } console.log('All done'); }) }); } else { utils.getEDFS(alseed, (err, edfs) => { if (err) { throw err; } const bar = edfs.loadBar(alseed); bar.addFolder(appPath, EDFS.constants.CSB.APP_FOLDER, (err) => { if (err) { throw err; } console.log('All done'); }) }); } } addCommand("create", "csb", createCSB, "<domainName> <constitutionPath> <nosave>\t\t\t\t |creates an archive containing constitutions folder <constitutionPath> for Domain <domainName>"); addCommand("set", "app", setApp, " <seed>/<alias> <folderPath> \t\t\t\t\t |add an app to an existing archive"); 
},{"../utils/utils":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\utils.js","dossier":false,"edfs":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\index.js":[function(require,module,exports){ require("./wallet"); require("./bar"); require("./csb"); },{"./bar":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\bar.js","./csb":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\csb.js","./wallet":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\wallet.js"}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\wallet.js":[function(require,module,exports){ const consoleUtils = require("../utils/consoleUtils"); const utils = require("../utils/utils"); function createWallet(templateSeed) { const Seed = require("bar").Seed; try { new Seed(templateSeed); } catch (e) { throw Error("Invalid template seed"); } const EDFS = require("edfs"); EDFS.checkForSeedCage(err => { const edfs = utils.getInitializedEDFS(); if (!err) { consoleUtils.getFeedback("A wallet already exists. Do you want to create a new one?(y/n)", (err, ans) => { if (err) { throw err; } if (ans[0] === "y") { __createWallet(edfs, true); } }); } else { __createWallet(edfs, false); } }); function __createWallet(edfs, overwrite) { consoleUtils.insertPassword({validationFunction: utils.validatePin}, (err, pin) => { if (err) { console.log(`Caught error: ${err.message}`); process.exit(1); } consoleUtils.insertPassword({ prompt: "Confirm pin:", validationFunction: utils.validatePin }, (err, newPin) => { if (err) { console.log(`Caught error: ${err.message}`); process.exit(1); } if (pin !== newPin) { console.log("The PINs do not coincide. Try again."); __createWallet(edfs, overwrite); } else { edfs.createWallet(templateSeed, pin, overwrite, (err, seed) => { if (err) { throw err; } console.log("Wallet with SEED was created. Please save the SEED:", seed); }); } }); }); } } function restore(seed) { const EDFS = require("edfs"); let edfs; try { edfs = EDFS.attachWithSeed(seed); } catch (e) { throw Error("The provided seed is invalid."); } __saveSeed(); function __saveSeed() { consoleUtils.insertPassword({validationFunction: utils.validatePin}, (err, pin) => { if (err) { console.log(`Caught error: ${err.message}`); process.exit(1); } consoleUtils.insertPassword({ prompt: "Confirm pin:", validationFunction: utils.validatePin }, (err, newPin) => { if (err) { console.log(`Caught error: ${err.message}`); process.exit(1); } if (pin !== newPin) { console.log("The PINs do not coincide. 
Try again."); __saveSeed(); } else { edfs.loadWallet(seed, pin, true, (err, wallet) => { if (err) { throw err; } console.log("Wallet was restored"); }); } }); }); } } function changePin() { utils.loadWallet(undefined, (err, wallet) => { if (err) { throw err; } consoleUtils.insertPassword({prompt: "Insert a new PIN:", validationFunction: utils.validatePin}, (err, pin) => { if (err) { throw err; } utils.getEDFS(wallet.getSeed(), (err, edfs) => { if (err) { throw err; } edfs.loadWallet(wallet.getSeed(), pin, true, (err) => { if (err) { throw err; } console.log("The PIN has been changed."); }); }); }); }); } addCommand("create", "wallet", createWallet, "<templateSeed> \t\t\t\t\t\t |creates a clone of the CSB whose SEED is <templateSeed>"); addCommand("restore", null, restore, "<seed> \t\t\t\t |Checks the seed is valid and allows the selection of a PIN"); addCommand("change", "pin", changePin, "\t\t\t\t |Asks for the PIN and then allows for the selection of a new PIN"); },{"../utils/consoleUtils":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\consoleUtils.js","../utils/utils":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\utils.js","bar":false,"edfs":false}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\consoleUtils.js":[function(require,module,exports){ const rl = "readline"; const readline = require(rl); const getPassword = require("./getPassword").readPassword; const NO_TRIES = 3; const DEFAULT_PROMPT = "Insert pin:"; function insertPassword(options, callback) { if (typeof options === "function") { callback = options; options = {}; } if (!callback) { throw new Error("Misuse of function, reason: No callback given."); } options.prompt = options.prompt || DEFAULT_PROMPT; if (typeof options.noTries === "undefined") { options.noTries = NO_TRIES; } if (options.noTries === 0) { return callback(new Error(`You have inserted an invalid pin ${NO_TRIES} times`)); } else { getPassword(options.prompt, (err, pin)=> { if (options.validationFunction && !options.validationFunction(pin)) { if (options.noTries !== 1) { console.log("Validation failed. 
Maybe you have inserted an invalid character."); console.log("Try again"); } options.noTries--; insertPassword(options, callback); } else { return callback(null, pin); } }); } } function getFeedback(question, callback) { const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); rl.question(question, (answer) => { rl.close(); callback(null, answer); }); } module.exports = { insertPassword, getFeedback, }; },{"./getPassword":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\getPassword.js"}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\getPassword.js":[function(require,module,exports){ exports.readPassword = function (prompt, callback) { const stdin = process.stdin; const stdout = process.stdout; if (prompt) { stdout.write(prompt); } stdin.resume(); stdin.setRawMode(true); stdin.resume(); stdin.setEncoding('utf8'); let password = ""; function escaping(...args) { stdin.removeListener("data", readingInput); stdin.pause(); callback(...args); } function readingInput(data) { switch (data) { case "\x03": stdin.removeListener("data", readingInput); stdin.setRawMode(false); stdin.pause(); break; case "\x0A": case "\x0D": case "\x04": stdout.write('\n'); stdin.setRawMode(false); stdin.pause(); escaping(false, password); break; case "\x08": case "\x7f": password = password.slice(0, password.length - 1); stdout.clearLine(); stdout.cursorTo(0); stdout.write(prompt); for (let i = 0; i < password.length; i++) { stdout.write("*"); } break; default: let str = ""; for (let i = 0; i < data.length; i++) { str += "*"; } stdout.write(str); password += data; } } stdin.on('data', readingInput); }; },{}],"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\utils.js":[function(require,module,exports){ (function (global){ const consoleUtils = require("./consoleUtils"); function getEndpoint() { let endpoint = process.env.EDFS_ENDPOINT; if (typeof endpoint === "undefined") { console.log("Using default endpoint. 
To configure set ENV['EDFS_ENDPOINT']"); endpoint = "http://localhost:8080"; } return endpoint; } function getInitializedEDFS() { const EDFS = require("edfs"); const endpoint = getEndpoint(); return EDFS.attachToEndpoint(endpoint); } function validatePin(pin) { if (typeof pin === "undefined" || pin.length < 4) { return false; } //The regex below checks that the pin only contains utf-8 characters return !/[\x00-\x03]|[\x05-\x07]|[\x09]|[\x0B-\x0C]|[\x0E-\x1F]/.test(pin); } function getEDFS(seed, callback) { const EDFS = require("edfs"); if (!seed) { getPin((err, pin) => { if (err) { return callback(err); } EDFS.attachWithPin(pin, callback); }); } else { callback(undefined, EDFS.attachWithSeed(seed)); } } function loadWallet(walletSeed, callback) { getEDFS(walletSeed, (err, edfs) => { if (err) { return callback(err); } getPin((err, pin) => { if (err) { return callback(err); } edfs.loadWallet(walletSeed, pin, true, callback); }); }); } function loadArchiveWithAlias(alias, callback) { loadWallet(undefined, (err, wallet) => { if (err) { return callback(err); } const dossier = require("dossier"); dossier.load(wallet.getSeed(), getOwnIdentity(), (err, csb) => { if (err) { return callback(err); } csb.startTransaction("StandardCSBTransactions", "getSeed", alias).onReturn((err, seed) => { if (err) { return callback(err); } getEDFS(seed, (err, edfs) => { if (err) { return callback(err); } callback(undefined, edfs.loadBar(seed)); }); }); }); }); } function isAlias(str) { const Seed = require("bar").Seed; try { new Seed(str) } catch (e) { return true; } return false; } function getOwnIdentity() { return "pskwallet-identity"; } let lastPin; let timeStamp; const PIN_LIFETIME = 5000; global.getPin = function (callback) { const currentTimestamp = new Date().getTime(); if (!lastPin || (currentTimestamp - timeStamp) > PIN_LIFETIME) { consoleUtils.insertPassword({validationFunction: validatePin}, (err, pin) => { if (err) { return callback(err); } lastPin = pin; timeStamp = new Date().getTime(); callback(undefined, pin); }); } else { callback(undefined, lastPin); } }; module.exports = { getInitializedEDFS, validatePin, isAlias, loadWallet, getEDFS, getOwnIdentity, loadArchiveWithAlias }; }).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) },{"./consoleUtils":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\utils\\consoleUtils.js","bar":false,"dossier":false,"edfs":false}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\array-set.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var util = require('./util'); var has = Object.prototype.hasOwnProperty; var hasNativeMap = typeof Map !== "undefined"; /** * A data structure which is a combination of an array and a set. Adding a new * member is O(1), testing for membership is O(1), and finding the index of an * element is O(1). Removing elements from the set is not supported. Only * strings are supported for membership. */ function ArraySet() { this._array = []; this._set = hasNativeMap ? new Map() : Object.create(null); } /** * Static method for creating ArraySet instances from an existing array. 
*/ ArraySet.fromArray = function ArraySet_fromArray(aArray, aAllowDuplicates) { var set = new ArraySet(); for (var i = 0, len = aArray.length; i < len; i++) { set.add(aArray[i], aAllowDuplicates); } return set; }; /** * Return how many unique items are in this ArraySet. If duplicates have been * added, than those do not count towards the size. * * @returns Number */ ArraySet.prototype.size = function ArraySet_size() { return hasNativeMap ? this._set.size : Object.getOwnPropertyNames(this._set).length; }; /** * Add the given string to this set. * * @param String aStr */ ArraySet.prototype.add = function ArraySet_add(aStr, aAllowDuplicates) { var sStr = hasNativeMap ? aStr : util.toSetString(aStr); var isDuplicate = hasNativeMap ? this.has(aStr) : has.call(this._set, sStr); var idx = this._array.length; if (!isDuplicate || aAllowDuplicates) { this._array.push(aStr); } if (!isDuplicate) { if (hasNativeMap) { this._set.set(aStr, idx); } else { this._set[sStr] = idx; } } }; /** * Is the given string a member of this set? * * @param String aStr */ ArraySet.prototype.has = function ArraySet_has(aStr) { if (hasNativeMap) { return this._set.has(aStr); } else { var sStr = util.toSetString(aStr); return has.call(this._set, sStr); } }; /** * What is the index of the given string in the array? * * @param String aStr */ ArraySet.prototype.indexOf = function ArraySet_indexOf(aStr) { if (hasNativeMap) { var idx = this._set.get(aStr); if (idx >= 0) { return idx; } } else { var sStr = util.toSetString(aStr); if (has.call(this._set, sStr)) { return this._set[sStr]; } } throw new Error('"' + aStr + '" is not in the set.'); }; /** * What is the element at the given index? * * @param Number aIdx */ ArraySet.prototype.at = function ArraySet_at(aIdx) { if (aIdx >= 0 && aIdx < this._array.length) { return this._array[aIdx]; } throw new Error('No element indexed by ' + aIdx); }; /** * Returns the array representation of this set (which has the proper indices * indicated by indexOf). Note that this is a copy of the internal array used * for storing the members so that no one can mess with internal state. */ ArraySet.prototype.toArray = function ArraySet_toArray() { return this._array.slice(); }; exports.ArraySet = ArraySet; },{"./util":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\base64-vlq.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause * * Based on the Base 64 VLQ implementation in Closure Compiler: * https://code.google.com/p/closure-compiler/source/browse/trunk/src/com/google/debugging/sourcemap/Base64VLQ.java * * Copyright 2011 The Closure Compiler Authors. All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of Google Inc. 
nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ var base64 = require('./base64'); // A single base 64 digit can contain 6 bits of data. For the base 64 variable // length quantities we use in the source map spec, the first bit is the sign, // the next four bits are the actual value, and the 6th bit is the // continuation bit. The continuation bit tells us whether there are more // digits in this value following this digit. // // Continuation // | Sign // | | // V V // 101011 var VLQ_BASE_SHIFT = 5; // binary: 100000 var VLQ_BASE = 1 << VLQ_BASE_SHIFT; // binary: 011111 var VLQ_BASE_MASK = VLQ_BASE - 1; // binary: 100000 var VLQ_CONTINUATION_BIT = VLQ_BASE; /** * Converts from a two-complement value to a value where the sign bit is * placed in the least significant bit. For example, as decimals: * 1 becomes 2 (10 binary), -1 becomes 3 (11 binary) * 2 becomes 4 (100 binary), -2 becomes 5 (101 binary) */ function toVLQSigned(aValue) { return aValue < 0 ? ((-aValue) << 1) + 1 : (aValue << 1) + 0; } /** * Converts to a two-complement value from a value where the sign bit is * placed in the least significant bit. For example, as decimals: * 2 (10 binary) becomes 1, 3 (11 binary) becomes -1 * 4 (100 binary) becomes 2, 5 (101 binary) becomes -2 */ function fromVLQSigned(aValue) { var isNegative = (aValue & 1) === 1; var shifted = aValue >> 1; return isNegative ? -shifted : shifted; } /** * Returns the base 64 VLQ encoded value. */ exports.encode = function base64VLQ_encode(aValue) { var encoded = ""; var digit; var vlq = toVLQSigned(aValue); do { digit = vlq & VLQ_BASE_MASK; vlq >>>= VLQ_BASE_SHIFT; if (vlq > 0) { // There are still more digits in this value, so we must make sure the // continuation bit is marked. digit |= VLQ_CONTINUATION_BIT; } encoded += base64.encode(digit); } while (vlq > 0); return encoded; }; /** * Decodes the next base 64 VLQ value from the given string and returns the * value and the rest of the string via the out parameter. 
*/ exports.decode = function base64VLQ_decode(aStr, aIndex, aOutParam) { var strLen = aStr.length; var result = 0; var shift = 0; var continuation, digit; do { if (aIndex >= strLen) { throw new Error("Expected more digits in base 64 VLQ value."); } digit = base64.decode(aStr.charCodeAt(aIndex++)); if (digit === -1) { throw new Error("Invalid base64 digit: " + aStr.charAt(aIndex - 1)); } continuation = !!(digit & VLQ_CONTINUATION_BIT); digit &= VLQ_BASE_MASK; result = result + (digit << shift); shift += VLQ_BASE_SHIFT; } while (continuation); aOutParam.value = fromVLQSigned(result); aOutParam.rest = aIndex; }; },{"./base64":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\base64.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\base64.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var intToCharMap = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); /** * Encode an integer in the range of 0 to 63 to a single base 64 digit. */ exports.encode = function (number) { if (0 <= number && number < intToCharMap.length) { return intToCharMap[number]; } throw new TypeError("Must be between 0 and 63: " + number); }; /** * Decode a single base 64 character code digit to an integer. Returns -1 on * failure. */ exports.decode = function (charCode) { var bigA = 65; // 'A' var bigZ = 90; // 'Z' var littleA = 97; // 'a' var littleZ = 122; // 'z' var zero = 48; // '0' var nine = 57; // '9' var plus = 43; // '+' var slash = 47; // '/' var littleOffset = 26; var numberOffset = 52; // 0 - 25: ABCDEFGHIJKLMNOPQRSTUVWXYZ if (bigA <= charCode && charCode <= bigZ) { return (charCode - bigA); } // 26 - 51: abcdefghijklmnopqrstuvwxyz if (littleA <= charCode && charCode <= littleZ) { return (charCode - littleA + littleOffset); } // 52 - 61: 0123456789 if (zero <= charCode && charCode <= nine) { return (charCode - zero + numberOffset); } // 62: + if (charCode == plus) { return 62; } // 63: / if (charCode == slash) { return 63; } // Invalid base64 digit. return -1; }; },{}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\binary-search.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ exports.GREATEST_LOWER_BOUND = 1; exports.LEAST_UPPER_BOUND = 2; /** * Recursive implementation of binary search. * * @param aLow Indices here and lower do not contain the needle. * @param aHigh Indices here and higher do not contain the needle. * @param aNeedle The element being searched for. * @param aHaystack The non-empty array being searched. * @param aCompare Function which takes two elements and returns -1, 0, or 1. * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. */ function recursiveSearch(aLow, aHigh, aNeedle, aHaystack, aCompare, aBias) { // This function terminates when one of the following is true: // // 1. We find the exact element we are looking for. // // 2. We did not find the exact element, but we can return the index of // the next-closest element. 
// // 3. We did not find the exact element, and there is no next-closest // element than the one we are searching for, so we return -1. var mid = Math.floor((aHigh - aLow) / 2) + aLow; var cmp = aCompare(aNeedle, aHaystack[mid], true); if (cmp === 0) { // Found the element we are looking for. return mid; } else if (cmp > 0) { // Our needle is greater than aHaystack[mid]. if (aHigh - mid > 1) { // The element is in the upper half. return recursiveSearch(mid, aHigh, aNeedle, aHaystack, aCompare, aBias); } // The exact needle element was not found in this haystack. Determine if // we are in termination case (3) or (2) and return the appropriate thing. if (aBias == exports.LEAST_UPPER_BOUND) { return aHigh < aHaystack.length ? aHigh : -1; } else { return mid; } } else { // Our needle is less than aHaystack[mid]. if (mid - aLow > 1) { // The element is in the lower half. return recursiveSearch(aLow, mid, aNeedle, aHaystack, aCompare, aBias); } // we are in termination case (3) or (2) and return the appropriate thing. if (aBias == exports.LEAST_UPPER_BOUND) { return mid; } else { return aLow < 0 ? -1 : aLow; } } } /** * This is an implementation of binary search which will always try and return * the index of the closest element if there is no exact hit. This is because * mappings between original and generated line/col pairs are single points, * and there is an implicit region between each of them, so a miss just means * that you aren't on the very start of a region. * * @param aNeedle The element you are looking for. * @param aHaystack The array that is being searched. * @param aCompare A function which takes the needle and an element in the * array and returns -1, 0, or 1 depending on whether the needle is less * than, equal to, or greater than the element, respectively. * @param aBias Either 'binarySearch.GREATEST_LOWER_BOUND' or * 'binarySearch.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. * Defaults to 'binarySearch.GREATEST_LOWER_BOUND'. */ exports.search = function search(aNeedle, aHaystack, aCompare, aBias) { if (aHaystack.length === 0) { return -1; } var index = recursiveSearch(-1, aHaystack.length, aNeedle, aHaystack, aCompare, aBias || exports.GREATEST_LOWER_BOUND); if (index < 0) { return -1; } // We have found either the exact element, or the next-closest element than // the one we are searching for. However, there may be more than one such // element. Make sure we always return the smallest of these. while (index - 1 >= 0) { if (aCompare(aHaystack[index], aHaystack[index - 1], true) !== 0) { break; } --index; } return index; }; },{}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\mapping-list.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2014 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var util = require('./util'); /** * Determine whether mappingB is after mappingA with respect to generated * position. 
*/ function generatedPositionAfter(mappingA, mappingB) { // Optimized for most common case var lineA = mappingA.generatedLine; var lineB = mappingB.generatedLine; var columnA = mappingA.generatedColumn; var columnB = mappingB.generatedColumn; return lineB > lineA || lineB == lineA && columnB >= columnA || util.compareByGeneratedPositionsInflated(mappingA, mappingB) <= 0; } /** * A data structure to provide a sorted view of accumulated mappings in a * performance conscious manner. It trades a neglibable overhead in general * case for a large speedup in case of mappings being added in order. */ function MappingList() { this._array = []; this._sorted = true; // Serves as infimum this._last = {generatedLine: -1, generatedColumn: 0}; } /** * Iterate through internal items. This method takes the same arguments that * `Array.prototype.forEach` takes. * * NOTE: The order of the mappings is NOT guaranteed. */ MappingList.prototype.unsortedForEach = function MappingList_forEach(aCallback, aThisArg) { this._array.forEach(aCallback, aThisArg); }; /** * Add the given source mapping. * * @param Object aMapping */ MappingList.prototype.add = function MappingList_add(aMapping) { if (generatedPositionAfter(this._last, aMapping)) { this._last = aMapping; this._array.push(aMapping); } else { this._sorted = false; this._array.push(aMapping); } }; /** * Returns the flat, sorted array of mappings. The mappings are sorted by * generated position. * * WARNING: This method returns internal data without copying, for * performance. The return value must NOT be mutated, and should be treated as * an immutable borrow. If you want to take ownership, you must make your own * copy. */ MappingList.prototype.toArray = function MappingList_toArray() { if (!this._sorted) { this._array.sort(util.compareByGeneratedPositionsInflated); this._sorted = true; } return this._array; }; exports.MappingList = MappingList; },{"./util":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\quick-sort.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ // It turns out that some (most?) JavaScript engines don't self-host // `Array.prototype.sort`. This makes sense because C++ will likely remain // faster than JS when doing raw CPU-intensive sorting. However, when using a // custom comparator function, calling back and forth between the VM's C++ and // JIT'd JS is rather slow *and* loses JIT type information, resulting in // worse generated code for the comparator function than would be optimal. In // fact, when sorting with a comparator, these costs outweigh the benefits of // sorting in C++. By using our own JS-implemented Quick Sort (below), we get // a ~3500ms mean speed-up in `bench/bench.html`. /** * Swap the elements indexed by `x` and `y` in the array `ary`. * * @param {Array} ary * The array. * @param {Number} x * The index of the first item. * @param {Number} y * The index of the second item. */ function swap(ary, x, y) { var temp = ary[x]; ary[x] = ary[y]; ary[y] = temp; } /** * Returns a random integer within the range `low .. high` inclusive. * * @param {Number} low * The lower bound on the range. * @param {Number} high * The upper bound on the range. 
*/ function randomIntInRange(low, high) { return Math.round(low + (Math.random() * (high - low))); } /** * The Quick Sort algorithm. * * @param {Array} ary * An array to sort. * @param {function} comparator * Function to use to compare two items. * @param {Number} p * Start index of the array * @param {Number} r * End index of the array */ function doQuickSort(ary, comparator, p, r) { // If our lower bound is less than our upper bound, we (1) partition the // array into two pieces and (2) recurse on each half. If it is not, this is // the empty array and our base case. if (p < r) { // (1) Partitioning. // // The partitioning chooses a pivot between `p` and `r` and moves all // elements that are less than or equal to the pivot to the before it, and // all the elements that are greater than it after it. The effect is that // once partition is done, the pivot is in the exact place it will be when // the array is put in sorted order, and it will not need to be moved // again. This runs in O(n) time. // Always choose a random pivot so that an input array which is reverse // sorted does not cause O(n^2) running time. var pivotIndex = randomIntInRange(p, r); var i = p - 1; swap(ary, pivotIndex, r); var pivot = ary[r]; // Immediately after `j` is incremented in this loop, the following hold // true: // // * Every element in `ary[p .. i]` is less than or equal to the pivot. // // * Every element in `ary[i+1 .. j-1]` is greater than the pivot. for (var j = p; j < r; j++) { if (comparator(ary[j], pivot) <= 0) { i += 1; swap(ary, i, j); } } swap(ary, i + 1, j); var q = i + 1; // (2) Recurse on each half. doQuickSort(ary, comparator, p, q - 1); doQuickSort(ary, comparator, q + 1, r); } } /** * Sort the given array in-place with the given comparator function. * * @param {Array} ary * An array to sort. * @param {function} comparator * Function to use to compare two items. */ exports.quickSort = function (ary, comparator) { doQuickSort(ary, comparator, 0, ary.length - 1); }; },{}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-map-consumer.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var util = require('./util'); var binarySearch = require('./binary-search'); var ArraySet = require('./array-set').ArraySet; var base64VLQ = require('./base64-vlq'); var quickSort = require('./quick-sort').quickSort; function SourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } return sourceMap.sections != null ? new IndexedSourceMapConsumer(sourceMap) : new BasicSourceMapConsumer(sourceMap); } SourceMapConsumer.fromSourceMap = function(aSourceMap) { return BasicSourceMapConsumer.fromSourceMap(aSourceMap); } /** * The version of the source mapping spec that we are consuming. */ SourceMapConsumer.prototype._version = 3; // `__generatedMappings` and `__originalMappings` are arrays that hold the // parsed mapping coordinates from the source map's "mappings" attribute. They // are lazily instantiated, accessed via the `_generatedMappings` and // `_originalMappings` getters respectively, and we only parse the mappings // and create these arrays once queried for a source location. 
We jump through // these hoops because there can be many thousands of mappings, and parsing // them is expensive, so we only want to do it if we must. // // Each object in the arrays is of the form: // // { // generatedLine: The line number in the generated code, // generatedColumn: The column number in the generated code, // source: The path to the original source file that generated this // chunk of code, // originalLine: The line number in the original source that // corresponds to this chunk of generated code, // originalColumn: The column number in the original source that // corresponds to this chunk of generated code, // name: The name of the original symbol which generated this chunk of // code. // } // // All properties except for `generatedLine` and `generatedColumn` can be // `null`. // // `_generatedMappings` is ordered by the generated positions. // // `_originalMappings` is ordered by the original positions. SourceMapConsumer.prototype.__generatedMappings = null; Object.defineProperty(SourceMapConsumer.prototype, '_generatedMappings', { get: function () { if (!this.__generatedMappings) { this._parseMappings(this._mappings, this.sourceRoot); } return this.__generatedMappings; } }); SourceMapConsumer.prototype.__originalMappings = null; Object.defineProperty(SourceMapConsumer.prototype, '_originalMappings', { get: function () { if (!this.__originalMappings) { this._parseMappings(this._mappings, this.sourceRoot); } return this.__originalMappings; } }); SourceMapConsumer.prototype._charIsMappingSeparator = function SourceMapConsumer_charIsMappingSeparator(aStr, index) { var c = aStr.charAt(index); return c === ";" || c === ","; }; /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties). */ SourceMapConsumer.prototype._parseMappings = function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { throw new Error("Subclasses must implement _parseMappings"); }; SourceMapConsumer.GENERATED_ORDER = 1; SourceMapConsumer.ORIGINAL_ORDER = 2; SourceMapConsumer.GREATEST_LOWER_BOUND = 1; SourceMapConsumer.LEAST_UPPER_BOUND = 2; /** * Iterate over each mapping between an original source/line/column and a * generated line/column in this source map. * * @param Function aCallback * The function that is called with each mapping. * @param Object aContext * Optional. If specified, this object will be the value of `this` every * time that `aCallback` is called. * @param aOrder * Either `SourceMapConsumer.GENERATED_ORDER` or * `SourceMapConsumer.ORIGINAL_ORDER`. Specifies whether you want to * iterate over the mappings sorted by the generated file's line/column * order or the original's source/line/column order, respectively. Defaults to * `SourceMapConsumer.GENERATED_ORDER`. */ SourceMapConsumer.prototype.eachMapping = function SourceMapConsumer_eachMapping(aCallback, aContext, aOrder) { var context = aContext || null; var order = aOrder || SourceMapConsumer.GENERATED_ORDER; var mappings; switch (order) { case SourceMapConsumer.GENERATED_ORDER: mappings = this._generatedMappings; break; case SourceMapConsumer.ORIGINAL_ORDER: mappings = this._originalMappings; break; default: throw new Error("Unknown order of iteration."); } var sourceRoot = this.sourceRoot; mappings.map(function (mapping) { var source = mapping.source === null ? 
null : this._sources.at(mapping.source); if (source != null && sourceRoot != null) { source = util.join(sourceRoot, source); } return { source: source, generatedLine: mapping.generatedLine, generatedColumn: mapping.generatedColumn, originalLine: mapping.originalLine, originalColumn: mapping.originalColumn, name: mapping.name === null ? null : this._names.at(mapping.name) }; }, this).forEach(aCallback, context); }; /** * Returns all generated line and column information for the original source, * line, and column provided. If no column is provided, returns all mappings * corresponding to a either the line we are searching for or the next * closest line that has any mappings. Otherwise, returns all mappings * corresponding to the given line and either the column we are searching for * or the next closest column that has any offsets. * * The only argument is an object with the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: Optional. the column number in the original source. * * and an array of objects is returned, each with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ SourceMapConsumer.prototype.allGeneratedPositionsFor = function SourceMapConsumer_allGeneratedPositionsFor(aArgs) { var line = util.getArg(aArgs, 'line'); // When there is no exact match, BasicSourceMapConsumer.prototype._findMapping // returns the index of the closest mapping less than the needle. By // setting needle.originalColumn to 0, we thus find the last mapping for // the given line, provided such a mapping exists. var needle = { source: util.getArg(aArgs, 'source'), originalLine: line, originalColumn: util.getArg(aArgs, 'column', 0) }; if (this.sourceRoot != null) { needle.source = util.relative(this.sourceRoot, needle.source); } if (!this._sources.has(needle.source)) { return []; } needle.source = this._sources.indexOf(needle.source); var mappings = []; var index = this._findMapping(needle, this._originalMappings, "originalLine", "originalColumn", util.compareByOriginalPositions, binarySearch.LEAST_UPPER_BOUND); if (index >= 0) { var mapping = this._originalMappings[index]; if (aArgs.column === undefined) { var originalLine = mapping.originalLine; // Iterate until either we run out of mappings, or we run into // a mapping for a different line than the one we found. Since // mappings are sorted, this is guaranteed to find all mappings for // the line we found. while (mapping && mapping.originalLine === originalLine) { mappings.push({ line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }); mapping = this._originalMappings[++index]; } } else { var originalColumn = mapping.originalColumn; // Iterate until either we run out of mappings, or we run into // a mapping for a different line than the one we were searching for. // Since mappings are sorted, this is guaranteed to find all mappings for // the line we are searching for. 
while (mapping && mapping.originalLine === line && mapping.originalColumn == originalColumn) { mappings.push({ line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }); mapping = this._originalMappings[++index]; } } } return mappings; }; exports.SourceMapConsumer = SourceMapConsumer; /** * A BasicSourceMapConsumer instance represents a parsed source map which we can * query for information about the original file positions by giving it a file * position in the generated source. * * The only parameter is the raw source map (either as a JSON string, or * already parsed to an object). According to the spec, source maps have the * following attributes: * * - version: Which version of the source map spec this map is following. * - sources: An array of URLs to the original source files. * - names: An array of identifiers which can be referrenced by individual mappings. * - sourceRoot: Optional. The URL root from which all sources are relative. * - sourcesContent: Optional. An array of contents of the original source files. * - mappings: A string of base64 VLQs which contain the actual mappings. * - file: Optional. The generated file this source map is associated with. * * Here is an example source map, taken from the source map spec[0]: * * { * version : 3, * file: "out.js", * sourceRoot : "", * sources: ["foo.js", "bar.js"], * names: ["src", "maps", "are", "fun"], * mappings: "AA,AB;;ABCDE;" * } * * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit?pli=1# */ function BasicSourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } var version = util.getArg(sourceMap, 'version'); var sources = util.getArg(sourceMap, 'sources'); // Sass 3.3 leaves out the 'names' array, so we deviate from the spec (which // requires the array) to play nice here. var names = util.getArg(sourceMap, 'names', []); var sourceRoot = util.getArg(sourceMap, 'sourceRoot', null); var sourcesContent = util.getArg(sourceMap, 'sourcesContent', null); var mappings = util.getArg(sourceMap, 'mappings'); var file = util.getArg(sourceMap, 'file', null); // Once again, Sass deviates from the spec and supplies the version as a // string rather than a number, so we use loose equality checking here. if (version != this._version) { throw new Error('Unsupported version: ' + version); } sources = sources .map(String) // Some source maps produce relative source paths like "./foo.js" instead of // "foo.js". Normalize these first so that future comparisons will succeed. // See bugzil.la/1090768. .map(util.normalize) // Always ensure that absolute sources are internally stored relative to // the source root, if the source root is absolute. Not doing this would // be particularly problematic when the source root is a prefix of the // source (valid, but why??). See github issue #199 and bugzil.la/1188982. .map(function (source) { return sourceRoot && util.isAbsolute(sourceRoot) && util.isAbsolute(source) ? util.relative(sourceRoot, source) : source; }); // Pass `true` below to allow duplicate names and sources. While source maps // are intended to be compressed and deduplicated, the TypeScript compiler // sometimes generates source maps with duplicates in them. See Github issue // #72 and bugzil.la/889492. 
this._names = ArraySet.fromArray(names.map(String), true); this._sources = ArraySet.fromArray(sources, true); this.sourceRoot = sourceRoot; this.sourcesContent = sourcesContent; this._mappings = mappings; this.file = file; } BasicSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); BasicSourceMapConsumer.prototype.consumer = SourceMapConsumer; /** * Create a BasicSourceMapConsumer from a SourceMapGenerator. * * @param SourceMapGenerator aSourceMap * The source map that will be consumed. * @returns BasicSourceMapConsumer */ BasicSourceMapConsumer.fromSourceMap = function SourceMapConsumer_fromSourceMap(aSourceMap) { var smc = Object.create(BasicSourceMapConsumer.prototype); var names = smc._names = ArraySet.fromArray(aSourceMap._names.toArray(), true); var sources = smc._sources = ArraySet.fromArray(aSourceMap._sources.toArray(), true); smc.sourceRoot = aSourceMap._sourceRoot; smc.sourcesContent = aSourceMap._generateSourcesContent(smc._sources.toArray(), smc.sourceRoot); smc.file = aSourceMap._file; // Because we are modifying the entries (by converting string sources and // names to indices into the sources and names ArraySets), we have to make // a copy of the entry or else bad things happen. Shared mutable state // strikes again! See github issue #191. var generatedMappings = aSourceMap._mappings.toArray().slice(); var destGeneratedMappings = smc.__generatedMappings = []; var destOriginalMappings = smc.__originalMappings = []; for (var i = 0, length = generatedMappings.length; i < length; i++) { var srcMapping = generatedMappings[i]; var destMapping = new Mapping; destMapping.generatedLine = srcMapping.generatedLine; destMapping.generatedColumn = srcMapping.generatedColumn; if (srcMapping.source) { destMapping.source = sources.indexOf(srcMapping.source); destMapping.originalLine = srcMapping.originalLine; destMapping.originalColumn = srcMapping.originalColumn; if (srcMapping.name) { destMapping.name = names.indexOf(srcMapping.name); } destOriginalMappings.push(destMapping); } destGeneratedMappings.push(destMapping); } quickSort(smc.__originalMappings, util.compareByOriginalPositions); return smc; }; /** * The version of the source mapping spec that we are consuming. */ BasicSourceMapConsumer.prototype._version = 3; /** * The list of original sources. */ Object.defineProperty(BasicSourceMapConsumer.prototype, 'sources', { get: function () { return this._sources.toArray().map(function (s) { return this.sourceRoot != null ? util.join(this.sourceRoot, s) : s; }, this); } }); /** * Provide the JIT with a nice shape / hidden class. */ function Mapping() { this.generatedLine = 0; this.generatedColumn = 0; this.source = null; this.originalLine = null; this.originalColumn = null; this.name = null; } /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties). 
*/ BasicSourceMapConsumer.prototype._parseMappings = function SourceMapConsumer_parseMappings(aStr, aSourceRoot) { var generatedLine = 1; var previousGeneratedColumn = 0; var previousOriginalLine = 0; var previousOriginalColumn = 0; var previousSource = 0; var previousName = 0; var length = aStr.length; var index = 0; var cachedSegments = {}; var temp = {}; var originalMappings = []; var generatedMappings = []; var mapping, str, segment, end, value; while (index < length) { if (aStr.charAt(index) === ';') { generatedLine++; index++; previousGeneratedColumn = 0; } else if (aStr.charAt(index) === ',') { index++; } else { mapping = new Mapping(); mapping.generatedLine = generatedLine; // Because each offset is encoded relative to the previous one, // many segments often have the same encoding. We can exploit this // fact by caching the parsed variable length fields of each segment, // allowing us to avoid a second parse if we encounter the same // segment again. for (end = index; end < length; end++) { if (this._charIsMappingSeparator(aStr, end)) { break; } } str = aStr.slice(index, end); segment = cachedSegments[str]; if (segment) { index += str.length; } else { segment = []; while (index < end) { base64VLQ.decode(aStr, index, temp); value = temp.value; index = temp.rest; segment.push(value); } if (segment.length === 2) { throw new Error('Found a source, but no line and column'); } if (segment.length === 3) { throw new Error('Found a source and line, but no column'); } cachedSegments[str] = segment; } // Generated column. mapping.generatedColumn = previousGeneratedColumn + segment[0]; previousGeneratedColumn = mapping.generatedColumn; if (segment.length > 1) { // Original source. mapping.source = previousSource + segment[1]; previousSource += segment[1]; // Original line. mapping.originalLine = previousOriginalLine + segment[2]; previousOriginalLine = mapping.originalLine; // Lines are stored 0-based mapping.originalLine += 1; // Original column. mapping.originalColumn = previousOriginalColumn + segment[3]; previousOriginalColumn = mapping.originalColumn; if (segment.length > 4) { // Original name. mapping.name = previousName + segment[4]; previousName += segment[4]; } } generatedMappings.push(mapping); if (typeof mapping.originalLine === 'number') { originalMappings.push(mapping); } } } quickSort(generatedMappings, util.compareByGeneratedPositionsDeflated); this.__generatedMappings = generatedMappings; quickSort(originalMappings, util.compareByOriginalPositions); this.__originalMappings = originalMappings; }; /** * Find the mapping that best matches the hypothetical "needle" mapping that * we are searching for in the given "haystack" of mappings. */ BasicSourceMapConsumer.prototype._findMapping = function SourceMapConsumer_findMapping(aNeedle, aMappings, aLineName, aColumnName, aComparator, aBias) { // To return the position we are searching for, we must first find the // mapping for the given position and then return the opposite position it // points to. Because the mappings are sorted, we can use binary search to // find the best mapping. if (aNeedle[aLineName] <= 0) { throw new TypeError('Line must be greater than or equal to 1, got ' + aNeedle[aLineName]); } if (aNeedle[aColumnName] < 0) { throw new TypeError('Column must be greater than or equal to 0, got ' + aNeedle[aColumnName]); } return binarySearch.search(aNeedle, aMappings, aComparator, aBias); }; /** * Compute the last column for each generated mapping. The last column is * inclusive. 
*/ BasicSourceMapConsumer.prototype.computeColumnSpans = function SourceMapConsumer_computeColumnSpans() { for (var index = 0; index < this._generatedMappings.length; ++index) { var mapping = this._generatedMappings[index]; // Mappings do not contain a field for the last generated columnt. We // can come up with an optimistic estimate, however, by assuming that // mappings are contiguous (i.e. given two consecutive mappings, the // first mapping ends where the second one starts). if (index + 1 < this._generatedMappings.length) { var nextMapping = this._generatedMappings[index + 1]; if (mapping.generatedLine === nextMapping.generatedLine) { mapping.lastGeneratedColumn = nextMapping.generatedColumn - 1; continue; } } // The last mapping for each line spans the entire line. mapping.lastGeneratedColumn = Infinity; } }; /** * Returns the original source, line, and column information for the generated * source's line and column positions provided. The only argument is an object * with the following properties: * * - line: The line number in the generated source. * - column: The column number in the generated source. * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. * * and an object is returned with the following properties: * * - source: The original source file, or null. * - line: The line number in the original source, or null. * - column: The column number in the original source, or null. * - name: The original identifier, or null. */ BasicSourceMapConsumer.prototype.originalPositionFor = function SourceMapConsumer_originalPositionFor(aArgs) { var needle = { generatedLine: util.getArg(aArgs, 'line'), generatedColumn: util.getArg(aArgs, 'column') }; var index = this._findMapping( needle, this._generatedMappings, "generatedLine", "generatedColumn", util.compareByGeneratedPositionsDeflated, util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) ); if (index >= 0) { var mapping = this._generatedMappings[index]; if (mapping.generatedLine === needle.generatedLine) { var source = util.getArg(mapping, 'source', null); if (source !== null) { source = this._sources.at(source); if (this.sourceRoot != null) { source = util.join(this.sourceRoot, source); } } var name = util.getArg(mapping, 'name', null); if (name !== null) { name = this._names.at(name); } return { source: source, line: util.getArg(mapping, 'originalLine', null), column: util.getArg(mapping, 'originalColumn', null), name: name }; } } return { source: null, line: null, column: null, name: null }; }; /** * Return true if we have the source content for every source in the source * map, false otherwise. */ BasicSourceMapConsumer.prototype.hasContentsOfAllSources = function BasicSourceMapConsumer_hasContentsOfAllSources() { if (!this.sourcesContent) { return false; } return this.sourcesContent.length >= this._sources.size() && !this.sourcesContent.some(function (sc) { return sc == null; }); }; /** * Returns the original source content. The only argument is the url of the * original source file. Returns null if no original source content is * available. 
*/ BasicSourceMapConsumer.prototype.sourceContentFor = function SourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { if (!this.sourcesContent) { return null; } if (this.sourceRoot != null) { aSource = util.relative(this.sourceRoot, aSource); } if (this._sources.has(aSource)) { return this.sourcesContent[this._sources.indexOf(aSource)]; } var url; if (this.sourceRoot != null && (url = util.urlParse(this.sourceRoot))) { // XXX: file:// URIs and absolute paths lead to unexpected behavior for // many users. We can help them out when they expect file:// URIs to // behave like it would if they were running a local HTTP server. See // https://bugzilla.mozilla.org/show_bug.cgi?id=885597. var fileUriAbsPath = aSource.replace(/^file:\/\//, ""); if (url.scheme == "file" && this._sources.has(fileUriAbsPath)) { return this.sourcesContent[this._sources.indexOf(fileUriAbsPath)] } if ((!url.path || url.path == "/") && this._sources.has("/" + aSource)) { return this.sourcesContent[this._sources.indexOf("/" + aSource)]; } } // This function is used recursively from // IndexedSourceMapConsumer.prototype.sourceContentFor. In that case, we // don't want to throw if we can't find the source - we just want to // return null, so we provide a flag to exit gracefully. if (nullOnMissing) { return null; } else { throw new Error('"' + aSource + '" is not in the SourceMap.'); } }; /** * Returns the generated line and column information for the original source, * line, and column positions provided. The only argument is an object with * the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: The column number in the original source. * - bias: Either 'SourceMapConsumer.GREATEST_LOWER_BOUND' or * 'SourceMapConsumer.LEAST_UPPER_BOUND'. Specifies whether to return the * closest element that is smaller than or greater than the one we are * searching for, respectively, if the exact element cannot be found. * Defaults to 'SourceMapConsumer.GREATEST_LOWER_BOUND'. * * and an object is returned with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ BasicSourceMapConsumer.prototype.generatedPositionFor = function SourceMapConsumer_generatedPositionFor(aArgs) { var source = util.getArg(aArgs, 'source'); if (this.sourceRoot != null) { source = util.relative(this.sourceRoot, source); } if (!this._sources.has(source)) { return { line: null, column: null, lastColumn: null }; } source = this._sources.indexOf(source); var needle = { source: source, originalLine: util.getArg(aArgs, 'line'), originalColumn: util.getArg(aArgs, 'column') }; var index = this._findMapping( needle, this._originalMappings, "originalLine", "originalColumn", util.compareByOriginalPositions, util.getArg(aArgs, 'bias', SourceMapConsumer.GREATEST_LOWER_BOUND) ); if (index >= 0) { var mapping = this._originalMappings[index]; if (mapping.source === needle.source) { return { line: util.getArg(mapping, 'generatedLine', null), column: util.getArg(mapping, 'generatedColumn', null), lastColumn: util.getArg(mapping, 'lastGeneratedColumn', null) }; } } return { line: null, column: null, lastColumn: null }; }; exports.BasicSourceMapConsumer = BasicSourceMapConsumer; /** * An IndexedSourceMapConsumer instance represents a parsed source map which * we can query for information. It differs from BasicSourceMapConsumer in * that it takes "indexed" source maps (i.e. 
ones with a "sections" field) as * input. * * The only parameter is a raw source map (either as a JSON string, or already * parsed to an object). According to the spec for indexed source maps, they * have the following attributes: * * - version: Which version of the source map spec this map is following. * - file: Optional. The generated file this source map is associated with. * - sections: A list of section definitions. * * Each value under the "sections" field has two fields: * - offset: The offset into the original specified at which this section * begins to apply, defined as an object with a "line" and "column" * field. * - map: A source map definition. This source map could also be indexed, * but doesn't have to be. * * Instead of the "map" field, it's also possible to have a "url" field * specifying a URL to retrieve a source map from, but that's currently * unsupported. * * Here's an example source map, taken from the source map spec[0], but * modified to omit a section which uses the "url" field. * * { * version : 3, * file: "app.js", * sections: [{ * offset: {line:100, column:10}, * map: { * version : 3, * file: "section.js", * sources: ["foo.js", "bar.js"], * names: ["src", "maps", "are", "fun"], * mappings: "AAAA,E;;ABCDE;" * } * }], * } * * [0]: https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit#heading=h.535es3xeprgt */ function IndexedSourceMapConsumer(aSourceMap) { var sourceMap = aSourceMap; if (typeof aSourceMap === 'string') { sourceMap = JSON.parse(aSourceMap.replace(/^\)\]\}'/, '')); } var version = util.getArg(sourceMap, 'version'); var sections = util.getArg(sourceMap, 'sections'); if (version != this._version) { throw new Error('Unsupported version: ' + version); } this._sources = new ArraySet(); this._names = new ArraySet(); var lastOffset = { line: -1, column: 0 }; this._sections = sections.map(function (s) { if (s.url) { // The url field will require support for asynchronicity. // See https://github.com/mozilla/source-map/issues/16 throw new Error('Support for url field in sections not implemented.'); } var offset = util.getArg(s, 'offset'); var offsetLine = util.getArg(offset, 'line'); var offsetColumn = util.getArg(offset, 'column'); if (offsetLine < lastOffset.line || (offsetLine === lastOffset.line && offsetColumn < lastOffset.column)) { throw new Error('Section offsets must be ordered and non-overlapping.'); } lastOffset = offset; return { generatedOffset: { // The offset fields are 0-based, but we use 1-based indices when // encoding/decoding from VLQ. generatedLine: offsetLine + 1, generatedColumn: offsetColumn + 1 }, consumer: new SourceMapConsumer(util.getArg(s, 'map')) } }); } IndexedSourceMapConsumer.prototype = Object.create(SourceMapConsumer.prototype); IndexedSourceMapConsumer.prototype.constructor = SourceMapConsumer; /** * The version of the source mapping spec that we are consuming. */ IndexedSourceMapConsumer.prototype._version = 3; /** * The list of original sources. */ Object.defineProperty(IndexedSourceMapConsumer.prototype, 'sources', { get: function () { var sources = []; for (var i = 0; i < this._sections.length; i++) { for (var j = 0; j < this._sections[i].consumer.sources.length; j++) { sources.push(this._sections[i].consumer.sources[j]); } } return sources; } }); /** * Returns the original source, line, and column information for the generated * source's line and column positions provided. 
The only argument is an object * with the following properties: * * - line: The line number in the generated source. * - column: The column number in the generated source. * * and an object is returned with the following properties: * * - source: The original source file, or null. * - line: The line number in the original source, or null. * - column: The column number in the original source, or null. * - name: The original identifier, or null. */ IndexedSourceMapConsumer.prototype.originalPositionFor = function IndexedSourceMapConsumer_originalPositionFor(aArgs) { var needle = { generatedLine: util.getArg(aArgs, 'line'), generatedColumn: util.getArg(aArgs, 'column') }; // Find the section containing the generated position we're trying to map // to an original position. var sectionIndex = binarySearch.search(needle, this._sections, function(needle, section) { var cmp = needle.generatedLine - section.generatedOffset.generatedLine; if (cmp) { return cmp; } return (needle.generatedColumn - section.generatedOffset.generatedColumn); }); var section = this._sections[sectionIndex]; if (!section) { return { source: null, line: null, column: null, name: null }; } return section.consumer.originalPositionFor({ line: needle.generatedLine - (section.generatedOffset.generatedLine - 1), column: needle.generatedColumn - (section.generatedOffset.generatedLine === needle.generatedLine ? section.generatedOffset.generatedColumn - 1 : 0), bias: aArgs.bias }); }; /** * Return true if we have the source content for every source in the source * map, false otherwise. */ IndexedSourceMapConsumer.prototype.hasContentsOfAllSources = function IndexedSourceMapConsumer_hasContentsOfAllSources() { return this._sections.every(function (s) { return s.consumer.hasContentsOfAllSources(); }); }; /** * Returns the original source content. The only argument is the url of the * original source file. Returns null if no original source content is * available. */ IndexedSourceMapConsumer.prototype.sourceContentFor = function IndexedSourceMapConsumer_sourceContentFor(aSource, nullOnMissing) { for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; var content = section.consumer.sourceContentFor(aSource, true); if (content) { return content; } } if (nullOnMissing) { return null; } else { throw new Error('"' + aSource + '" is not in the SourceMap.'); } }; /** * Returns the generated line and column information for the original source, * line, and column positions provided. The only argument is an object with * the following properties: * * - source: The filename of the original source. * - line: The line number in the original source. * - column: The column number in the original source. * * and an object is returned with the following properties: * * - line: The line number in the generated source, or null. * - column: The column number in the generated source, or null. */ IndexedSourceMapConsumer.prototype.generatedPositionFor = function IndexedSourceMapConsumer_generatedPositionFor(aArgs) { for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; // Only consider this section if the requested source is in the list of // sources of the consumer. 
if (section.consumer.sources.indexOf(util.getArg(aArgs, 'source')) === -1) { continue; } var generatedPosition = section.consumer.generatedPositionFor(aArgs); if (generatedPosition) { var ret = { line: generatedPosition.line + (section.generatedOffset.generatedLine - 1), column: generatedPosition.column + (section.generatedOffset.generatedLine === generatedPosition.line ? section.generatedOffset.generatedColumn - 1 : 0) }; return ret; } } return { line: null, column: null }; }; /** * Parse the mappings in a string in to a data structure which we can easily * query (the ordered arrays in the `this.__generatedMappings` and * `this.__originalMappings` properties). */ IndexedSourceMapConsumer.prototype._parseMappings = function IndexedSourceMapConsumer_parseMappings(aStr, aSourceRoot) { this.__generatedMappings = []; this.__originalMappings = []; for (var i = 0; i < this._sections.length; i++) { var section = this._sections[i]; var sectionMappings = section.consumer._generatedMappings; for (var j = 0; j < sectionMappings.length; j++) { var mapping = sectionMappings[j]; var source = section.consumer._sources.at(mapping.source); if (section.consumer.sourceRoot !== null) { source = util.join(section.consumer.sourceRoot, source); } this._sources.add(source); source = this._sources.indexOf(source); var name = section.consumer._names.at(mapping.name); this._names.add(name); name = this._names.indexOf(name); // The mappings coming from the consumer for the section have // generated positions relative to the start of the section, so we // need to offset them to be relative to the start of the concatenated // generated file. var adjustedMapping = { source: source, generatedLine: mapping.generatedLine + (section.generatedOffset.generatedLine - 1), generatedColumn: mapping.generatedColumn + (section.generatedOffset.generatedLine === mapping.generatedLine ? section.generatedOffset.generatedColumn - 1 : 0), originalLine: mapping.originalLine, originalColumn: mapping.originalColumn, name: name }; this.__generatedMappings.push(adjustedMapping); if (typeof adjustedMapping.originalLine === 'number') { this.__originalMappings.push(adjustedMapping); } } } quickSort(this.__generatedMappings, util.compareByGeneratedPositionsDeflated); quickSort(this.__originalMappings, util.compareByOriginalPositions); }; exports.IndexedSourceMapConsumer = IndexedSourceMapConsumer; },{"./array-set":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\array-set.js","./base64-vlq":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\base64-vlq.js","./binary-search":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\binary-search.js","./quick-sort":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\quick-sort.js","./util":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-map-generator.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var base64VLQ = require('./base64-vlq'); var util = require('./util'); var ArraySet = require('./array-set').ArraySet; var MappingList = require('./mapping-list').MappingList; /** * An instance of the SourceMapGenerator represents a source map which is * being built incrementally. 
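 *
 * A minimal usage sketch (editor's addition, hedged; the file names and
 * positions are illustrative, the methods are the ones defined below):
 *
 *   var generator = new SourceMapGenerator({ file: "bundle.js" });
 *   generator.addMapping({
 *     generated: { line: 1, column: 0 },
 *     original: { line: 10, column: 4 },
 *     source: "src/app.js"
 *   });
 *   var json = generator.toString();
 *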
You may pass an object with the following * properties: * * - file: The filename of the generated source. * - sourceRoot: A root for all relative URLs in this source map. */ function SourceMapGenerator(aArgs) { if (!aArgs) { aArgs = {}; } this._file = util.getArg(aArgs, 'file', null); this._sourceRoot = util.getArg(aArgs, 'sourceRoot', null); this._skipValidation = util.getArg(aArgs, 'skipValidation', false); this._sources = new ArraySet(); this._names = new ArraySet(); this._mappings = new MappingList(); this._sourcesContents = null; } SourceMapGenerator.prototype._version = 3; /** * Creates a new SourceMapGenerator based on a SourceMapConsumer * * @param aSourceMapConsumer The SourceMap. */ SourceMapGenerator.fromSourceMap = function SourceMapGenerator_fromSourceMap(aSourceMapConsumer) { var sourceRoot = aSourceMapConsumer.sourceRoot; var generator = new SourceMapGenerator({ file: aSourceMapConsumer.file, sourceRoot: sourceRoot }); aSourceMapConsumer.eachMapping(function (mapping) { var newMapping = { generated: { line: mapping.generatedLine, column: mapping.generatedColumn } }; if (mapping.source != null) { newMapping.source = mapping.source; if (sourceRoot != null) { newMapping.source = util.relative(sourceRoot, newMapping.source); } newMapping.original = { line: mapping.originalLine, column: mapping.originalColumn }; if (mapping.name != null) { newMapping.name = mapping.name; } } generator.addMapping(newMapping); }); aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); if (content != null) { generator.setSourceContent(sourceFile, content); } }); return generator; }; /** * Add a single mapping from original source line and column to the generated * source's line and column for this source map being created. The mapping * object should have the following properties: * * - generated: An object with the generated line and column positions. * - original: An object with the original line and column positions. * - source: The original source file (relative to the sourceRoot). * - name: An optional original token name for this mapping. */ SourceMapGenerator.prototype.addMapping = function SourceMapGenerator_addMapping(aArgs) { var generated = util.getArg(aArgs, 'generated'); var original = util.getArg(aArgs, 'original', null); var source = util.getArg(aArgs, 'source', null); var name = util.getArg(aArgs, 'name', null); if (!this._skipValidation) { this._validateMapping(generated, original, source, name); } if (source != null) { source = String(source); if (!this._sources.has(source)) { this._sources.add(source); } } if (name != null) { name = String(name); if (!this._names.has(name)) { this._names.add(name); } } this._mappings.add({ generatedLine: generated.line, generatedColumn: generated.column, originalLine: original != null && original.line, originalColumn: original != null && original.column, source: source, name: name }); }; /** * Set the source content for a source file. */ SourceMapGenerator.prototype.setSourceContent = function SourceMapGenerator_setSourceContent(aSourceFile, aSourceContent) { var source = aSourceFile; if (this._sourceRoot != null) { source = util.relative(this._sourceRoot, source); } if (aSourceContent != null) { // Add the source content to the _sourcesContents map. // Create a new _sourcesContents map if the property is null. 
if (!this._sourcesContents) { this._sourcesContents = Object.create(null); } this._sourcesContents[util.toSetString(source)] = aSourceContent; } else if (this._sourcesContents) { // Remove the source file from the _sourcesContents map. // If the _sourcesContents map is empty, set the property to null. delete this._sourcesContents[util.toSetString(source)]; if (Object.keys(this._sourcesContents).length === 0) { this._sourcesContents = null; } } }; /** * Applies the mappings of a sub-source-map for a specific source file to the * source map being generated. Each mapping to the supplied source file is * rewritten using the supplied source map. Note: The resolution for the * resulting mappings is the minimium of this map and the supplied map. * * @param aSourceMapConsumer The source map to be applied. * @param aSourceFile Optional. The filename of the source file. * If omitted, SourceMapConsumer's file property will be used. * @param aSourceMapPath Optional. The dirname of the path to the source map * to be applied. If relative, it is relative to the SourceMapConsumer. * This parameter is needed when the two source maps aren't in the same * directory, and the source map to be applied contains relative source * paths. If so, those relative source paths need to be rewritten * relative to the SourceMapGenerator. */ SourceMapGenerator.prototype.applySourceMap = function SourceMapGenerator_applySourceMap(aSourceMapConsumer, aSourceFile, aSourceMapPath) { var sourceFile = aSourceFile; // If aSourceFile is omitted, we will use the file property of the SourceMap if (aSourceFile == null) { if (aSourceMapConsumer.file == null) { throw new Error( 'SourceMapGenerator.prototype.applySourceMap requires either an explicit source file, ' + 'or the source map\'s "file" property. Both were omitted.' ); } sourceFile = aSourceMapConsumer.file; } var sourceRoot = this._sourceRoot; // Make "sourceFile" relative if an absolute Url is passed. if (sourceRoot != null) { sourceFile = util.relative(sourceRoot, sourceFile); } // Applying the SourceMap can add and remove items from the sources and // the names array. var newSources = new ArraySet(); var newNames = new ArraySet(); // Find mappings for the "sourceFile" this._mappings.unsortedForEach(function (mapping) { if (mapping.source === sourceFile && mapping.originalLine != null) { // Check if it can be mapped by the source map, then update the mapping. var original = aSourceMapConsumer.originalPositionFor({ line: mapping.originalLine, column: mapping.originalColumn }); if (original.source != null) { // Copy mapping mapping.source = original.source; if (aSourceMapPath != null) { mapping.source = util.join(aSourceMapPath, mapping.source) } if (sourceRoot != null) { mapping.source = util.relative(sourceRoot, mapping.source); } mapping.originalLine = original.line; mapping.originalColumn = original.column; if (original.name != null) { mapping.name = original.name; } } } var source = mapping.source; if (source != null && !newSources.has(source)) { newSources.add(source); } var name = mapping.name; if (name != null && !newNames.has(name)) { newNames.add(name); } }, this); this._sources = newSources; this._names = newNames; // Copy sourcesContents of applied map. 
aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); if (content != null) { if (aSourceMapPath != null) { sourceFile = util.join(aSourceMapPath, sourceFile); } if (sourceRoot != null) { sourceFile = util.relative(sourceRoot, sourceFile); } this.setSourceContent(sourceFile, content); } }, this); }; /** * A mapping can have one of the three levels of data: * * 1. Just the generated position. * 2. The Generated position, original position, and original source. * 3. Generated and original position, original source, as well as a name * token. * * To maintain consistency, we validate that any new mapping being added falls * in to one of these categories. */ SourceMapGenerator.prototype._validateMapping = function SourceMapGenerator_validateMapping(aGenerated, aOriginal, aSource, aName) { // When aOriginal is truthy but has empty values for .line and .column, // it is most likely a programmer error. In this case we throw a very // specific error message to try to guide them the right way. // For example: https://github.com/Polymer/polymer-bundler/pull/519 if (aOriginal && typeof aOriginal.line !== 'number' && typeof aOriginal.column !== 'number') { throw new Error( 'original.line and original.column are not numbers -- you probably meant to omit ' + 'the original mapping entirely and only map the generated position. If so, pass ' + 'null for the original mapping instead of an object with empty or null values.' ); } if (aGenerated && 'line' in aGenerated && 'column' in aGenerated && aGenerated.line > 0 && aGenerated.column >= 0 && !aOriginal && !aSource && !aName) { // Case 1. return; } else if (aGenerated && 'line' in aGenerated && 'column' in aGenerated && aOriginal && 'line' in aOriginal && 'column' in aOriginal && aGenerated.line > 0 && aGenerated.column >= 0 && aOriginal.line > 0 && aOriginal.column >= 0 && aSource) { // Cases 2 and 3. return; } else { throw new Error('Invalid mapping: ' + JSON.stringify({ generated: aGenerated, source: aSource, original: aOriginal, name: aName })); } }; /** * Serialize the accumulated mappings in to the stream of base 64 VLQs * specified by the source map format. 
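 *
 * Editor's hedged illustration (not part of the original comment): each
 * mapping becomes a segment of base 64 VLQ deltas relative to the previous
 * mapping; segments on one generated line are separated by "," and generated
 * lines by ";". For example, the segment "AAAA" encodes four zero deltas
 * (generated column, source index, original line, original column), i.e. a
 * mapping at the same position as the one before it.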
*/ SourceMapGenerator.prototype._serializeMappings = function SourceMapGenerator_serializeMappings() { var previousGeneratedColumn = 0; var previousGeneratedLine = 1; var previousOriginalColumn = 0; var previousOriginalLine = 0; var previousName = 0; var previousSource = 0; var result = ''; var next; var mapping; var nameIdx; var sourceIdx; var mappings = this._mappings.toArray(); for (var i = 0, len = mappings.length; i < len; i++) { mapping = mappings[i]; next = '' if (mapping.generatedLine !== previousGeneratedLine) { previousGeneratedColumn = 0; while (mapping.generatedLine !== previousGeneratedLine) { next += ';'; previousGeneratedLine++; } } else { if (i > 0) { if (!util.compareByGeneratedPositionsInflated(mapping, mappings[i - 1])) { continue; } next += ','; } } next += base64VLQ.encode(mapping.generatedColumn - previousGeneratedColumn); previousGeneratedColumn = mapping.generatedColumn; if (mapping.source != null) { sourceIdx = this._sources.indexOf(mapping.source); next += base64VLQ.encode(sourceIdx - previousSource); previousSource = sourceIdx; // lines are stored 0-based in SourceMap spec version 3 next += base64VLQ.encode(mapping.originalLine - 1 - previousOriginalLine); previousOriginalLine = mapping.originalLine - 1; next += base64VLQ.encode(mapping.originalColumn - previousOriginalColumn); previousOriginalColumn = mapping.originalColumn; if (mapping.name != null) { nameIdx = this._names.indexOf(mapping.name); next += base64VLQ.encode(nameIdx - previousName); previousName = nameIdx; } } result += next; } return result; }; SourceMapGenerator.prototype._generateSourcesContent = function SourceMapGenerator_generateSourcesContent(aSources, aSourceRoot) { return aSources.map(function (source) { if (!this._sourcesContents) { return null; } if (aSourceRoot != null) { source = util.relative(aSourceRoot, source); } var key = util.toSetString(source); return Object.prototype.hasOwnProperty.call(this._sourcesContents, key) ? this._sourcesContents[key] : null; }, this); }; /** * Externalize the source map. */ SourceMapGenerator.prototype.toJSON = function SourceMapGenerator_toJSON() { var map = { version: this._version, sources: this._sources.toArray(), names: this._names.toArray(), mappings: this._serializeMappings() }; if (this._file != null) { map.file = this._file; } if (this._sourceRoot != null) { map.sourceRoot = this._sourceRoot; } if (this._sourcesContents) { map.sourcesContent = this._generateSourcesContent(map.sources, map.sourceRoot); } return map; }; /** * Render the source map being generated to a string. */ SourceMapGenerator.prototype.toString = function SourceMapGenerator_toString() { return JSON.stringify(this.toJSON()); }; exports.SourceMapGenerator = SourceMapGenerator; },{"./array-set":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\array-set.js","./base64-vlq":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\base64-vlq.js","./mapping-list":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\mapping-list.js","./util":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-node.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. 
See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ var SourceMapGenerator = require('./source-map-generator').SourceMapGenerator; var util = require('./util'); // Matches a Windows-style `\r\n` newline or a `\n` newline used by all other // operating systems these days (capturing the result). var REGEX_NEWLINE = /(\r?\n)/; // Newline character code for charCodeAt() comparisons var NEWLINE_CODE = 10; // Private symbol for identifying `SourceNode`s when multiple versions of // the source-map library are loaded. This MUST NOT CHANGE across // versions! var isSourceNode = "$$$isSourceNode$$$"; /** * SourceNodes provide a way to abstract over interpolating/concatenating * snippets of generated JavaScript source code while maintaining the line and * column information associated with the original source code. * * @param aLine The original line number. * @param aColumn The original column number. * @param aSource The original source's filename. * @param aChunks Optional. An array of strings which are snippets of * generated JS, or other SourceNodes. * @param aName The original identifier. */ function SourceNode(aLine, aColumn, aSource, aChunks, aName) { this.children = []; this.sourceContents = {}; this.line = aLine == null ? null : aLine; this.column = aColumn == null ? null : aColumn; this.source = aSource == null ? null : aSource; this.name = aName == null ? null : aName; this[isSourceNode] = true; if (aChunks != null) this.add(aChunks); } /** * Creates a SourceNode from generated code and a SourceMapConsumer. * * @param aGeneratedCode The generated code * @param aSourceMapConsumer The SourceMap for the generated code * @param aRelativePath Optional. The path that relative sources in the * SourceMapConsumer should be relative to. */ SourceNode.fromStringWithSourceMap = function SourceNode_fromStringWithSourceMap(aGeneratedCode, aSourceMapConsumer, aRelativePath) { // The SourceNode we want to fill with the generated code // and the SourceMap var node = new SourceNode(); // All even indices of this array are one line of the generated code, // while all odd indices are the newlines between two adjacent lines // (since `REGEX_NEWLINE` captures its match). // Processed fragments are accessed by calling `shiftNextLine`. var remainingLines = aGeneratedCode.split(REGEX_NEWLINE); var remainingLinesIndex = 0; var shiftNextLine = function() { var lineContents = getNextLine(); // The last line of a file might not have a newline. var newLine = getNextLine() || ""; return lineContents + newLine; function getNextLine() { return remainingLinesIndex < remainingLines.length ? remainingLines[remainingLinesIndex++] : undefined; } }; // We need to remember the position of "remainingLines" var lastGeneratedLine = 1, lastGeneratedColumn = 0; // The generate SourceNodes we need a code range. // To extract it current and last mapping is used. // Here we store the last mapping. var lastMapping = null; aSourceMapConsumer.eachMapping(function (mapping) { if (lastMapping !== null) { // We add the code from "lastMapping" to "mapping": // First check if there is a new line in between. if (lastGeneratedLine < mapping.generatedLine) { // Associate first line with "lastMapping" addMappingWithCode(lastMapping, shiftNextLine()); lastGeneratedLine++; lastGeneratedColumn = 0; // The remaining code is added without mapping } else { // There is no new line in between. 
// Associate the code between "lastGeneratedColumn" and // "mapping.generatedColumn" with "lastMapping" var nextLine = remainingLines[remainingLinesIndex]; var code = nextLine.substr(0, mapping.generatedColumn - lastGeneratedColumn); remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn - lastGeneratedColumn); lastGeneratedColumn = mapping.generatedColumn; addMappingWithCode(lastMapping, code); // No more remaining code, continue lastMapping = mapping; return; } } // We add the generated code until the first mapping // to the SourceNode without any mapping. // Each line is added as separate string. while (lastGeneratedLine < mapping.generatedLine) { node.add(shiftNextLine()); lastGeneratedLine++; } if (lastGeneratedColumn < mapping.generatedColumn) { var nextLine = remainingLines[remainingLinesIndex]; node.add(nextLine.substr(0, mapping.generatedColumn)); remainingLines[remainingLinesIndex] = nextLine.substr(mapping.generatedColumn); lastGeneratedColumn = mapping.generatedColumn; } lastMapping = mapping; }, this); // We have processed all mappings. if (remainingLinesIndex < remainingLines.length) { if (lastMapping) { // Associate the remaining code in the current line with "lastMapping" addMappingWithCode(lastMapping, shiftNextLine()); } // and add the remaining lines without any mapping node.add(remainingLines.splice(remainingLinesIndex).join("")); } // Copy sourcesContent into SourceNode aSourceMapConsumer.sources.forEach(function (sourceFile) { var content = aSourceMapConsumer.sourceContentFor(sourceFile); if (content != null) { if (aRelativePath != null) { sourceFile = util.join(aRelativePath, sourceFile); } node.setSourceContent(sourceFile, content); } }); return node; function addMappingWithCode(mapping, code) { if (mapping === null || mapping.source === undefined) { node.add(code); } else { var source = aRelativePath ? util.join(aRelativePath, mapping.source) : mapping.source; node.add(new SourceNode(mapping.originalLine, mapping.originalColumn, source, code, mapping.name)); } } }; /** * Add a chunk of generated JS to this source node. * * @param aChunk A string snippet of generated JS code, another instance of * SourceNode, or an array where each member is one of those things. */ SourceNode.prototype.add = function SourceNode_add(aChunk) { if (Array.isArray(aChunk)) { aChunk.forEach(function (chunk) { this.add(chunk); }, this); } else if (aChunk[isSourceNode] || typeof aChunk === "string") { if (aChunk) { this.children.push(aChunk); } } else { throw new TypeError( "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk ); } return this; }; /** * Add a chunk of generated JS to the beginning of this source node. * * @param aChunk A string snippet of generated JS code, another instance of * SourceNode, or an array where each member is one of those things. */ SourceNode.prototype.prepend = function SourceNode_prepend(aChunk) { if (Array.isArray(aChunk)) { for (var i = aChunk.length-1; i >= 0; i--) { this.prepend(aChunk[i]); } } else if (aChunk[isSourceNode] || typeof aChunk === "string") { this.children.unshift(aChunk); } else { throw new TypeError( "Expected a SourceNode, string, or an array of SourceNodes and strings. Got " + aChunk ); } return this; }; /** * Walk over the tree of JS snippets in this node and its children. The * walking function is called once for each snippet of JS and is passed that * snippet and the its original associated source's line/column location. * * @param aFn The traversal function. 
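 *
 * Editor's hedged sketch of a call (the file name and snippet are
 * illustrative):
 *
 *   var node = new SourceNode(1, 0, "a.js", "var x = 1;\n");
 *   node.walk(function (chunk, loc) {
 *     // chunk === "var x = 1;\n",
 *     // loc === { source: "a.js", line: 1, column: 0, name: null }
 *   });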
*/ SourceNode.prototype.walk = function SourceNode_walk(aFn) { var chunk; for (var i = 0, len = this.children.length; i < len; i++) { chunk = this.children[i]; if (chunk[isSourceNode]) { chunk.walk(aFn); } else { if (chunk !== '') { aFn(chunk, { source: this.source, line: this.line, column: this.column, name: this.name }); } } } }; /** * Like `String.prototype.join` except for SourceNodes. Inserts `aStr` between * each of `this.children`. * * @param aSep The separator. */ SourceNode.prototype.join = function SourceNode_join(aSep) { var newChildren; var i; var len = this.children.length; if (len > 0) { newChildren = []; for (i = 0; i < len-1; i++) { newChildren.push(this.children[i]); newChildren.push(aSep); } newChildren.push(this.children[i]); this.children = newChildren; } return this; }; /** * Call String.prototype.replace on the very right-most source snippet. Useful * for trimming whitespace from the end of a source node, etc. * * @param aPattern The pattern to replace. * @param aReplacement The thing to replace the pattern with. */ SourceNode.prototype.replaceRight = function SourceNode_replaceRight(aPattern, aReplacement) { var lastChild = this.children[this.children.length - 1]; if (lastChild[isSourceNode]) { lastChild.replaceRight(aPattern, aReplacement); } else if (typeof lastChild === 'string') { this.children[this.children.length - 1] = lastChild.replace(aPattern, aReplacement); } else { this.children.push(''.replace(aPattern, aReplacement)); } return this; }; /** * Set the source content for a source file. This will be added to the SourceMapGenerator * in the sourcesContent field. * * @param aSourceFile The filename of the source file * @param aSourceContent The content of the source file */ SourceNode.prototype.setSourceContent = function SourceNode_setSourceContent(aSourceFile, aSourceContent) { this.sourceContents[util.toSetString(aSourceFile)] = aSourceContent; }; /** * Walk over the tree of SourceNodes. The walking function is called for each * source file content and is passed the filename and source content. * * @param aFn The traversal function. */ SourceNode.prototype.walkSourceContents = function SourceNode_walkSourceContents(aFn) { for (var i = 0, len = this.children.length; i < len; i++) { if (this.children[i][isSourceNode]) { this.children[i].walkSourceContents(aFn); } } var sources = Object.keys(this.sourceContents); for (var i = 0, len = sources.length; i < len; i++) { aFn(util.fromSetString(sources[i]), this.sourceContents[sources[i]]); } }; /** * Return the string representation of this source node. Walks over the tree * and concatenates all the various snippets together to one string. */ SourceNode.prototype.toString = function SourceNode_toString() { var str = ""; this.walk(function (chunk) { str += chunk; }); return str; }; /** * Returns the string representation of this source node along with a source * map. 
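 *
 * A hedged usage sketch (editor's addition; the "file" value is illustrative):
 *
 *   var node = new SourceNode(1, 0, "a.js", "var x = 1;\n");
 *   var result = node.toStringWithSourceMap({ file: "out.js" });
 *   // result.code is the concatenated generated source,
 *   // result.map is a SourceMapGenerator; result.map.toString() yields JSON.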
*/ SourceNode.prototype.toStringWithSourceMap = function SourceNode_toStringWithSourceMap(aArgs) { var generated = { code: "", line: 1, column: 0 }; var map = new SourceMapGenerator(aArgs); var sourceMappingActive = false; var lastOriginalSource = null; var lastOriginalLine = null; var lastOriginalColumn = null; var lastOriginalName = null; this.walk(function (chunk, original) { generated.code += chunk; if (original.source !== null && original.line !== null && original.column !== null) { if(lastOriginalSource !== original.source || lastOriginalLine !== original.line || lastOriginalColumn !== original.column || lastOriginalName !== original.name) { map.addMapping({ source: original.source, original: { line: original.line, column: original.column }, generated: { line: generated.line, column: generated.column }, name: original.name }); } lastOriginalSource = original.source; lastOriginalLine = original.line; lastOriginalColumn = original.column; lastOriginalName = original.name; sourceMappingActive = true; } else if (sourceMappingActive) { map.addMapping({ generated: { line: generated.line, column: generated.column } }); lastOriginalSource = null; sourceMappingActive = false; } for (var idx = 0, length = chunk.length; idx < length; idx++) { if (chunk.charCodeAt(idx) === NEWLINE_CODE) { generated.line++; generated.column = 0; // Mappings end at eol if (idx + 1 === length) { lastOriginalSource = null; sourceMappingActive = false; } else if (sourceMappingActive) { map.addMapping({ source: original.source, original: { line: original.line, column: original.column }, generated: { line: generated.line, column: generated.column }, name: original.name }); } } else { generated.column++; } } }); this.walkSourceContents(function (sourceFile, sourceContent) { map.setSourceContent(sourceFile, sourceContent); }); return { code: generated.code, map: map }; }; exports.SourceNode = SourceNode; },{"./source-map-generator":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-map-generator.js","./util":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js"}],"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\util.js":[function(require,module,exports){ /* -*- Mode: js; js-indent-level: 2; -*- */ /* * Copyright 2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE or: * http://opensource.org/licenses/BSD-3-Clause */ /** * This is a helper function for getting values from parameter/options * objects. * * @param args The object we are extracting values from * @param name The name of the property we are getting. * @param defaultValue An optional value to return if the property is missing * from the object. If this is not specified and the property is missing, an * error will be thrown. */ function
(aArgs, aName, aDefaultValue) { if (aName in aArgs) { return aArgs[aName]; } else if (arguments.length === 3) { return aDefaultValue; } else { throw new Error('"' + aName + '" is a required argument.'); } } exports.getArg = getArg; var urlRegexp = /^(?:([\w+\-.]+):)?\/\/(?:(\w+:\w+)@)?([\w.]*)(?::(\d+))?(\S*)$/; var dataUrlRegexp = /^data:.+\,.+$/; function urlParse(aUrl) { var match = aUrl.match(urlRegexp); if (!match) { return null; } return { scheme: match[1], auth: match[2], host: match[3], port: match[4], path: match[5] }; } exports.urlParse = urlParse; function urlGenerate(aParsedUrl) { var url = ''; if (aParsedUrl.scheme) { url += aParsedUrl.scheme + ':'; } url += '//'; if (aParsedUrl.auth) { url += aParsedUrl.auth + '@'; } if (aParsedUrl.host) { url += aParsedUrl.host; } if (aParsedUrl.port) { url += ":" + aParsedUrl.port } if (aParsedUrl.path) { url += aParsedUrl.path; } return url; } exports.urlGenerate = urlGenerate; /** * Normalizes a path, or the path portion of a URL: * * - Replaces consecutive slashes with one slash. * - Removes unnecessary '.' parts. * - Removes unnecessary '<dir>/..' parts. * * Based on code in the Node.js 'path' core module. * * @param aPath The path or url to normalize. */ function normalize(aPath) { var path = aPath; var url = urlParse(aPath); if (url) { if (!url.path) { return aPath; } path = url.path; } var isAbsolute = exports.isAbsolute(path); var parts = path.split(/\/+/); for (var part, up = 0, i = parts.length - 1; i >= 0; i--) { part = parts[i]; if (part === '.') { parts.splice(i, 1); } else if (part === '..') { up++; } else if (up > 0) { if (part === '') { // The first part is blank if the path is absolute. Trying to go // above the root is a no-op. Therefore we can remove all '..' parts // directly after the root. parts.splice(i + 1, up); up = 0; } else { parts.splice(i, 2); up--; } } } path = parts.join('/'); if (path === '') { path = isAbsolute ? '/' : '.'; } if (url) { url.path = path; return urlGenerate(url); } return path; } exports.normalize = normalize; /** * Joins two paths/URLs. * * @param aRoot The root path or URL. * @param aPath The path or URL to be joined with the root. * * - If aPath is a URL or a data URI, aPath is returned, unless aPath is a * scheme-relative URL: Then the scheme of aRoot, if any, is prepended * first. * - Otherwise aPath is a path. If aRoot is a URL, then its path portion * is updated with the result and aRoot is returned. Otherwise the result * is returned. * - If aPath is absolute, the result is aPath. * - Otherwise the two paths are joined with a slash. * - Joining for example 'http://' and 'www.example.com' is also supported. */ function join(aRoot, aPath) { if (aRoot === "") { aRoot = "."; } if (aPath === "") { aPath = "."; } var aPathUrl = urlParse(aPath); var aRootUrl = urlParse(aRoot); if (aRootUrl) { aRoot = aRootUrl.path || '/'; } // `join(foo, '//www.example.org')` if (aPathUrl && !aPathUrl.scheme) { if (aRootUrl) { aPathUrl.scheme = aRootUrl.scheme; } return urlGenerate(aPathUrl); } if (aPathUrl || aPath.match(dataUrlRegexp)) { return aPath; } // `join('http://', 'www.example.com')` if (aRootUrl && !aRootUrl.host && !aRootUrl.path) { aRootUrl.host = aPath; return urlGenerate(aRootUrl); } var joined = aPath.charAt(0) === '/' ? 
aPath : normalize(aRoot.replace(/\/+$/, '') + '/' + aPath); if (aRootUrl) { aRootUrl.path = joined; return urlGenerate(aRootUrl); } return joined; } exports.join = join; exports.isAbsolute = function (aPath) { return aPath.charAt(0) === '/' || !!aPath.match(urlRegexp); }; /** * Make a path relative to a URL or another path. * * @param aRoot The root path or URL. * @param aPath The path or URL to be made relative to aRoot. */ function relative(aRoot, aPath) { if (aRoot === "") { aRoot = "."; } aRoot = aRoot.replace(/\/$/, ''); // It is possible for the path to be above the root. In this case, simply // checking whether the root is a prefix of the path won't work. Instead, we // need to remove components from the root one by one, until either we find // a prefix that fits, or we run out of components to remove. var level = 0; while (aPath.indexOf(aRoot + '/') !== 0) { var index = aRoot.lastIndexOf("/"); if (index < 0) { return aPath; } // If the only part of the root that is left is the scheme (i.e. http://, // file:///, etc.), one or more slashes (/), or simply nothing at all, we // have exhausted all components, so the path is not relative to the root. aRoot = aRoot.slice(0, index); if (aRoot.match(/^([^\/]+:\/)?\/*$/)) { return aPath; } ++level; } // Make sure we add a "../" for each component we removed from the root. return Array(level + 1).join("../") + aPath.substr(aRoot.length + 1); } exports.relative = relative; var supportsNullProto = (function () { var obj = Object.create(null); return !('__proto__' in obj); }()); function identity (s) { return s; } /** * Because behavior goes wacky when you set `__proto__` on objects, we * have to prefix all the strings in our set with an arbitrary character. * * See https://github.com/mozilla/source-map/pull/31 and * https://github.com/mozilla/source-map/issues/30 * * @param String aStr */ function toSetString(aStr) { if (isProtoString(aStr)) { return '$' + aStr; } return aStr; } exports.toSetString = supportsNullProto ? identity : toSetString; function fromSetString(aStr) { if (isProtoString(aStr)) { return aStr.slice(1); } return aStr; } exports.fromSetString = supportsNullProto ? identity : fromSetString; function isProtoString(s) { if (!s) { return false; } var length = s.length; if (length < 9 /* "__proto__".length */) { return false; } if (s.charCodeAt(length - 1) !== 95 /* '_' */ || s.charCodeAt(length - 2) !== 95 /* '_' */ || s.charCodeAt(length - 3) !== 111 /* 'o' */ || s.charCodeAt(length - 4) !== 116 /* 't' */ || s.charCodeAt(length - 5) !== 111 /* 'o' */ || s.charCodeAt(length - 6) !== 114 /* 'r' */ || s.charCodeAt(length - 7) !== 112 /* 'p' */ || s.charCodeAt(length - 8) !== 95 /* '_' */ || s.charCodeAt(length - 9) !== 95 /* '_' */) { return false; } for (var i = length - 10; i >= 0; i--) { if (s.charCodeAt(i) !== 36 /* '$' */) { return false; } } return true; } /** * Comparator between two mappings where the original positions are compared. * * Optionally pass in `true` as `onlyCompareGenerated` to consider two * mappings with the same original source/line/column, but different generated * line and column the same. Useful when searching for a mapping with a * stubbed out mapping. 
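 *
 * Editor's hedged note: this comparator is what the consumer code above sorts
 * its parsed mappings with, e.g.
 *
 *   quickSort(mappings, util.compareByOriginalPositions);
 *
 * where "mappings" stands for an array of mapping objects.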
*/ function compareByOriginalPositions(mappingA, mappingB, onlyCompareOriginal) { var cmp = mappingA.source - mappingB.source; if (cmp !== 0) { return cmp; } cmp = mappingA.originalLine - mappingB.originalLine; if (cmp !== 0) { return cmp; } cmp = mappingA.originalColumn - mappingB.originalColumn; if (cmp !== 0 || onlyCompareOriginal) { return cmp; } cmp = mappingA.generatedColumn - mappingB.generatedColumn; if (cmp !== 0) { return cmp; } cmp = mappingA.generatedLine - mappingB.generatedLine; if (cmp !== 0) { return cmp; } return mappingA.name - mappingB.name; } exports.compareByOriginalPositions = compareByOriginalPositions; /** * Comparator between two mappings with deflated source and name indices where * the generated positions are compared. * * Optionally pass in `true` as `onlyCompareGenerated` to consider two * mappings with the same generated line and column, but different * source/name/original line and column the same. Useful when searching for a * mapping with a stubbed out mapping. */ function compareByGeneratedPositionsDeflated(mappingA, mappingB, onlyCompareGenerated) { var cmp = mappingA.generatedLine - mappingB.generatedLine; if (cmp !== 0) { return cmp; } cmp = mappingA.generatedColumn - mappingB.generatedColumn; if (cmp !== 0 || onlyCompareGenerated) { return cmp; } cmp = mappingA.source - mappingB.source; if (cmp !== 0) { return cmp; } cmp = mappingA.originalLine - mappingB.originalLine; if (cmp !== 0) { return cmp; } cmp = mappingA.originalColumn - mappingB.originalColumn; if (cmp !== 0) { return cmp; } return mappingA.name - mappingB.name; } exports.compareByGeneratedPositionsDeflated = compareByGeneratedPositionsDeflated; function strcmp(aStr1, aStr2) { if (aStr1 === aStr2) { return 0; } if (aStr1 > aStr2) { return 1; } return -1; } /** * Comparator between two mappings with inflated source and name strings where * the generated positions are compared. 
*/ function compareByGeneratedPositionsInflated(mappingA, mappingB) { var cmp = mappingA.generatedLine - mappingB.generatedLine; if (cmp !== 0) { return cmp; } cmp = mappingA.generatedColumn - mappingB.generatedColumn; if (cmp !== 0) { return cmp; } cmp = strcmp(mappingA.source, mappingB.source); if (cmp !== 0) { return cmp; } cmp = mappingA.originalLine - mappingB.originalLine; if (cmp !== 0) { return cmp; } cmp = mappingA.originalColumn - mappingB.originalColumn; if (cmp !== 0) { return cmp; } return strcmp(mappingA.name, mappingB.name); } exports.compareByGeneratedPositionsInflated = compareByGeneratedPositionsInflated; },{}],"buffer-crc32":[function(require,module,exports){ var Buffer = require('buffer').Buffer; var CRC_TABLE = [ 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419, 0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4, 0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07, 0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de, 0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856, 0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9, 0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4, 0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b, 0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3, 0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a, 0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599, 0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924, 0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190, 0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f, 0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e, 0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01, 0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed, 0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950, 0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3, 0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2, 0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a, 0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5, 0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010, 0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f, 0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17, 0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6, 0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615, 0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8, 0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344, 0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb, 0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a, 0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5, 0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1, 0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c, 0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef, 0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236, 0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe, 0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31, 0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c, 0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713, 0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b, 0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242, 0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1, 0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c, 0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278, 0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7, 0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 
0x40df0b66, 0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9, 0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605, 0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8, 0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b, 0x2d02ef8d ]; if (typeof Int32Array !== 'undefined') { CRC_TABLE = new Int32Array(CRC_TABLE); } function newEmptyBuffer(length) { var buffer = new Buffer(length); buffer.fill(0x00); return buffer; } function ensureBuffer(input) { if (Buffer.isBuffer(input)) { return input; } var hasNewBufferAPI = typeof Buffer.alloc === "function" && typeof Buffer.from === "function"; if (typeof input === "number") { return hasNewBufferAPI ? Buffer.alloc(input) : newEmptyBuffer(input); } else if (typeof input === "string") { return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input); } else { throw new Error("input must be buffer, number, or string, received " + typeof input); } } function bufferizeInt(num) { var tmp = ensureBuffer(4); tmp.writeInt32BE(num, 0); return tmp; } function _crc32(buf, previous) { buf = ensureBuffer(buf); if (Buffer.isBuffer(previous)) { previous = previous.readUInt32BE(0); } var crc = ~~previous ^ -1; for (var n = 0; n < buf.length; n++) { crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8); } return (crc ^ -1); } function crc32() { return bufferizeInt(_crc32.apply(null, arguments)); } crc32.signed = function () { return _crc32.apply(null, arguments); }; crc32.unsigned = function () { return _crc32.apply(null, arguments) >>> 0; }; module.exports = crc32; },{"buffer":false}],"buffer-from":[function(require,module,exports){ (function (Buffer){ var toString = Object.prototype.toString var isModern = ( typeof Buffer.alloc === 'function' && typeof Buffer.allocUnsafe === 'function' && typeof Buffer.from === 'function' ) function isArrayBuffer (input) { return toString.call(input).slice(8, -1) === 'ArrayBuffer' } function fromArrayBuffer (obj, byteOffset, length) { byteOffset >>>= 0 var maxLength = obj.byteLength - byteOffset if (maxLength < 0) { throw new RangeError("'offset' is out of bounds") } if (length === undefined) { length = maxLength } else { length >>>= 0 if (length > maxLength) { throw new RangeError("'length' is out of bounds") } } return isModern ? Buffer.from(obj.slice(byteOffset, byteOffset + length)) : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) } function fromString (string, encoding) { if (typeof encoding !== 'string' || encoding === '') { encoding = 'utf8' } if (!Buffer.isEncoding(encoding)) { throw new TypeError('"encoding" must be a valid string encoding') } return isModern ? Buffer.from(string, encoding) : new Buffer(string, encoding) } function bufferFrom (value, encodingOrOffset, length) { if (typeof value === 'number') { throw new TypeError('"value" argument must not be a number') } if (isArrayBuffer(value)) { return fromArrayBuffer(value, encodingOrOffset, length) } if (typeof value === 'string') { return fromString(value, encodingOrOffset) } return isModern ? 
Buffer.from(value) : new Buffer(value) } module.exports = bufferFrom }).call(this,require("buffer").Buffer) },{"buffer":false}],"csb-wizard":[function(require,module,exports){ module.exports = require('./CSBWizard'); },{"./CSBWizard":"D:\\Catalin\\Munca\\privatesky\\modules\\csb-wizard\\CSBWizard.js"}],"node-fd-slicer":[function(require,module,exports){ (function (Buffer,setImmediate){ var fs = require('fs'); var util = require('util'); var stream = require('stream'); var Readable = stream.Readable; var Writable = stream.Writable; var PassThrough = stream.PassThrough; var Pend = require('./modules/node-pend'); var EventEmitter = require('events').EventEmitter; exports.createFromBuffer = createFromBuffer; exports.createFromFd = createFromFd; exports.BufferSlicer = BufferSlicer; exports.FdSlicer = FdSlicer; util.inherits(FdSlicer, EventEmitter); function FdSlicer(fd, options) { options = options || {}; EventEmitter.call(this); this.fd = fd; this.pend = new Pend(); this.pend.max = 1; this.refCount = 0; this.autoClose = !!options.autoClose; } FdSlicer.prototype.read = function(buffer, offset, length, position, callback) { var self = this; self.pend.go(function(cb) { fs.read(self.fd, buffer, offset, length, position, function(err, bytesRead, buffer) { cb(); callback(err, bytesRead, buffer); }); }); }; FdSlicer.prototype.write = function(buffer, offset, length, position, callback) { var self = this; self.pend.go(function(cb) { fs.write(self.fd, buffer, offset, length, position, function(err, written, buffer) { cb(); callback(err, written, buffer); }); }); }; FdSlicer.prototype.createReadStream = function(options) { return new ReadStream(this, options); }; FdSlicer.prototype.createWriteStream = function(options) { return new WriteStream(this, options); }; FdSlicer.prototype.ref = function() { this.refCount += 1; }; FdSlicer.prototype.unref = function() { var self = this; self.refCount -= 1; if (self.refCount > 0) return; if (self.refCount < 0) throw new Error("invalid unref"); if (self.autoClose) { fs.close(self.fd, onCloseDone); } function onCloseDone(err) { if (err) { self.emit('error', err); } else { self.emit('close'); } } }; util.inherits(ReadStream, Readable); function ReadStream(context, options) { options = options || {}; Readable.call(this, options); this.context = context; this.context.ref(); this.start = options.start || 0; this.endOffset = options.end; this.pos = this.start; this.destroyed = false; } ReadStream.prototype._read = function(n) { var self = this; if (self.destroyed) return; var toRead = Math.min(self._readableState.highWaterMark, n); if (self.endOffset != null) { toRead = Math.min(toRead, self.endOffset - self.pos); } if (toRead <= 0) { self.destroyed = true; self.push(null); self.context.unref(); return; } self.context.pend.go(function(cb) { if (self.destroyed) return cb(); var buffer = new Buffer(toRead); fs.read(self.context.fd, buffer, 0, toRead, self.pos, function(err, bytesRead) { if (err) { self.destroy(err); } else if (bytesRead === 0) { self.destroyed = true; self.push(null); self.context.unref(); } else { self.pos += bytesRead; self.push(buffer.slice(0, bytesRead)); } cb(); }); }); }; ReadStream.prototype.destroy = function(err) { if (this.destroyed) return; err = err || new Error("stream destroyed"); this.destroyed = true; this.emit('error', err); this.context.unref(); }; util.inherits(WriteStream, Writable); function WriteStream(context, options) { options = options || {}; Writable.call(this, options); this.context = context; this.context.ref(); this.start = 
options.start || 0; this.endOffset = (options.end == null) ? Infinity : +options.end; this.bytesWritten = 0; this.pos = this.start; this.destroyed = false; this.on('finish', this.destroy.bind(this)); } WriteStream.prototype._write = function(buffer, encoding, callback) { var self = this; if (self.destroyed) return; if (self.pos + buffer.length > self.endOffset) { var err = new Error("maximum file length exceeded"); err.code = 'ETOOBIG'; self.destroy(); callback(err); return; } self.context.pend.go(function(cb) { if (self.destroyed) return cb(); fs.write(self.context.fd, buffer, 0, buffer.length, self.pos, function(err, bytes) { if (err) { self.destroy(); cb(); callback(err); } else { self.bytesWritten += bytes; self.pos += bytes; self.emit('progress'); cb(); callback(); } }); }); }; WriteStream.prototype.destroy = function() { if (this.destroyed) return; this.destroyed = true; this.context.unref(); }; util.inherits(BufferSlicer, EventEmitter); function BufferSlicer(buffer, options) { EventEmitter.call(this); options = options || {}; this.refCount = 0; this.buffer = buffer; this.maxChunkSize = options.maxChunkSize || Number.MAX_SAFE_INTEGER; } BufferSlicer.prototype.read = function(buffer, offset, length, position, callback) { var end = position + length; var delta = end - this.buffer.length; var written = (delta > 0) ? delta : length; this.buffer.copy(buffer, offset, position, end); setImmediate(function() { callback(null, written); }); }; BufferSlicer.prototype.write = function(buffer, offset, length, position, callback) { buffer.copy(this.buffer, position, offset, offset + length); setImmediate(function() { callback(null, length, buffer); }); }; BufferSlicer.prototype.createReadStream = function(options) { options = options || {}; var readStream = new PassThrough(options); readStream.destroyed = false; readStream.start = options.start || 0; readStream.endOffset = options.end; // by the time this function returns, we'll be done. readStream.pos = readStream.endOffset || this.buffer.length; // respect the maxChunkSize option to slice up the chunk into smaller pieces. var entireSlice = this.buffer.slice(readStream.start, readStream.pos); var offset = 0; while (true) { var nextOffset = offset + this.maxChunkSize; if (nextOffset >= entireSlice.length) { // last chunk if (offset < entireSlice.length) { readStream.write(entireSlice.slice(offset, entireSlice.length)); } break; } readStream.write(entireSlice.slice(offset, nextOffset)); offset = nextOffset; } readStream.end(); readStream.destroy = function() { readStream.destroyed = true; }; return readStream; }; BufferSlicer.prototype.createWriteStream = function(options) { var bufferSlicer = this; options = options || {}; var writeStream = new Writable(options); writeStream.start = options.start || 0; writeStream.endOffset = (options.end == null) ? 
this.buffer.length : +options.end; writeStream.bytesWritten = 0; writeStream.pos = writeStream.start; writeStream.destroyed = false; writeStream._write = function(buffer, encoding, callback) { if (writeStream.destroyed) return; var end = writeStream.pos + buffer.length; if (end > writeStream.endOffset) { var err = new Error("maximum file length exceeded"); err.code = 'ETOOBIG'; writeStream.destroyed = true; callback(err); return; } buffer.copy(bufferSlicer.buffer, writeStream.pos, 0, buffer.length); writeStream.bytesWritten += buffer.length; writeStream.pos = end; writeStream.emit('progress'); callback(); }; writeStream.destroy = function() { writeStream.destroyed = true; }; return writeStream; }; BufferSlicer.prototype.ref = function() { this.refCount += 1; }; BufferSlicer.prototype.unref = function() { this.refCount -= 1; if (this.refCount < 0) { throw new Error("invalid unref"); } }; function createFromBuffer(buffer, options) { return new BufferSlicer(buffer, options); } function createFromFd(fd, options) { return new FdSlicer(fd, options); } }).call(this,require("buffer").Buffer,require("timers").setImmediate) },{"./modules/node-pend":"D:\\Catalin\\Munca\\privatesky\\modules\\node-fd-slicer\\modules\\node-pend\\index.js","buffer":false,"events":false,"fs":false,"stream":false,"timers":false,"util":false}],"pskwallet":[function(require,module,exports){ (function (__dirname){ const pskConsole = require('swarmutils').createPskConsole(); const pathModule = "path"; const path = require(pathModule); process.env.PSK_ROOT_INSTALATION_FOLDER = path.resolve("." + __dirname + "/../.."); require("./cmds"); pskConsole.runCommand(); }).call(this,"/modules/pskwallet") },{"./cmds":"D:\\Catalin\\Munca\\privatesky\\modules\\pskwallet\\cmds\\index.js","swarmutils":false}],"source-map-support":[function(require,module,exports){ var SourceMapConsumer = require('source-map').SourceMapConsumer; var path = require('path'); var fs; try { fs = require('fs'); if (!fs.existsSync || !fs.readFileSync) { // fs doesn't have all methods we need fs = null; } } catch (err) { /* nop */ } var bufferFrom = require('buffer-from'); // Only install once if called multiple times var errorFormatterInstalled = false; var uncaughtShimInstalled = false; // If true, the caches are reset before a stack trace formatting operation var emptyCacheBetweenOperations = false; // Supports {browser, node, auto} var environment = "auto"; // Maps a file path to a string containing the file contents var fileContentsCache = {}; // Maps a file path to a source map for that file var sourceMapCache = {}; // Regex for detecting source maps var reSourceMap = /^data:application\/json[^,]+base64,/; // Priority list of retrieve handlers var retrieveFileHandlers = []; var retrieveMapHandlers = []; function isInBrowser() { if (environment === "browser") return true; if (environment === "node") return false; return ((typeof window !== 'undefined') && (typeof XMLHttpRequest === 'function') && !(window.require && window.module && window.process && window.process.type === "renderer")); } function hasGlobalProcessEventEmitter() { return ((typeof process === 'object') && (process !== null) && (typeof process.on === 'function')); } function handlerExec(list) { return function(arg) { for (var i = 0; i < list.length; i++) { var ret = list[i](arg); if (ret) { return ret; } } return null; }; } var retrieveFile = handlerExec(retrieveFileHandlers); retrieveFileHandlers.push(function(path) { // Trim the path to make sure there is no extra whitespace. 
path = path.trim(); if (/^file:/.test(path)) { // existsSync/readFileSync can't handle file protocol, but once stripped, it works path = path.replace(/file:\/\/\/(\w:)?/, function(protocol, drive) { return drive ? '' : // file:///C:/dir/file -> C:/dir/file '/'; // file:///root-dir/file -> /root-dir/file }); } if (path in fileContentsCache) { return fileContentsCache[path]; } var contents = ''; try { if (!fs) { // Use SJAX if we are in the browser var xhr = new XMLHttpRequest(); xhr.open('GET', path, /** async */ false); xhr.send(null); if (xhr.readyState === 4 && xhr.status === 200) { contents = xhr.responseText; } } else if (fs.existsSync(path)) { // Otherwise, use the filesystem contents = fs.readFileSync(path, 'utf8'); } } catch (er) { /* ignore any errors */ } return fileContentsCache[path] = contents; }); // Support URLs relative to a directory, but be careful about a protocol prefix // in case we are in the browser (i.e. directories may start with "http://" or "file:///") function supportRelativeURL(file, url) { if (!file) return url; var dir = path.dirname(file); var match = /^\w+:\/\/[^\/]*/.exec(dir); var protocol = match ? match[0] : ''; var startPath = dir.slice(protocol.length); if (protocol && /^\/\w\:/.test(startPath)) { // handle file:///C:/ paths protocol += '/'; return protocol + path.resolve(dir.slice(protocol.length), url).replace(/\\/g, '/'); } return protocol + path.resolve(dir.slice(protocol.length), url); } function retrieveSourceMapURL(source) { var fileData; if (isInBrowser()) { try { var xhr = new XMLHttpRequest(); xhr.open('GET', source, false); xhr.send(null); fileData = xhr.readyState === 4 ? xhr.responseText : null; // Support providing a sourceMappingURL via the SourceMap header var sourceMapHeader = xhr.getResponseHeader("SourceMap") || xhr.getResponseHeader("X-SourceMap"); if (sourceMapHeader) { return sourceMapHeader; } } catch (e) { } } // Get the URL of the source map fileData = retrieveFile(source); var re = /(?:\/\/[@#][\s]*sourceMappingURL=([^\s'"]+)[\s]*$)|(?:\/\*[@#][\s]*sourceMappingURL=([^\s*'"]+)[\s]*(?:\*\/)[\s]*$)/mg; // Keep executing the search to find the *last* sourceMappingURL to avoid // picking up sourceMappingURLs from comments, strings, etc. var lastMatch, match; while (match = re.exec(fileData)) lastMatch = match; if (!lastMatch) return null; return lastMatch[1]; }; // Can be overridden by the retrieveSourceMap option to install. Takes a // generated source filename; returns a {map, optional url} object, or null if // there is no source map. The map field may be either a string or the parsed // JSON object (ie, it must be a valid argument to the SourceMapConsumer // constructor). 
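//
// Editor's hedged sketch of such an override (file names and the use of
// fs.readFileSync are illustrative only; the return shape follows the
// comment above):
//
//   require('source-map-support').install({
//     retrieveSourceMap: function (source) {
//       if (source === 'compiled.js') {
//         return {
//           url: 'original.js',
//           map: fs.readFileSync('compiled.js.map', 'utf8')
//         };
//       }
//       return null;
//     }
//   });
//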
var retrieveSourceMap = handlerExec(retrieveMapHandlers); retrieveMapHandlers.push(function(source) { var sourceMappingURL = retrieveSourceMapURL(source); if (!sourceMappingURL) return null; // Read the contents of the source map var sourceMapData; if (reSourceMap.test(sourceMappingURL)) { // Support source map URL as a data url var rawData = sourceMappingURL.slice(sourceMappingURL.indexOf(',') + 1); sourceMapData = bufferFrom(rawData, "base64").toString(); sourceMappingURL = source; } else { // Support source map URLs relative to the source URL sourceMappingURL = supportRelativeURL(source, sourceMappingURL); sourceMapData = retrieveFile(sourceMappingURL); } if (!sourceMapData) { return null; } return { url: sourceMappingURL, map: sourceMapData }; }); function mapSourcePosition(position) { var sourceMap = sourceMapCache[position.source]; if (!sourceMap) { // Call the (overrideable) retrieveSourceMap function to get the source map. var urlAndMap = retrieveSourceMap(position.source); if (urlAndMap) { sourceMap = sourceMapCache[position.source] = { url: urlAndMap.url, map: new SourceMapConsumer(urlAndMap.map) }; // Load all sources stored inline with the source map into the file cache // to pretend like they are already loaded. They may not exist on disk. if (sourceMap.map.sourcesContent) { sourceMap.map.sources.forEach(function(source, i) { var contents = sourceMap.map.sourcesContent[i]; if (contents) { var url = supportRelativeURL(sourceMap.url, source); fileContentsCache[url] = contents; } }); } } else { sourceMap = sourceMapCache[position.source] = { url: null, map: null }; } } // Resolve the source URL relative to the URL of the source map if (sourceMap && sourceMap.map && typeof sourceMap.map.originalPositionFor === 'function') { var originalPosition = sourceMap.map.originalPositionFor(position); // Only return the original position if a matching line was found. If no // matching line is found then we return position instead, which will cause // the stack trace to print the path and line for the compiled file. It is // better to give a precise location in the compiled file than a vague // location in the original file. if (originalPosition.source !== null) { originalPosition.source = supportRelativeURL( sourceMap.url, originalPosition.source); return originalPosition; } } return position; } // Parses code generated by FormatEvalOrigin(), a function inside V8: // https://code.google.com/p/v8/source/browse/trunk/src/messages.js function mapEvalOrigin(origin) { // Most eval() calls are in this format var match = /^eval at ([^(]+) \((.+):(\d+):(\d+)\)$/.exec(origin); if (match) { var position = mapSourcePosition({ source: match[2], line: +match[3], column: match[4] - 1 }); return 'eval at ' + match[1] + ' (' + position.source + ':' + position.line + ':' + (position.column + 1) + ')'; } // Parse nested eval() calls using recursion match = /^eval at ([^(]+) \((.+)\)$/.exec(origin); if (match) { return 'eval at ' + match[1] + ' (' + mapEvalOrigin(match[2]) + ')'; } // Make sure we still return useful information if we didn't find anything return origin; } // This is copied almost verbatim from the V8 source code at // https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The // implementation of wrapCallSite() used to just forward to the actual source // code of CallSite.prototype.toString but unfortunately a new release of V8 // did something to the prototype chain and broke the shim. The only fix I // could find was copy/paste. 
function CallSiteToString() { var fileName; var fileLocation = ""; if (this.isNative()) { fileLocation = "native"; } else { fileName = this.getScriptNameOrSourceURL(); if (!fileName && this.isEval()) { fileLocation = this.getEvalOrigin(); fileLocation += ", "; // Expecting source position to follow. } if (fileName) { fileLocation += fileName; } else { // Source code does not originate from a file and is not native, but we // can still get the source position inside the source string, e.g. in // an eval string. fileLocation += "<anonymous>"; } var lineNumber = this.getLineNumber(); if (lineNumber != null) { fileLocation += ":" + lineNumber; var columnNumber = this.getColumnNumber(); if (columnNumber) { fileLocation += ":" + columnNumber; } } } var line = ""; var functionName = this.getFunctionName(); var addSuffix = true; var isConstructor = this.isConstructor(); var isMethodCall = !(this.isToplevel() || isConstructor); if (isMethodCall) { var typeName = this.getTypeName(); // Fixes shim to be backward compatable with Node v0 to v4 if (typeName === "[object Object]") { typeName = "null"; } var methodName = this.getMethodName(); if (functionName) { if (typeName && functionName.indexOf(typeName) != 0) { line += typeName + "."; } line += functionName; if (methodName && functionName.indexOf("." + methodName) != functionName.length - methodName.length - 1) { line += " [as " + methodName + "]"; } } else { line += typeName + "." + (methodName || "<anonymous>"); } } else if (isConstructor) { line += "new " + (functionName || "<anonymous>"); } else if (functionName) { line += functionName; } else { line += fileLocation; addSuffix = false; } if (addSuffix) { line += " (" + fileLocation + ")"; } return line; } function cloneCallSite(frame) { var object = {}; Object.getOwnPropertyNames(Object.getPrototypeOf(frame)).forEach(function(name) { object[name] = /^(?:is|get)/.test(name) ? function() { return frame[name].call(frame); } : frame[name]; }); object.toString = CallSiteToString; return object; } function wrapCallSite(frame) { if(frame.isNative()) { return frame; } // Most call sites will return the source file from getFileName(), but code // passed to eval() ending in "//# sourceURL=..." will return the source file // from getScriptNameOrSourceURL() instead var source = frame.getFileName() || frame.getScriptNameOrSourceURL(); if (source) { var line = frame.getLineNumber(); var column = frame.getColumnNumber() - 1; // Fix position in Node where some (internal) code is prepended. 
// See https://github.com/evanw/node-source-map-support/issues/36 var headerLength = 62; if (line === 1 && column > headerLength && !isInBrowser() && !frame.isEval()) { column -= headerLength; } var position = mapSourcePosition({ source: source, line: line, column: column }); frame = cloneCallSite(frame); var originalFunctionName = frame.getFunctionName; frame.getFunctionName = function() { return position.name || originalFunctionName(); }; frame.getFileName = function() { return position.source; }; frame.getLineNumber = function() { return position.line; }; frame.getColumnNumber = function() { return position.column + 1; }; frame.getScriptNameOrSourceURL = function() { return position.source; }; return frame; } // Code called using eval() needs special handling var origin = frame.isEval() && frame.getEvalOrigin(); if (origin) { origin = mapEvalOrigin(origin); frame = cloneCallSite(frame); frame.getEvalOrigin = function() { return origin; }; return frame; } // If we get here then we were unable to change the source position return frame; } // This function is part of the V8 stack trace API, for more info see: // https://v8.dev/docs/stack-trace-api function prepareStackTrace(error, stack) { if (emptyCacheBetweenOperations) { fileContentsCache = {}; sourceMapCache = {}; } var name = error.name || 'Error'; var message = error.message || ''; var errorString = name + ": " + message; return errorString + stack.map(function(frame) { return '\n at ' + wrapCallSite(frame); }).join(''); } // Generate position and snippet of original source with pointer function getErrorSource(error) { var match = /\n at [^(]+ \((.*):(\d+):(\d+)\)/.exec(error.stack); if (match) { var source = match[1]; var line = +match[2]; var column = +match[3]; // Support the inline sourceContents inside the source map var contents = fileContentsCache[source]; // Support files on disk if (!contents && fs && fs.existsSync(source)) { try { contents = fs.readFileSync(source, 'utf8'); } catch (er) { contents = ''; } } // Format the line from the original source code like node does if (contents) { var code = contents.split(/(?:\r\n|\r|\n)/)[line - 1]; if (code) { return source + ':' + line + '\n' + code + '\n' + new Array(column).join(' ') + '^'; } } } return null; } function printErrorAndExit (error) { var source = getErrorSource(error); // Ensure error is printed synchronously and not truncated if (process.stderr._handle && process.stderr._handle.setBlocking) { process.stderr._handle.setBlocking(true); } if (source) { console.error(); console.error(source); } console.error(error.stack); process.exit(1); } function shimEmitUncaughtException () { var origEmit = process.emit; process.emit = function (type) { if (type === 'uncaughtException') { var hasStack = (arguments[1] && arguments[1].stack); var hasListeners = (this.listeners(type).length > 0); if (hasStack && !hasListeners) { return printErrorAndExit(arguments[1]); } } return origEmit.apply(this, arguments); }; } var originalRetrieveFileHandlers = retrieveFileHandlers.slice(0); var originalRetrieveMapHandlers = retrieveMapHandlers.slice(0); exports.wrapCallSite = wrapCallSite; exports.getErrorSource = getErrorSource; exports.mapSourcePosition = mapSourcePosition; exports.retrieveSourceMap = retrieveSourceMap; exports.install = function(options) { options = options || {}; if (options.environment) { environment = options.environment; if (["node", "browser", "auto"].indexOf(environment) === -1) { throw new Error("environment " + environment + " was unknown. 
Available options are {auto, browser, node}") } } // Allow sources to be found by methods other than reading the files // directly from disk. if (options.retrieveFile) { if (options.overrideRetrieveFile) { retrieveFileHandlers.length = 0; } retrieveFileHandlers.unshift(options.retrieveFile); } // Allow source maps to be found by methods other than reading the files // directly from disk. if (options.retrieveSourceMap) { if (options.overrideRetrieveSourceMap) { retrieveMapHandlers.length = 0; } retrieveMapHandlers.unshift(options.retrieveSourceMap); } // Support runtime transpilers that include inline source maps if (options.hookRequire && !isInBrowser()) { var Module; try { Module = require('module'); } catch (err) { // NOP: Loading in catch block to convert webpack error to warning. } var $compile = Module.prototype._compile; if (!$compile.__sourceMapSupport) { Module.prototype._compile = function(content, filename) { fileContentsCache[filename] = content; sourceMapCache[filename] = undefined; return $compile.call(this, content, filename); }; Module.prototype._compile.__sourceMapSupport = true; } } // Configure options if (!emptyCacheBetweenOperations) { emptyCacheBetweenOperations = 'emptyCacheBetweenOperations' in options ? options.emptyCacheBetweenOperations : false; } // Install the error reformatter if (!errorFormatterInstalled) { errorFormatterInstalled = true; Error.prepareStackTrace = prepareStackTrace; } if (!uncaughtShimInstalled) { var installHandler = 'handleUncaughtExceptions' in options ? options.handleUncaughtExceptions : true; // Provide the option to not install the uncaught exception handler. This is // to support other uncaught exception handlers (in test frameworks, for // example). If this handler is not installed and there are no other uncaught // exception handlers, uncaught exceptions will be caught by node's built-in // exception handler and the process will still be terminated. However, the // generated JavaScript code will be shown above the stack trace instead of // the original source code. if (installHandler && hasGlobalProcessEventEmitter()) { uncaughtShimInstalled = true; shimEmitUncaughtException(); } } }; exports.resetRetrieveHandlers = function() { retrieveFileHandlers.length = 0; retrieveMapHandlers.length = 0; retrieveFileHandlers = originalRetrieveFileHandlers.slice(0); retrieveMapHandlers = originalRetrieveMapHandlers.slice(0); retrieveSourceMap = handlerExec(retrieveMapHandlers); retrieveFile = handlerExec(retrieveFileHandlers); } },{"buffer-from":"buffer-from","fs":false,"module":false,"path":false,"source-map":"source-map"}],"source-map":[function(require,module,exports){ /* * Copyright 2009-2011 Mozilla Foundation and contributors * Licensed under the New BSD license. See LICENSE.txt or: * http://opensource.org/licenses/BSD-3-Clause */ exports.SourceMapGenerator = require('./lib/source-map-generator').SourceMapGenerator; exports.SourceMapConsumer = require('./lib/source-map-consumer').SourceMapConsumer; exports.SourceNode = require('./lib/source-node').SourceNode; },{"./lib/source-map-consumer":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-map-consumer.js","./lib/source-map-generator":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-map-generator.js","./lib/source-node":"D:\\Catalin\\Munca\\privatesky\\node_modules\\source-map\\lib\\source-node.js"}]},{},["D:\\Catalin\\Munca\\privatesky\\builds\\tmp\\consoleTools_intermediar.js"]) //# sourceMappingURL=consoleTools.js.map
getArg
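The install() options handled in the module above can be exercised as follows; a minimal usage sketch, assuming the bundled code is consumed as the published source-map-support package (the option names are taken directly from the install() implementation shown above):

// Minimal usage sketch for the install() options shown above.
var sourceMapSupport = require('source-map-support');

sourceMapSupport.install({
  environment: 'node',             // 'auto', 'browser' or 'node'
  handleUncaughtExceptions: true,  // install the uncaught-exception formatter
  hookRequire: true,               // cache inline maps produced by runtime transpilers
  // Optional custom lookup: return {url, map} or null to fall back to the defaults.
  retrieveSourceMap: function (source) {
    return null; // defer to the built-in handlers
  }
});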
testRemoveImg.py
import unittest, os.path, os
from PIL import ImageGrab
from .removeImg import remove_img

class TestRemove_img(unittest.TestCase):
    # Should delete an image
if __name__ == '__main__': unittest.main()
    def test_remove_img_remove(self):
        img = ImageGrab.grab((0,0,500,500))
        path = "tmp.png"
        img.save(path)
        img.close()
        pathExists = os.path.exists(path)
        self.assertTrue(pathExists)
        remove_img(path)
        pathExists = os.path.exists(path)
        self.assertFalse(pathExists)

    # Should delete an image in a subdirectory
    def test_remove_img_subdir(self):
        dirname = 'img'
        os.mkdir(dirname)
        img = ImageGrab.grab((0,0,500,500))
        path = 'img/img.png'
        img.save(path)
        exist = os.path.exists(path)
        self.assertTrue(exist)
        remove_img(path)
        exist = os.path.exists(path)
        self.assertFalse(exist)
        os.rmdir(dirname)

    # Should raise an error when deleting an image
    def test_remove_img_except(self):
        path = 'img/img.png'
        path2 = 'img.ssd'
        with self.assertRaises(TypeError):
            remove_img(path)
        with self.assertRaises(TypeError):
            remove_img(path2)
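The removeImg module exercised by these tests is not part of this sample; below is a hypothetical sketch of a remove_img implementation consistent with the assertions above (delete an existing .png, raise TypeError otherwise). The extension check and error messages are assumptions, not the original code.

# Hypothetical sketch of removeImg.remove_img, inferred from the tests above;
# the .png check and the error messages are assumptions, not the original code.
import os

def remove_img(path):
    if not path.lower().endswith('.png'):
        raise TypeError('not a supported image file: %s' % path)
    if not os.path.exists(path):
        raise TypeError('image does not exist: %s' % path)
    os.remove(path)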
serial_dma.rs
//! Example of transmitting data over serial interface using DMA. //! For this to work, the PA9 and PA10 pins must be connected. //! Target board: STM32F3DISCOVERY #![no_std] #![no_main] use panic_semihosting as _; use cortex_m::singleton; use cortex_m_rt::entry; use stm32f3xx_hal::{pac, prelude::*, serial::Serial}; #[entry] fn
() -> ! { let dp = pac::Peripherals::take().unwrap(); let mut flash = dp.FLASH.constrain(); let mut rcc = dp.RCC.constrain(); let clocks = rcc.cfgr.freeze(&mut flash.acr); let mut gpioa = dp.GPIOA.split(&mut rcc.ahb); let pins = ( gpioa.pa9.into_af7(&mut gpioa.moder, &mut gpioa.afrh), gpioa.pa10.into_af7(&mut gpioa.moder, &mut gpioa.afrh), ); let serial = Serial::usart1(dp.USART1, pins, 9600.bps(), clocks, &mut rcc.apb2); let (tx, rx) = serial.split(); let dma1 = dp.DMA1.split(&mut rcc.ahb); // the data we are going to send over serial let tx_buf = singleton!(: [u8; 9] = *b"hello DMA").unwrap(); // the buffer we are going to receive the transmitted data in let rx_buf = singleton!(: [u8; 9] = [0; 9]).unwrap(); // DMA channel selection depends on the peripheral: // - USART1: TX = 4, RX = 5 // - USART2: TX = 6, RX = 7 // - USART3: TX = 3, RX = 2 let (tx_channel, rx_channel) = (dma1.ch4, dma1.ch5); // start separate DMAs for sending and receiving the data let sending = tx.write_all(tx_buf, tx_channel); let receiving = rx.read_exact(rx_buf, rx_channel); // block until all data was transmitted and received let (tx_buf, tx_channel, tx) = sending.wait(); let (rx_buf, rx_channel, rx) = receiving.wait(); assert_eq!(tx_buf, rx_buf); // After a transfer is finished its parts can be re-used for another one. tx_buf.copy_from_slice(b"hi again!"); let sending = tx.write_all(tx_buf, tx_channel); let receiving = rx.read_exact(rx_buf, rx_channel); let (tx_buf, ..) = sending.wait(); let (rx_buf, ..) = receiving.wait(); assert_eq!(tx_buf, rx_buf); loop { continue; } }
main
slots_manager.py
# -*- coding: utf-8 -*-
"""
@author: %(Mikel Val Calvo)s
@email: %([email protected])
@institution: %(Dpto. de Inteligencia Artificial, Universidad Nacional de Educación a Distancia (UNED))
@DOI: 10.5281/zenodo.3759306
"""
#%%
class SlotsManager:
    # Initialize the list of callbacks
    def __init__(self):
        self.callbacks = []

    # Run the callbacks in the list
    def trigger(self):
        for callback in self.callbacks:
            callback()
            print(callback)
#        [callback() for callback in self.callbacks]

    # Append a slot to the list of callbacks
    def ap
elf, slot): self.callbacks.append(slot) print(slot)
pend(s
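A short usage sketch for the SlotsManager defined above; the callback name is illustrative only.

# Usage sketch for SlotsManager; the callback is illustrative.
manager = SlotsManager()

def on_new_sample():               # hypothetical slot
    print("new sample received")

manager.append(on_new_sample)      # register the slot
manager.trigger()                  # runs (and prints) every registered callback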
s349_intersection_of_two_arrays.rs
use std::collections::HashMap; struct Solution; impl Solution { pub fn intersection(nums1: Vec<i32>, nums2: Vec<i32>) -> Vec<i32> { let mut mapper = HashMap::new(); for num in nums1 { mapper.insert(num, true); } let mut result: Vec<i32> = vec![]; for num in nums2 { if mapper.contains_key(&num)
} result } } struct Example { input: (Vec<i32>, Vec<i32>), output: Vec<i32>, } #[test] pub fn test() { let examples = vec![ Example { input: (vec![1, 2, 2, 1], vec![2, 2]), output: vec![2], }, Example { input: (vec![4, 9, 5], vec![9, 4, 9, 8, 4]), output: vec![4, 9], }, ]; for example in examples { let (nums1, nums2) = example.input; let mut output = Solution::intersection(nums1, nums2); output.sort(); assert_eq!(output, example.output); } }
{ mapper.remove(&num); result.push(num); }
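For comparison, the same intersection (LeetCode 349) can also be expressed with HashSet; a sketch of an alternative approach — the function name is illustrative and not part of the original solution.

// Alternative sketch using HashSet; the name is illustrative, not the original API.
use std::collections::HashSet;

pub fn intersection_with_sets(nums1: Vec<i32>, nums2: Vec<i32>) -> Vec<i32> {
    let a: HashSet<i32> = nums1.into_iter().collect();
    let b: HashSet<i32> = nums2.into_iter().collect();
    // intersection() yields references to the common elements; copy them out.
    a.intersection(&b).copied().collect()
}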
template_tests.js
var divRendersTo = function (test, div, html) { Tracker.flush({_throwFirstError: true}); var actual = canonicalizeHtml(div.innerHTML); test.equal(actual, html); }; var nodesToArray = function (array) { // Starting in underscore 1.4, _.toArray does not work right on a node // list in IE8. This is a workaround to support IE8. return _.map(array, _.identity); }; // maybe use created callback on the template instead of this? var extendTemplateWithInit = function (template, initFunc) { var tmpl = new Template(template.viewName+'-extended', template.renderFunction); tmpl.constructView = function (/*args*/) { var view = Template.prototype.constructView.apply(this, arguments); initFunc(view); return view; }; return tmpl; }; // Make a "clone" of origTemplate (but not its helpers) var copyTemplate = function (origTemplate) { return new Template(origTemplate.viewName, origTemplate.renderFunction); }; Tinytest.add("spacebars-tests - template_tests - simple helper", function (test) { var baseTmpl = Template.spacebars_template_test_simple_helper; var tmpl1 = copyTemplate(baseTmpl); var R = ReactiveVar(1); tmpl1.helpers({ foo: function (x) { return x + R.get(); }, bar: function () { return 123; } }); var div = renderToDiv(tmpl1); test.equal(canonicalizeHtml(div.innerHTML), "124"); R.set(2); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "125"); // Test that `{{foo bar}}` throws if `foo` is missing or not a function. var tmpl2 = copyTemplate(baseTmpl); tmpl2.helpers({foo: 3}); test.throws(function () { renderToDiv(tmpl2); }, /Can't call non-function/); var tmpl3 = copyTemplate(baseTmpl); test.throws(function () { renderToDiv(tmpl3); }, /No such function/); var tmpl4 = copyTemplate(baseTmpl); tmpl4.helpers({foo: function () {}}); // doesn't throw div = renderToDiv(tmpl4); test.equal(canonicalizeHtml(div.innerHTML), ''); // now make "foo" is a function in the data context var tmpl5 = copyTemplate(baseTmpl); tmpl5.helpers({ bar: function () { return 123; } }); R = ReactiveVar(1); div = renderToDiv(tmpl5, { foo: function (x) { return x + R.get(); } }); test.equal(canonicalizeHtml(div.innerHTML), "124"); R.set(2); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "125"); test.throws(function () { renderToDiv(tmpl5, {foo: 3}); }, /Can't call non-function/); test.throws(function () { renderToDiv(tmpl5, {foo: null}); }, /No such function/); test.throws(function () { renderToDiv(tmpl5, {}); }, /No such function/); }); Tinytest.add("spacebars-tests - template_tests - member helper", function (test) { var baseTmpl = Template.spacebars_template_test_member_helper; // Test that returning function member of a data object can be used as a // a helper within a template, in this case, {{user.prefixName 'Mr.'}} var tmpl1 = copyTemplate(baseTmpl); var name = ReactiveVar('foo'); tmpl1.helpers({ user: function() { return { prefixName: function(prefix) { return prefix + ' ' + name.get(); }, }; }, }); var div = renderToDiv(tmpl1); test.equal(canonicalizeHtml(div.innerHTML), 'Mr. foo'); name.set('bar'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'Mr. bar'); // Test that `{{user.prefixName 'Mr.'}}` returns nothing if `user` is not // not a function or is completely missing from helpers. 
var tmpl2 = copyTemplate(baseTmpl); tmpl2.helpers({user: 3}); div = renderToDiv(tmpl2); test.equal(canonicalizeHtml(div.innerHTML), ''); var tmpl3 = copyTemplate(baseTmpl); div = renderToDiv(tmpl3); test.equal(canonicalizeHtml(div.innerHTML), ''); // Test that `{{user.prefixName 'Mr.'}}` returns nothing if the `user` // returns null. Before fixing Meteor issue #5441, this test would throw. var tmpl4 = copyTemplate(baseTmpl); tmpl4.helpers({user: function () {}}); div = renderToDiv(tmpl4); test.equal(canonicalizeHtml(div.innerHTML), ''); // One more test, similar to the above, but where `user` is not null but // `user.prefixName` is. This test was also broken prior to the fix. var tmpl4 = copyTemplate(baseTmpl); tmpl4.helpers({user: function () { return {prefixName: null}; }}); div = renderToDiv(tmpl4); test.equal(canonicalizeHtml(div.innerHTML), ''); }); Tinytest.add("spacebars-tests - template_tests - dynamic template", function (test) { var tmpl = Template.spacebars_template_test_dynamic_template; var aaa = Template.spacebars_template_test_aaa; var bbb = Template.spacebars_template_test_bbb; var R = ReactiveVar("aaa"); tmpl.helpers({foo: function () { return R.get() === 'aaa' ? aaa : bbb; }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "aaa"); R.set('bbb'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "bbb"); }); Tinytest.add("spacebars-tests - template_tests - interpolate attribute", function (test) { var tmpl = Template.spacebars_template_test_interpolate_attribute; tmpl.helpers({ foo: function (x) { return x+1; }, bar: function () { return 123; } }); var div = renderToDiv(tmpl); test.equal($(div).find('div')[0].className, "aaa124zzz"); }); Tinytest.add("spacebars-tests - template_tests - dynamic attrs", function (test) { var tmpl = Template.spacebars_template_test_dynamic_attrs; var R2 = ReactiveVar({x: "X"}); var R3 = ReactiveVar('selected'); tmpl.helpers({ attrsObj: function () { return R2.get(); }, singleAttr: function () { return R3.get(); } }); var div = renderToDiv(tmpl); var span = $(div).find('span')[0]; test.equal(span.innerHTML, 'hi'); test.isTrue(span.hasAttribute('selected')); test.equal(span.getAttribute('x'), 'X'); R2.set({y: "Y", z: "Z"}); R3.set(''); Tracker.flush(); test.equal(canonicalizeHtml(span.innerHTML), 'hi'); test.isFalse(span.hasAttribute('selected')); test.isFalse(span.hasAttribute('x')); test.equal(span.getAttribute('y'), 'Y'); test.equal(span.getAttribute('z'), 'Z'); }); Tinytest.add("spacebars-tests - template_tests - triple", function (test) { var tmpl = Template.spacebars_template_test_triple; var R = ReactiveVar('<span class="hi">blah</span>'); tmpl.helpers({ html: function () { return R.get(); } }); var div = renderToDiv(tmpl); var elems = $(div).find("> *"); test.equal(elems.length, 1); test.equal(elems[0].nodeName, 'SPAN'); var span = elems[0]; test.equal(span.className, 'hi'); test.equal(span.innerHTML, 'blah'); R.set('asdf'); Tracker.flush(); elems = $(div).find("> *"); test.equal(elems.length, 0); test.equal(canonicalizeHtml(div.innerHTML), 'asdf'); R.set('<span class="hi">blah</span>'); Tracker.flush(); elems = $(div).find("> *"); test.equal(elems.length, 1); test.equal(elems[0].nodeName, 'SPAN'); span = elems[0]; test.equal(span.className, 'hi'); test.equal(canonicalizeHtml(span.innerHTML), 'blah'); var tmpl = Template.spacebars_template_test_triple2; tmpl.helpers({ html: function () {}, html2: function () { return null; } }); // no tmpl.html3 div = renderToDiv(tmpl); 
test.equal(canonicalizeHtml(div.innerHTML), 'xy'); }); Tinytest.add("spacebars-tests - template_tests - inclusion args", function (test) { var tmpl = Template.spacebars_template_test_inclusion_args; var R = ReactiveVar(Template.spacebars_template_test_aaa); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); // `{{> foo bar}}`, with `foo` resolving to Template.aaa, // which consists of "aaa" test.equal(canonicalizeHtml(div.innerHTML), 'aaa'); R.set(Template.spacebars_template_test_bbb); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'bbb'); ////// Ok, now `foo` *is* Template.aaa tmpl.helpers({foo: Template.spacebars_template_test_aaa}); div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'aaa'); ////// Ok, now `foo` is a template that takes an argument; bar is a string. tmpl.helpers({ foo: Template.spacebars_template_test_bracketed_this, bar: 'david' }); div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), '[david]'); ////// Now `foo` is a template that takes an arg; bar is a function. tmpl.helpers({foo: Template.spacebars_template_test_span_this}); R = ReactiveVar('david'); tmpl.helpers({bar: function () { return R.get(); }}); div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), '<span>david</span>'); var span1 = div.querySelector('span'); R.set('avi'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>avi</span>'); var span2 = div.querySelector('span'); test.isTrue(span1 === span2); }); Tinytest.add("spacebars-tests - template_tests - inclusion args 2", function (test) { // `{{> foo bar q=baz}}` var tmpl = Template.spacebars_template_test_inclusion_args2; tmpl.helpers({ foo: Template.spacebars_template_test_span_this, bar: function (options) { return options.hash.q; } }); var R = ReactiveVar('david!'); tmpl.helpers({ baz: function () { return R.get().slice(0,5); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), '<span>david</span>'); var span1 = div.querySelector('span'); R.set('brillo'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>brill</span>'); var span2 = div.querySelector('span'); test.isTrue(span1 === span2); }); Tinytest.add("spacebars-tests - template_tests - inclusion dotted args", function (test) { // `{{> foo bar.baz}}` var tmpl = Template.spacebars_template_test_inclusion_dotted_args; var initCount = 0; tmpl.helpers({ foo: extendTemplateWithInit( Template.spacebars_template_test_bracketed_this, function () { initCount++; }) }); var R = ReactiveVar('david'); tmpl.helpers({ bar: function () { // make sure `this` is bound correctly return { baz: this.symbol + R.get() }; } }); var div = renderToDiv(tmpl, {symbol:'%'}); test.equal(initCount, 1); test.equal(canonicalizeHtml(div.innerHTML), '[%david]'); R.set('avi'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '[%avi]'); // check that invalidating the argument to `foo` doesn't require // creating a new `foo`. 
test.equal(initCount, 1); }); Tinytest.add("spacebars-tests - template_tests - inclusion slashed args", function (test) { // `{{> foo bar/baz}}` var tmpl = Template.spacebars_template_test_inclusion_dotted_args; var initCount = 0; tmpl.helpers({foo: extendTemplateWithInit( Template.spacebars_template_test_bracketed_this, function () { initCount++; }) }); var R = ReactiveVar('david'); tmpl.helpers({bar: function () { // make sure `this` is bound correctly return { baz: this.symbol + R.get() }; }}); var div = renderToDiv(tmpl, {symbol:'%'}); test.equal(initCount, 1); test.equal(canonicalizeHtml(div.innerHTML), '[%david]'); }); Tinytest.add("spacebars-tests - template_tests - block helper", function (test) { // test the case where `foo` is a calculated template that changes // reactively. // `{{#foo}}bar{{else}}baz{{/foo}}` var tmpl = Template.spacebars_template_test_block_helper; var R = ReactiveVar(Template.spacebars_template_test_content); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "bar"); R.set(Template.spacebars_template_test_elsecontent); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "baz"); }); Tinytest.add("spacebars-tests - template_tests - block helper function with one string arg", function (test) { // `{{#foo "bar"}}content{{/foo}}` var tmpl = Template.spacebars_template_test_block_helper_function_one_string_arg; tmpl.helpers({foo: function () { if (String(this) === "bar") return Template.spacebars_template_test_content; else return null; }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "content"); }); Tinytest.add("spacebars-tests - template_tests - block helper function with one helper arg", function (test) { var tmpl = Template.spacebars_template_test_block_helper_function_one_helper_arg; var R = ReactiveVar("bar"); tmpl.helpers({ bar: function () { return R.get(); }, foo: function () { if (String(this) === "bar") return Template.spacebars_template_test_content; else return null; } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "content"); R.set("baz"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), ""); }); Tinytest.add("spacebars-tests - template_tests - block helper component with one helper arg", function (test) { var tmpl = Template.spacebars_template_test_block_helper_component_one_helper_arg; var R = ReactiveVar(true); tmpl.helpers({bar: function () { return R.get(); }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "content"); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), ""); }); Tinytest.add("spacebars-tests - template_tests - block helper component with three helper args", function (test) { var tmpl = Template.spacebars_template_test_block_helper_component_three_helper_args; var R = ReactiveVar("bar"); tmpl.helpers({ bar_or_baz: function () { return R.get(); }, equals: function (x, y) { return x === y; } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "content"); R.set("baz"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), ""); }); Tinytest.add("spacebars-tests - template_tests - block helper with dotted arg", function (test) { var tmpl = Template.spacebars_template_test_block_helper_dotted_arg; var R1 = ReactiveVar(1); var R2 = ReactiveVar(10); var R3 = ReactiveVar(100); var initCount = 0; tmpl.helpers({ foo: extendTemplateWithInit( Template.spacebars_template_test_bracketed_this, function () { 
initCount++; }), bar: function () { return { r1: R1.get(), baz: function (r3) { return this.r1 + R2.get() + r3; } }; }, qux: function () { return R3.get(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "[111]"); test.equal(initCount, 1); R1.set(2); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[112]"); test.equal(initCount, 1); R2.set(20); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[122]"); test.equal(initCount, 1); R3.set(200); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[222]"); test.equal(initCount, 1); R2.set(30); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[232]"); test.equal(initCount, 1); R1.set(3); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[233]"); test.equal(initCount, 1); R3.set(300); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "[333]"); test.equal(initCount, 1); }); Tinytest.add("spacebars-tests - template_tests - nested content", function (test) { // Test that `{{> Template.contentBlock}}` in an `{{#if}}` works. // ``` // <template name="spacebars_template_test_iftemplate"> // {{#if condition}} // {{> Template.contentBlock}} // {{else}} // {{> Template.elseBlock}} // {{/if}} // </template> // ``` // ``` // {{#spacebars_template_test_iftemplate flag}} // hello // {{else}} // world // {{/spacebars_template_test_iftemplate}} // ``` var tmpl = Template.spacebars_template_test_nested_content; var R = ReactiveVar(true); tmpl.helpers({ flag: function () { return R.get(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'world'); R.set(true); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); // Also test that `{{> Template.contentBlock}}` in a custom block helper works. 
tmpl = Template.spacebars_template_test_nested_content2; R = ReactiveVar(true); tmpl.helpers({ x: function () { return R.get(); } }); div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'world'); R.set(true); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); }); Tinytest.add("spacebars-tests - template_tests - if", function (test) { var tmpl = Template.spacebars_template_test_if; var R = ReactiveVar(true); tmpl.helpers({ foo: function () { return R.get(); }, bar: 1, baz: 2 }); var div = renderToDiv(tmpl); var rendersTo = function (html) { divRendersTo(test, div, html); }; rendersTo("1"); R.set(false); rendersTo("2"); }); Tinytest.add("spacebars-tests - template_tests - if in with", function (test) { var tmpl = Template.spacebars_template_test_if_in_with; tmpl.helpers({foo: {bar: "bar"}}); var div = renderToDiv(tmpl); divRendersTo(test, div, "bar bar"); }); Tinytest.add("spacebars-tests - template_tests - each on cursor", function (test) { var tmpl = Template.spacebars_template_test_each; var coll = new Mongo.Collection(null); tmpl.helpers({ items: function () { return coll.find({}, {sort: {pos: 1}}); } }); var div = renderToDiv(tmpl); var rendersTo = function (html) { divRendersTo(test, div, html); }; rendersTo("else-clause"); coll.insert({text: "one", pos: 1}); rendersTo("one"); coll.insert({text: "two", pos: 2}); rendersTo("one two"); coll.update({text: "two"}, {$set: {text: "three"}}); rendersTo("one three"); coll.update({text: "three"}, {$set: {pos: 0}}); rendersTo("three one"); coll.remove({}); rendersTo("else-clause"); }); Tinytest.add("spacebars-tests - template_tests - each on array", function (test) { var tmpl = Template.spacebars_template_test_each; var R = new ReactiveVar([]); tmpl.helpers({ items: function () { return R.get(); }, text: function () { return this; } }); var div = renderToDiv(tmpl); var rendersTo = function (html) { divRendersTo(test, div, html); }; rendersTo("else-clause"); R.set([""]); rendersTo(""); R.set(["x", "", "toString"]); rendersTo("x toString"); R.set(["toString"]); rendersTo("toString"); R.set([]); rendersTo("else-clause"); R.set([0, 1, 2]); rendersTo("0 1 2"); R.set([]); rendersTo("else-clause"); }); Tinytest.add("spacebars-tests - template_tests - ..", function (test) { var tmpl = Template.spacebars_template_test_dots; Template.spacebars_template_test_dots_subtemplate.helpers({ getTitle: function (from) { return from.title; } }); tmpl.helpers({ foo: { title: "foo", bar: {title: "bar", items: [{title: "item"}]} } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), [ "A", "B", "C", "D", // {{> spacebars_template_test_dots_subtemplate}} "TITLE", "1item", "2item", "3bar", "4foo", "GETTITLE", "5item", "6bar", "7foo", // {{> spacebars_template_test_dots_subtemplate ..}} "TITLE", "1bar", "2bar", "3item", "4bar", "GETTITLE", "5bar", "6item", "7bar"].join(" ")); }); Tinytest.add("spacebars-tests - template_tests - select tags", function (test) { var tmpl = Template.spacebars_template_test_select_tag; // {label: (string)} var optgroups = new Mongo.Collection(null); // {optgroup: (id), value: (string), selected: (boolean), label: (string)} var options = new Mongo.Collection(null); tmpl.helpers({ optgroups: function () { return optgroups.find(); }, options: function () { return options.find({optgroup: this._id}); }, selectedAttr: function () { return this.selected ? 
{selected: true} : {}; } }); var div = renderToDiv(tmpl); var selectEl = $(div).find('select')[0]; // returns canonicalized contents of `div` in the form eg // ["<select>", "</select>"]. strip out selected attributes -- we // verify correctness by observing the `selected` property var divContent = function () { return canonicalizeHtml( div.innerHTML.replace(/selected="[^"]*"/g, '').replace(/selected/g, '')) .replace(/\>\s*\</g, '>\n<') .split('\n'); }; test.equal(divContent(), ["<select>", "</select>"]); var optgroup1 = optgroups.insert({label: "one"}); var optgroup2 = optgroups.insert({label: "two"}); test.equal(divContent(), [ '<select>', '<optgroup label="one">', '</optgroup>', '<optgroup label="two">', '</optgroup>', '</select>' ]); options.insert({optgroup: optgroup1, value: "value1", selected: false, label: "label1"}); options.insert({optgroup: optgroup1, value: "value2", selected: true, label: "label2"}); test.equal(divContent(), [ '<select>', '<optgroup label="one">', '<option value="value1">label1</option>', '<option value="value2">label2</option>', '</optgroup>', '<optgroup label="two">', '</optgroup>', '</select>' ]); test.equal(selectEl.value, "value2"); test.equal($(selectEl).find('option')[0].selected, false); test.equal($(selectEl).find('option')[1].selected, true); // swap selection options.update({value: "value1"}, {$set: {selected: true}}); options.update({value: "value2"}, {$set: {selected: false}}); Tracker.flush(); test.equal(divContent(), [ '<select>', '<optgroup label="one">', '<option value="value1">label1</option>', '<option value="value2">label2</option>', '</optgroup>', '<optgroup label="two">', '</optgroup>', '</select>' ]); test.equal(selectEl.value, "value1"); test.equal($(selectEl).find('option')[0].selected, true); test.equal($(selectEl).find('option')[1].selected, false); // change value and label options.update({value: "value1"}, {$set: {value: "value1.0"}}); options.update({value: "value2"}, {$set: {label: "label2.0"}}); Tracker.flush(); test.equal(divContent(), [ '<select>', '<optgroup label="one">', '<option value="value1.0">label1</option>', '<option value="value2">label2.0</option>', '</optgroup>', '<optgroup label="two">', '</optgroup>', '</select>' ]); test.equal(selectEl.value, "value1.0"); test.equal($(selectEl).find('option')[0].selected, true); test.equal($(selectEl).find('option')[1].selected, false); // unselect and then select both options. normally, the second is // selected (since it got selected later). then switch to <select // multiple="">. both should be selected. 
options.update({}, {$set: {selected: false}}, {multi: true}); Tracker.flush(); options.update({}, {$set: {selected: true}}, {multi: true}); Tracker.flush(); test.equal($(selectEl).find('option')[0].selected, false); test.equal($(selectEl).find('option')[1].selected, true); selectEl.multiple = true; // allow multiple selection options.update({}, {$set: {selected: false}}, {multi: true}); Tracker.flush(); options.update({}, {$set: {selected: true}}, {multi: true}); Tracker.flush(); test.equal($(selectEl).find('option')[0].selected, true); test.equal($(selectEl).find('option')[1].selected, true); }); Tinytest.add('spacebars-tests - template_tests - {{#with}} falsy; issue #770', function (test) { Template.test_template_issue770.helpers({ value1: function () { return "abc"; }, value2: function () { return false; } }); var div = renderToDiv(Template.test_template_issue770); test.equal(canonicalizeHtml(div.innerHTML), "abc xxx abc"); }); Tinytest.add("spacebars-tests - template_tests - tricky attrs", function (test) { var tmpl = Template.spacebars_template_test_tricky_attrs; var R = ReactiveVar('foo'); tmpl.helpers({ theType: function () { return 'text'; }, theClass: function () { return R.get(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML).slice(0, 30), '<input type="text"><input class="foo" type="checkbox">'.slice(0, 30)); R.set('bar'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<input type="text"><input class="bar" type="checkbox">'); }); Tinytest.add('spacebars-tests - template_tests - no data context', function (test) { var tmpl = Template.spacebars_template_test_no_data; // failure is if an exception is thrown here var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'asdf'); }); Tinytest.add('spacebars-tests - template_tests - textarea', function (test) { var tmpl = Template.spacebars_template_test_textarea; var R = ReactiveVar('hello'); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); var textarea = div.querySelector('textarea'); test.equal(textarea.value, 'hello'); R.set('world'); Tracker.flush(); test.equal(textarea.value, 'world'); }); Tinytest.add('spacebars-tests - template_tests - textarea 2', function (test) { var tmpl = Template.spacebars_template_test_textarea2; var R = ReactiveVar(true); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); var textarea = div.querySelector('textarea'); test.equal(textarea.value, '</not a tag>'); R.set(false); Tracker.flush(); test.equal(textarea.value, '<also not a tag>'); R.set(true); Tracker.flush(); test.equal(textarea.value, '</not a tag>'); }); Tinytest.add('spacebars-tests - template_tests - textarea 3', function (test) { var tmpl = Template.spacebars_template_test_textarea3; var R = ReactiveVar('hello'); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); var textarea = div.querySelector('textarea'); test.equal(textarea.id, 'myTextarea'); test.equal(textarea.value, 'hello'); R.set('world'); Tracker.flush(); test.equal(textarea.value, 'world'); }); Tinytest.add('spacebars-tests - template_tests - textarea each', function (test) { var tmpl = Template.spacebars_template_test_textarea_each; var R = ReactiveVar(['APPLE', 'BANANA']); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); var textarea = div.querySelector('textarea'); test.equal(textarea.value, '<not a tag APPLE <not a tag BANANA '); R.set([]); Tracker.flush(); test.equal(textarea.value, 
'<>'); R.set(['CUCUMBER']); Tracker.flush(); test.equal(textarea.value, '<not a tag CUCUMBER '); }); // Ensure that one can call `Meteor.defer` within a rendered callback // triggered by a document insertion that happend in a method stub. // // Why do we have this test? Because you generally can't call // `Meteor.defer` inside a method stub (see // packages/meteor/timers.js). This test verifies that rendered // callbacks don't fire synchronously as part of a method stub. testAsyncMulti('spacebars-tests - template_tests - defer in rendered callbacks', [function (test, expect) { var tmpl = Template.spacebars_template_test_defer_in_rendered; var coll = new Mongo.Collection(null); Meteor.methods({ spacebarsTestInsertEmptyObject: function () { // cause a new instance of `subtmpl` to be placed in the // DOM. verify that it's not fired directly within a method // stub, in which `Meteor.defer` is not allowed. coll.insert({}); } }); tmpl.helpers({ items: function () { return coll.find(); } }); var subtmpl = Template.spacebars_template_test_defer_in_rendered_subtemplate; subtmpl.rendered = expect(function () { // will throw if called in a method stub Meteor.defer(function () {}); }); var div = renderToDiv(tmpl); // not defined on the server, but it's fine since the stub does // the relevant work Meteor._suppress_log(1); Meteor.call("spacebarsTestInsertEmptyObject"); }]); testAsyncMulti('spacebars-tests - template_tests - rendered template is DOM in rendered callbacks', [ function (test, expect) { var tmpl = Template.spacebars_template_test_aaa; tmpl.rendered = expect(function () { test.equal(canonicalizeHtml(div.innerHTML), "aaa"); }); var div = renderToDiv(tmpl); Tracker.flush(); } ]); // Test that in: // // ``` // {{#with someData}} // {{foo}} {{bar}} // {{/with}} // ``` // // ... we run `someData` once even if `foo` re-renders. Tinytest.add('spacebars-tests - template_tests - with someData', function (test) { var tmpl = Template.spacebars_template_test_with_someData; var foo = ReactiveVar('AAA'); var someDataRuns = 0; tmpl.helpers({ someData: function () { someDataRuns++; return {}; }, foo: function () { return foo.get(); }, bar: function () { return 'YO'; } }); var div = renderToDiv(tmpl); test.equal(someDataRuns, 1); test.equal(canonicalizeHtml(div.innerHTML), 'AAA YO'); foo.set('BBB'); Tracker.flush(); test.equal(someDataRuns, 1); test.equal(canonicalizeHtml(div.innerHTML), 'BBB YO'); foo.set('CCC'); Tracker.flush(); test.equal(someDataRuns, 1); test.equal(canonicalizeHtml(div.innerHTML), 'CCC YO'); }); Tinytest.add('spacebars-tests - template_tests - #each stops when rendered element is removed', function (test) { var tmpl = Template.spacebars_template_test_each_stops; var coll = new Mongo.Collection(null); coll.insert({}); tmpl.helpers({items: function () { return coll.find(); }}); var div = renderToDiv(tmpl); divRendersTo(test, div, 'x'); // trigger #each component destroyed $(div).remove(); // insert another document. cursor should no longer be observed so // should have no effect. 
coll.insert({}); divRendersTo(test, div, 'x'); }); Tinytest.add('spacebars-tests - template_tests - block helpers in attribute', function (test) { var tmpl = Template.spacebars_template_test_block_helpers_in_attribute; var coll = new Mongo.Collection(null); tmpl.helpers({ classes: function () { return coll.find({}, {sort: {name: 1}}); }, startsLowerCase: function (name) { return /^[a-z]/.test(name); } }); coll.insert({name: 'David'}); coll.insert({name: 'noodle'}); coll.insert({name: 'donut'}); coll.insert({name: 'frankfurter'}); coll.insert({name: 'Steve'}); var containerDiv = renderToDiv(tmpl); var div = containerDiv.querySelector('div'); var shouldBe = function (className) { Tracker.flush(); test.equal(div.innerHTML, "Smurf"); test.equal(div.className, className); var result = canonicalizeHtml(containerDiv.innerHTML); if (result === '<div>Smurf</div>') result = '<div class="">Smurf</div>'; // e.g. IE 9 and 10 test.equal(result, '<div class="' + className + '">Smurf</div>'); }; shouldBe('donut frankfurter noodle'); coll.remove({name: 'frankfurter'}); // (it was kind of a mouthful) shouldBe('donut noodle'); coll.remove({name: 'donut'}); shouldBe('noodle'); coll.remove({name: 'noodle'}); shouldBe(''); // 'David' and 'Steve' appear in the #each but fail the #if coll.remove({}); shouldBe('none'); // now the `{{else}}` case kicks in coll.insert({name: 'bubblegum'}); shouldBe('bubblegum'); }); Tinytest.add('spacebars-tests - template_tests - block helpers in attribute 2', function (test) { var tmpl = Template.spacebars_template_test_block_helpers_in_attribute_2; var R = ReactiveVar(true); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); var input = div.querySelector('input'); test.equal(input.value, '"'); R.set(false); Tracker.flush(); test.equal(input.value, '&<></x>'); }); // Test that if the argument to #each is a constant, it doesn't establish a // dependency on the data context, so when the context changes, items of // the #each are not "changed" and helpers do not rerun. Tinytest.add('spacebars-tests - template_tests - constant #each argument', function (test) { var tmpl = Template.spacebars_template_test_constant_each_argument; var justReturnRuns = 0; // how many times `justReturn` is called var R = ReactiveVar(1); tmpl.helpers({ someData: function () { return R.get(); }, anArray: ['foo', 'bar'], justReturn: function (x) { justReturnRuns++; return String(x); } }); var div = renderToDiv(tmpl); test.equal(justReturnRuns, 2); test.equal(canonicalizeHtml(div.innerHTML).replace(/\s+/g, ' '), 'foo bar 1'); R.set(2); Tracker.flush(); test.equal(justReturnRuns, 2); // still 2, no new runs! 
test.equal(canonicalizeHtml(div.innerHTML).replace(/\s+/g, ' '), 'foo bar 2'); }); Tinytest.addAsync('spacebars-tests - template_tests - #markdown - basic', function (test, onComplete) { var tmpl = Template.spacebars_template_test_markdown_basic; tmpl.helpers({ obj: {snippet: "<i>hi</i>"}, hi: function () { return this.snippet; } }); var div = renderToDiv(tmpl); Meteor.call("getAsset", "markdown_basic.html", function (err, html) { test.isFalse(err); test.equal(canonicalizeHtml(div.innerHTML), canonicalizeHtml(html)); onComplete(); }); }); testAsyncMulti('spacebars-tests - template_tests - #markdown - if', [ function (test, expect) { var self = this; Meteor.call("getAsset", "markdown_if1.html", expect(function (err, html) { test.isFalse(err); self.html1 = html; })); Meteor.call("getAsset", "markdown_if2.html", expect(function (err, html) { test.isFalse(err); self.html2 = html; })); }, function (test, expect) { var self = this; var tmpl = Template.spacebars_template_test_markdown_if; var R = new ReactiveVar(false); tmpl.helpers({cond: function () { return R.get(); }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), canonicalizeHtml(self.html1)); R.set(true); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), canonicalizeHtml(self.html2)); } ]); testAsyncMulti('spacebars-tests - template_tests - #markdown - each', [ function (test, expect) { var self = this; Meteor.call("getAsset", "markdown_each1.html", expect(function (err, html) { test.isFalse(err); self.html1 = html; })); Meteor.call("getAsset", "markdown_each2.html", expect(function (err, html) { test.isFalse(err); self.html2 = html; })); }, function (test, expect) { var self = this; var tmpl = Template.spacebars_template_test_markdown_each; var R = new ReactiveVar([]); tmpl.helpers({seq: function () { return R.get(); }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), canonicalizeHtml(self.html1)); R.set(["item"]); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), canonicalizeHtml(self.html2)); } ]); Tinytest.add('spacebars-tests - template_tests - #markdown - inclusion', function (test) { var tmpl = Template.spacebars_template_test_markdown_inclusion; var subtmpl = Template.spacebars_template_test_markdown_inclusion_subtmpl; subtmpl.helpers({foo: "bar"}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "<p><span>Foo is bar.</span></p>"); }); Tinytest.add('spacebars-tests - template_tests - #markdown - block helpers', function (test) { var tmpl = Template.spacebars_template_test_markdown_block_helpers; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "<p>Hi there!</p>"); }); // Test that when a simple helper re-runs due to a dependency changing // but the return value is the same, the DOM text node is not // re-rendered. 
Tinytest.add('spacebars-tests - template_tests - simple helpers are isolated', function (test) { var runs = [{ helper: function () { return "foo"; }, nodeValue: "foo" }, { helper: function () { return new Spacebars.SafeString("bar"); }, nodeValue: "bar" }]; _.each(runs, function (run) { var tmpl = Template.spacebars_template_test_simple_helpers_are_isolated; var dep = new Tracker.Dependency; tmpl.helpers({foo: function () { dep.depend(); return run.helper(); }}); var div = renderToDiv(tmpl); var fooTextNode = _.find(div.childNodes, function (node) { return node.nodeValue === run.nodeValue; }); test.isTrue(fooTextNode); dep.changed(); Tracker.flush(); var newFooTextNode = _.find(div.childNodes, function (node) { return node.nodeValue === run.nodeValue; }); test.equal(fooTextNode, newFooTextNode); }); }); // Test that when a helper in an element attribute re-runs due to a // dependency changing but the return value is the same, the attribute // value is not set. Tinytest.add('spacebars-tests - template_tests - attribute helpers are isolated', function (test) { var tmpl = Template.spacebars_template_test_attr_helpers_are_isolated; var dep = new Tracker.Dependency; tmpl.helpers({foo: function () { dep.depend(); return "foo"; }}); var div = renderToDiv(tmpl); var pElement = div.querySelector('p'); test.equal(pElement.getAttribute('attr'), 'foo'); // set the attribute to something else, afterwards check that it // hasn't been updated back to the correct value. pElement.setAttribute('attr', 'not-foo'); dep.changed(); Tracker.flush(); test.equal(pElement.getAttribute('attr'), 'not-foo'); }); // A helper can return an object with a set of element attributes via // `<p {{attrs}}>`. When it re-runs due to a dependency changing the // value for a given attribute might stay the same. Test that the // attribute is not set on the DOM element. Tinytest.add('spacebars-tests - template_tests - attribute object helpers are isolated', function (test) { var tmpl = Template.spacebars_template_test_attr_object_helpers_are_isolated; var dep = new Tracker.Dependency; tmpl.helpers({attrs: function () { dep.depend(); return {foo: "bar"}; }}); var div = renderToDiv(tmpl); var pElement = div.querySelector('p'); test.equal(pElement.getAttribute('foo'), 'bar'); // set the attribute to something else, afterwards check that it // hasn't been updated back to the correct value. pElement.setAttribute('foo', 'not-bar'); dep.changed(); Tracker.flush(); test.equal(pElement.getAttribute('foo'), 'not-bar'); }); // Test that when a helper in an inclusion directive (`{{> foo }}`) // re-runs due to a dependency changing but the return value is the // same, the template is not re-rendered. // // Also, verify that an error is thrown if the return value from such // a helper is not a component. 
Tinytest.add('spacebars-tests - template_tests - inclusion helpers are isolated', function (test) { var tmpl = Template.spacebars_template_test_inclusion_helpers_are_isolated; var dep = new Tracker.Dependency; var subtmpl = Template.spacebars_template_test_inclusion_helpers_are_isolated_subtemplate; // make a copy so we can set "rendered" without mutating the original var subtmplCopy = copyTemplate(subtmpl); var R = new ReactiveVar(subtmplCopy); tmpl.helpers({foo: function () { dep.depend(); return R.get(); }}); var div = renderToDiv(tmpl); subtmplCopy.rendered = function () { test.fail("shouldn't re-render when same value returned from helper"); }; subtmplCopy.onRendered(function () { test.fail("shouldn't re-render when same value returned from helper"); }); dep.changed(); Tracker.flush({_throwFirstError: true}); // `subtmplCopy.rendered` not called R.set(null); Tracker.flush({_throwFirstError: true}); // no error thrown R.set("neither a component nor null"); test.throws(function () { Tracker.flush({_throwFirstError: true}); }, /Expected template or null/); }); Tinytest.add('spacebars-tests - template_tests - nully attributes', function (test) { var tmpls = { 0: Template.spacebars_template_test_nully_attributes0, 1: Template.spacebars_template_test_nully_attributes1, 2: Template.spacebars_template_test_nully_attributes2, 3: Template.spacebars_template_test_nully_attributes3 }; var run = function (whichTemplate, data, expectTrue) { var div = renderToDiv(tmpls[whichTemplate], data); var input = div.querySelector('input'); var descr = JSON.stringify([whichTemplate, data, expectTrue]); if (expectTrue) { test.isTrue(input.checked, descr); test.equal(typeof input.getAttribute('stuff'), 'string', descr); } else { test.isFalse(input.checked); test.equal(JSON.stringify(input.getAttribute('stuff')), 'null', descr); } var html = Blaze.toHTML(Blaze.With(data, function () { return tmpls[whichTemplate]; })); test.equal(/ checked="[^"]*"/.test(html), !! expectTrue); test.equal(/ stuff="[^"]*"/.test(html), !! expectTrue); }; run(0, {}, true); var truthies = [true, '']; var falsies = [false, null, undefined]; _.each(truthies, function (x) { run(1, {foo: x}, true); }); _.each(falsies, function (x) { run(1, {foo: x}, false); }); _.each(truthies, function (x) { _.each(truthies, function (y) { run(2, {foo: x, bar: y}, true); }); _.each(falsies, function (y) { run(2, {foo: x, bar: y}, true); }); }); _.each(falsies, function (x) { _.each(truthies, function (y) { run(2, {foo: x, bar: y}, true); }); _.each(falsies, function (y) { run(2, {foo: x, bar: y}, false); }); }); run(3, {foo: true}, false); run(3, {foo: false}, false); }); Tinytest.add("spacebars-tests - template_tests - double", function (test) { var tmpl = Template.spacebars_template_test_double; var run = function (foo, expectedResult) { tmpl.helpers({foo: foo}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), expectedResult); }; run('asdf', 'asdf'); run(1.23, '1.23'); run(0, '0'); run(true, 'true'); run(false, ''); run(null, ''); run(undefined, ''); }); Tinytest.add("spacebars-tests - template_tests - inclusion lookup order", function (test) { // test that {{> foo}} looks for a helper named 'foo', then a // template named 'foo', then a 'foo' field in the data context. var tmpl = Template.spacebars_template_test_inclusion_lookup; var tmplData = function () { return { // shouldn't have an effect since we define a helper with the // same name. spacebars_template_test_inclusion_lookup_subtmpl: Template. 
spacebars_template_test_inclusion_lookup_subtmpl3, dataContextSubtmpl: Template. spacebars_template_test_inclusion_lookup_subtmpl3}; }; tmpl.helpers({ spacebars_template_test_inclusion_lookup_subtmpl: Template.spacebars_template_test_inclusion_lookup_subtmpl2 }); test.equal(canonicalizeHtml(renderToDiv(tmpl, tmplData).innerHTML), ["This is generated by a helper with the same name.", "This is a template passed in the data context."].join(' ')); }); Tinytest.add("spacebars-tests - template_tests - content context", function (test) { var tmpl = Template.spacebars_template_test_content_context; var R = ReactiveVar(true); tmpl.helpers({foo: { firstLetter: 'F', secondLetter: 'O', bar: { cond: function () { return R.get(); }, firstLetter: 'B', secondLetter: 'A' } }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'BO'); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'FA'); }); _.each(['textarea', 'text', 'password', 'submit', 'button', 'reset', 'select', 'hidden'], function (type) { Tinytest.add("spacebars-tests - template_tests - controls - " + type, function(test) { var R = ReactiveVar({x:"test"}); var R2 = ReactiveVar(""); var tmpl; if (type === 'select') { tmpl = Template.spacebars_test_control_select; tmpl.helpers({ options: ['This is a test', 'This is a fridge', 'This is a frog', 'This is a new frog', 'foobar', 'This is a photograph', 'This is a monkey', 'This is a donkey'], selected: function () { R2.get(); // Re-render when R2 is changed, even though it // doesn't affect HTML. return ('This is a ' + R.get().x) === this.toString(); } }); } else if (type === 'textarea') { tmpl = Template.spacebars_test_control_textarea; tmpl.helpers({ value: function () { R2.get(); // Re-render when R2 is changed, even though it // doesn't affect HTML. return 'This is a ' + R.get().x; } }); } else { tmpl = Template.spacebars_test_control_input; tmpl.helpers({ value: function () { R2.get(); // Re-render when R2 is changed, even though it // doesn't affect HTML. return 'This is a ' + R.get().x; }, type: type }); } var div = renderToDiv(tmpl); document.body.appendChild(div); var canFocus = (type !== 'hidden'); // find first element child, ignoring any marker nodes var input = div.firstChild; while (input.nodeType !== 1) input = input.nextSibling; if (type === 'textarea' || type === 'select') { test.equal(input.nodeName, type.toUpperCase()); } else { test.equal(input.nodeName, 'INPUT'); test.equal(input.type, type); } test.equal(DomUtils.getElementValue(input), "This is a test"); // value updates reactively R.set({x:"fridge"}); Tracker.flush(); test.equal(DomUtils.getElementValue(input), "This is a fridge"); if (canFocus) { // ...if focused, it still updates but focus isn't lost. focusElement(input); DomUtils.setElementValue(input, "something else"); R.set({x:"frog"}); Tracker.flush(); test.equal(DomUtils.getElementValue(input), "This is a frog"); test.equal(document.activeElement, input); } // Setting a value (similar to user typing) should prevent value from being // reverted if the div is re-rendered but the rendered value (ie, R) does // not change. DomUtils.setElementValue(input, "foobar"); R2.set("change"); Tracker.flush(); test.equal(DomUtils.getElementValue(input), "foobar"); // ... but if the actual rendered value changes, that should take effect. 
R.set({x:"photograph"}); Tracker.flush(); test.equal(DomUtils.getElementValue(input), "This is a photograph"); document.body.removeChild(div); }); }); Tinytest.add("spacebars-tests - template_tests - radio", function(test) { var R = ReactiveVar(""); var R2 = ReactiveVar(""); var change_buf = []; var tmpl = Template.spacebars_test_control_radio; tmpl.helpers({ bands: ["AM", "FM", "XM"], isChecked: function () { return R.get() === this.toString(); }, band: function () { return R.get(); } }); tmpl.events({ 'change input': function (event) { var btn = event.target; var band = btn.value; change_buf.push(band); R.set(band); } }); var div = renderToDiv(tmpl); document.body.appendChild(div); // get the three buttons; they should not change identities! var btns = nodesToArray(div.getElementsByTagName("INPUT")); var text = function () { var text = div.innerText || div.textContent; return text.replace(/[ \n\r]+/g, " ").replace(/^\s+|\s+$/g, ""); }; test.equal(_.pluck(btns, 'checked'), [false, false, false]); test.equal(text(), "Band:"); clickIt(btns[0]); test.equal(change_buf, ['AM']); change_buf.length = 0; Tracker.flush(); test.equal(_.pluck(btns, 'checked'), [true, false, false]); test.equal(text(), "Band: AM"); R2.set("change"); Tracker.flush(); test.length(change_buf, 0); test.equal(_.pluck(btns, 'checked'), [true, false, false]); test.equal(text(), "Band: AM"); clickIt(btns[1]); test.equal(change_buf, ['FM']); change_buf.length = 0; Tracker.flush(); test.equal(_.pluck(btns, 'checked'), [false, true, false]); test.equal(text(), "Band: FM"); clickIt(btns[2]); test.equal(change_buf, ['XM']); change_buf.length = 0; Tracker.flush(); test.equal(_.pluck(btns, 'checked'), [false, false, true]); test.equal(text(), "Band: XM"); clickIt(btns[1]); test.equal(change_buf, ['FM']); change_buf.length = 0; Tracker.flush(); test.equal(_.pluck(btns, 'checked'), [false, true, false]); test.equal(text(), "Band: FM"); document.body.removeChild(div); }); Tinytest.add("spacebars-tests - template_tests - checkbox", function(test) { var tmpl = Template.spacebars_test_control_checkbox; var labels = ["Foo", "Bar", "Baz"]; var Rs = {}; _.each(labels, function (label) { Rs[label] = ReactiveVar(false); }); tmpl.helpers({ labels: labels, isChecked: function () { return Rs[this.toString()].get(); } }); var div = renderToDiv(tmpl); document.body.appendChild(div); var boxes = nodesToArray(div.getElementsByTagName("INPUT")); test.equal(_.pluck(boxes, 'checked'), [false, false, false]); // Re-render with first one checked. Rs.Foo.set(true); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [true, false, false]); // Re-render with first one unchecked again. Rs.Foo.set(false); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [false, false, false]); // User clicks the second one. clickElement(boxes[1]); test.equal(_.pluck(boxes, 'checked'), [false, true, false]); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [false, true, false]); // Re-render with third one checked. Second one should stay checked because // it's a user update! Rs.Baz.set(true); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [false, true, true]); // User turns second and third off. clickElement(boxes[1]); clickElement(boxes[2]); test.equal(_.pluck(boxes, 'checked'), [false, false, false]); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [false, false, false]); // Re-render with first one checked. Third should stay off because it's a user // update! 
Rs.Foo.set(true); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [true, false, false]); // Re-render with first one unchecked. Third should still stay off. Rs.Foo.set(false); Tracker.flush(); test.equal(_.pluck(boxes, 'checked'), [false, false, false]); document.body.removeChild(div); }); Tinytest.add("spacebars-tests - template_tests - attributes", function(test) { var tmpl = Template.spacebars_test_attributes; var isReadonly = ReactiveVar(false); var isDisabled = ReactiveVar(false); var isChecked = ReactiveVar(false); var nameValue = ReactiveVar('bar'); var attrs = ReactiveVar({}); tmpl.helpers({ isReadonly: function () { return isReadonly.get(); }, isChecked: function () { return isChecked.get(); }, attrs: function () { return attrs.get(); }, isDisabled: function () { return isDisabled.get(); }, nameValue: function () { return nameValue.get(); } }); var div = renderToDiv(tmpl); document.body.appendChild(div); var buttons = nodesToArray(div.getElementsByTagName("INPUT")); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); isReadonly.set(true); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [true, false, true]); isReadonly.set(false); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); isReadonly.set('readonly'); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [true, false, true]); isReadonly.set(null); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); nameValue.set('foo'); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); test.equal(_.pluck(buttons, 'disabled'), [false, false, false]); isDisabled.set(true); Tracker.flush(); test.equal(_.pluck(buttons, 'disabled'), [false, true, false]); isDisabled.set(false); Tracker.flush(); test.equal(_.pluck(buttons, 'disabled'), [false, false, false]); isDisabled.set('disabled'); Tracker.flush(); test.equal(_.pluck(buttons, 'disabled'), [false, true, false]); isDisabled.set(null); Tracker.flush(); test.equal(_.pluck(buttons, 'disabled'), [false, false, false]); test.equal(_.pluck(buttons, 'checked'), [false, false, false]); isChecked.set(true); Tracker.flush(); test.equal(_.pluck(buttons, 'checked'), [true, false, false]); isChecked.set(false); Tracker.flush(); test.equal(_.pluck(buttons, 'checked'), [false, false, false]); isChecked.set('checked'); Tracker.flush(); test.equal(_.pluck(buttons, 'checked'), [true, false, false]); isChecked.set(null); Tracker.flush(); test.equal(_.pluck(buttons, 'checked'), [false, false, false]); attrs.set({disabled: true, checked: true, readonly: true}); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, true, true]); test.equal(_.pluck(buttons, 'disabled'), [false, true, false]); test.equal(_.pluck(buttons, 'checked'), [false, true, false]); attrs.set({disabled: false, checked: false, readonly: false}); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); test.equal(_.pluck(buttons, 'disabled'), [false, false, false]); test.equal(_.pluck(buttons, 'checked'), [false, false, false]); attrs.set({disabled: 'disabled', checked: 'checked', readonly: 'readonly'}); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, true, true]); test.equal(_.pluck(buttons, 'disabled'), [false, true, false]); test.equal(_.pluck(buttons, 'checked'), [false, true, false]); attrs.set({disabled: null, checked: null, readonly: null}); Tracker.flush(); test.equal(_.pluck(buttons, 'readOnly'), [false, false, true]); test.equal(_.pluck(buttons, 'disabled'), 
[false, false, false]); test.equal(_.pluck(buttons, 'checked'), [false, false, false]); document.body.removeChild(div); }); Tinytest.add('spacebars-tests - template_tests - unfound template', function (test) { test.throws(function () { renderToDiv(Template.spacebars_test_nonexistent_template); }, /No such template/); }); Tinytest.add('spacebars-tests - template_tests - helper passed to #if called exactly once when invalidated', function (test) { var tmpl = Template.spacebars_test_if_helper; var foo; var count = 0; var d = new Tracker.Dependency; tmpl.helpers({foo: function () { d.depend(); count++; return foo; }}); foo = false; var div = renderToDiv(tmpl); divRendersTo(test, div, "false"); test.equal(count, 1); foo = true; d.changed(); divRendersTo(test, div, "true"); test.equal(count, 2); }); Tinytest.add('spacebars-tests - template_tests - custom block helper functions called exactly once when invalidated', function (test) { var tmpl = Template.spacebars_test_block_helper_function; var foo; var count = 0; var d = new Tracker.Dependency; tmpl.helpers({foo: function () { d.depend(); count++; return Template.spacebars_template_test_aaa; }}); foo = false; renderToDiv(tmpl); Tracker.flush(); test.equal(count, 1); foo = true; d.changed(); Tracker.flush(); test.equal(count, 2); }); var runOneTwoTest = function (test, subTemplateName, optionsData) { _.each([Template.spacebars_test_helpers_stop_onetwo, Template.spacebars_test_helpers_stop_onetwo_attribute], function (tmpl) { var sub1 = Template[subTemplateName + '1']; var sub2 = Template[subTemplateName + '2']; tmpl.helpers({ one: sub1, two: sub2 }); var buf = ''; var showOne = ReactiveVar(true); var dummy = ReactiveVar(0); tmpl.helpers({showOne: function () { return showOne.get(); }}); sub1.helpers({options: function () { var x = dummy.get(); buf += '1'; if (optionsData) return optionsData[x]; else return ['something']; }}); sub2.helpers({options: function () { var x = dummy.get(); buf += '2'; if (optionsData) return optionsData[x]; else return ['something']; }}); var div = renderToDiv(tmpl); Tracker.flush(); test.equal(buf, '1'); showOne.set(false); dummy.set(1); Tracker.flush(); test.equal(buf, '12'); showOne.set(true); dummy.set(2); Tracker.flush(); test.equal(buf, '121'); // clean up the div $(div).remove(); test.equal(showOne._numListeners(), 0); test.equal(dummy._numListeners(), 0); }); }; Tinytest.add('spacebars-tests - template_tests - with stops without re-running helper', function (test) { runOneTwoTest(test, 'spacebars_test_helpers_stop_with'); }); Tinytest.add('spacebars-tests - template_tests - each stops without re-running helper', function (test) { runOneTwoTest(test, 'spacebars_test_helpers_stop_each'); }); Tinytest.add('spacebars-tests - template_tests - each inside with stops without re-running helper', function (test) { runOneTwoTest(test, 'spacebars_test_helpers_stop_with_each'); }); Tinytest.add('spacebars-tests - template_tests - if stops without re-running helper', function (test) { runOneTwoTest(test, 'spacebars_test_helpers_stop_if', ['a', 'b', 'a']); }); Tinytest.add('spacebars-tests - template_tests - unless stops without re-running helper', function (test) { runOneTwoTest(test, 'spacebars_test_helpers_stop_unless', ['a', 'b', 'a']); }); Tinytest.add('spacebars-tests - template_tests - inclusion stops without re-running function', function (test) { var t = Template.spacebars_test_helpers_stop_inclusion3; runOneTwoTest(test, 'spacebars_test_helpers_stop_inclusion', [t, t, t]); }); Tinytest.add('spacebars-tests - 
template_tests - template with callbacks inside with stops without recalculating data', function (test) { var tmpl = Template.spacebars_test_helpers_stop_with_callbacks3; tmpl.created = function () {}; tmpl.rendered = function () {}; tmpl.destroyed = function () {}; runOneTwoTest(test, 'spacebars_test_helpers_stop_with_callbacks'); }); Tinytest.add('spacebars-tests - template_tests - no data context is seen as an empty object', function (test) { var tmpl = Template.spacebars_test_no_data_context; var dataInHelper = 'UNSET'; var dataInRendered = 'UNSET'; var dataInCreated = 'UNSET'; var dataInDestroyed = 'UNSET'; var dataInEvent = 'UNSET'; tmpl.helpers({foo: function () { dataInHelper = this; }}); tmpl.created = function () { dataInCreated = this.data; }; tmpl.rendered = function () { dataInRendered = this.data; }; tmpl.destroyed = function () { dataInDestroyed = this.data; }; tmpl.events({ 'click': function () { dataInEvent = this; } }); var div = renderToDiv(tmpl); document.body.appendChild(div); clickElement(div.querySelector('button')); Tracker.flush(); // rendered gets called afterFlush $(div).remove(); test.isFalse(dataInHelper === window); test.equal(dataInHelper, {}); test.equal(dataInCreated, null); test.equal(dataInRendered, null); test.equal(dataInDestroyed, null); test.isFalse(dataInEvent === window); test.equal(dataInEvent, {}); }); Tinytest.add('spacebars-tests - template_tests - falsy with', function (test) { var tmpl = Template.spacebars_test_falsy_with; var R = ReactiveVar(null); tmpl.helpers({obj: function () { return R.get(); }}); var div = renderToDiv(tmpl); divRendersTo(test, div, ""); R.set({greekLetter: 'alpha'}); divRendersTo(test, div, "alpha"); R.set(null); divRendersTo(test, div, ""); R.set({greekLetter: 'alpha'}); divRendersTo(test, div, "alpha"); }); Tinytest.add("spacebars-tests - template_tests - helpers don't leak", function (test) { var tmpl = Template.spacebars_test_helpers_dont_leak; tmpl.foo = "wrong"; tmpl.bar = function () { return "WRONG"; }; // Also test that custom block helpers (implemented as templates) do NOT // interfere with helper lookup in the current template Template.spacebars_test_helpers_dont_leak2.helpers({ bonus: function () { return 'BONUS'; }}); var div = renderToDiv(tmpl); divRendersTo(test, div, "correct BONUS"); }); Tinytest.add("spacebars-tests - template_tests - event handler returns false", function (test) { var tmpl = Template.spacebars_test_event_returns_false; var elemId = "spacebars_test_event_returns_false_link"; tmpl.events({ 'click a': function (evt) { return false; } }); var div = renderToDiv(tmpl); document.body.appendChild(div); clickIt(document.getElementById(elemId)); // NOTE: This failure can stick across test runs! Try // removing '#bad-url' from the location bar and run // the tests again. :) test.isFalse(/#bad-url/.test(window.location.hash)); document.body.removeChild(div); } ); // Make sure that if you bind an event on "div p", for example, // both the div and the p need to be in the template. jQuery's // `$(elem).find(...)` works this way, but the browser's // querySelector doesn't. 
Tinytest.add(
  "spacebars-tests - template_tests - event map selector scope",
  function (test) {
    var tmpl = Template.spacebars_test_event_selectors1;
    var tmpl2 = Template.spacebars_test_event_selectors2;
    var buf = [];
    tmpl2.events({
      'click div p': function (evt) { buf.push(evt.currentTarget.className); }
    });

    var div = renderToDiv(tmpl);
    document.body.appendChild(div);
    test.equal(buf.join(), '');
    clickIt(div.querySelector('.p1'));
    test.equal(buf.join(), '');
    clickIt(div.querySelector('.p2'));
    test.equal(buf.join(), 'p2');
    document.body.removeChild(div);
  }
);

if (document.addEventListener) {
  // see note about non-bubbling events in the "capturing events"
  // templating test for why we use the VIDEO tag. (It would be
  // nice to get rid of the network dependency, though.)
  // We skip this test in IE 8.
  Tinytest.add(
    "spacebars-tests - template_tests - event map selector scope (capturing)",
    function (test) {
      var tmpl = Template.spacebars_test_event_selectors_capturing1;
      var tmpl2 = Template.spacebars_test_event_selectors_capturing2;
      var buf = [];
      tmpl2.events({
        'play div video': function (evt) { buf.push(evt.currentTarget.className); }
      });

      var div = renderToDiv(tmpl);
      document.body.appendChild(div);
      test.equal(buf.join(), '');
      simulateEvent(div.querySelector(".video1"), "play", {}, {bubbles: false});
      test.equal(buf.join(), '');
      simulateEvent(div.querySelector(".video2"), "play", {}, {bubbles: false});
      test.equal(buf.join(), 'video2');
      document.body.removeChild(div);
    }
  );
}

Tinytest.add("spacebars-tests - template_tests - tables", function (test) {
  var tmpl1 = Template.spacebars_test_tables1;
  var div = renderToDiv(tmpl1);
  test.equal(_.pluck(div.querySelectorAll('*'), 'tagName'), ['TABLE', 'TR', 'TD']);
  divRendersTo(test, div, '<table><tr><td>Foo</td></tr></table>');

  var tmpl2 = Template.spacebars_test_tables2;
  tmpl2.helpers({foo: 'Foo'});
  div = renderToDiv(tmpl2);
  test.equal(_.pluck(div.querySelectorAll('*'), 'tagName'), ['TABLE', 'TR', 'TD']);
  divRendersTo(test, div, '<table><tr><td>Foo</td></tr></table>');
});

Tinytest.add("spacebars-tests - template_tests - jQuery.trigger extraParameters are passed to the event callback", function (test) {
  var tmpl = Template.spacebars_test_jquery_events;
  var captured = false;
  var args = ["param1", "param2", {option: 1}, 1, 2, 3];
  tmpl.events({
    'someCustomEvent': function (event, template) {
      var i;
      for (i=0; i<args.length; i++) {
        // expect the arguments to be just after template
        test.equal(arguments[i+2], args[i]);
      }
      captured = true;
    }
  });
  tmpl.rendered = function () {
    $(this.find('button')).trigger('someCustomEvent', args);
  };
  renderToDiv(tmpl);
  Tracker.flush();
  test.equal(captured, true);
}
);

Tinytest.add("spacebars-tests - template_tests - toHTML", function (test) {
  // run once, verifying that autoruns are stopped
  var once = function (tmplToRender, tmplForHelper, helper, val) {
    var count = 0;
    var R = new ReactiveVar;
    var getR = function () { count++; return R.get(); };
    R.set(val);
    var helpers = {};
    helpers[helper] = getR;
    tmplForHelper.helpers(helpers);
    test.equal(canonicalizeHtml(Blaze.toHTML(tmplToRender)), "bar");
    test.equal(count, 1);
    R.set("");
    Tracker.flush();
    test.equal(count, 1); // all autoruns stopped
  };
  once(Template.spacebars_test_tohtml_basic,
    Template.spacebars_test_tohtml_basic, "foo", "bar");
  once(Template.spacebars_test_tohtml_if,
    Template.spacebars_test_tohtml_if, "foo", "bar");
  once(Template.spacebars_test_tohtml_with,
    Template.spacebars_test_tohtml_with, "foo", "bar");
  once(Template.spacebars_test_tohtml_each,
    Template.spacebars_test_tohtml_each, "foos", ["bar"]);
  once(Template.spacebars_test_tohtml_include_with,
    Template.spacebars_test_tohtml_with, "foo", "bar");
  once(Template.spacebars_test_tohtml_include_each,
    Template.spacebars_test_tohtml_each, "foos", ["bar"]);
});

Tinytest.add("spacebars-tests - template_tests - block comments should not be displayed", function (test) {
  var tmpl = Template.spacebars_test_block_comment;
  var div = renderToDiv(tmpl);
  test.equal(canonicalizeHtml(div.innerHTML), '');
}
);

// Originally reported at https://github.com/meteor/meteor/issues/2046
Tinytest.add("spacebars-tests - template_tests - {{#with}} with mutated data context", function (test) {
  var tmpl = Template.spacebars_test_with_mutated_data_context;
  var foo = {value: 0};
  var dep = new Tracker.Dependency;
  tmpl.helpers({foo: function () { dep.depend(); return foo; }});

  var div = renderToDiv(tmpl);
  test.equal(canonicalizeHtml(div.innerHTML), '0');

  foo.value = 1;
  dep.changed();
  Tracker.flush();
  test.equal(canonicalizeHtml(div.innerHTML), '1');
});

Tinytest.add("spacebars-tests - template_tests - javascript scheme urls", function (test) {
  var tmpl = Template.spacebars_test_url_attribute;
  var sessionKey = "foo-" + Random.id();
  tmpl.helpers({foo: function () { return Session.get(sessionKey); }});
  var numUrlAttrs = 4;
  var div = renderToDiv(tmpl);

  // [tag name, attr name, is a url attribute]
  var attrsList = [["A", "href", true], ["FORM", "action", true],
                   ["IMG", "src", true], ["INPUT", "value", false]];

  var checkAttrs = function (url, isJavascriptProtocol) {
    if (isJavascriptProtocol) {
      Meteor._suppress_log(numUrlAttrs);
    }
    Session.set(sessionKey, url);
    Tracker.flush();
    _.each(
      attrsList,
      function (attrInfo) {
        var normalizedUrl;
        var elem = document.createElement(attrInfo[0]);
        try {
          elem[attrInfo[1]] = url;
        } catch (err) {
          // IE throws an exception if you set an img src to a
          // javascript: URL. Blaze can't override this behavior;
          // whether you've called Blaze._javascriptUrlsAllowed() or not,
          // you won't be able to set a javascript: URL in an img
          // src. So we only test img tags in other browsers.
          if (attrInfo[0] === "IMG") {
            return;
          }
          throw err;
        }
        document.body.appendChild(elem);
        normalizedUrl = elem[attrInfo[1]];
        document.body.removeChild(elem);
        _.each(
          div.getElementsByTagName(attrInfo[0]),
          function (elem) {
            // Intentional Change in Form Action behaviour in Chrome 66,
            // Safari 11.0 and Firefox 59. An empty or missing form action will
            // now return the document's base URL. To receive the actual action,
            // getAttribute('action') must be called. If the HTML attribute being
            // checked is form, check against getAttribute('action') instead.
            // https://bugs.chromium.org/p/chromium/issues/detail?id=724596
            if (attrInfo[0] === "FORM" && isJavascriptProtocol === true) {
              test.equal(
                elem.getAttribute('action'),
                isJavascriptProtocol && attrInfo[2] ? null : normalizedUrl
              );
            } else {
              test.equal(
                elem[attrInfo[1]],
                isJavascriptProtocol && attrInfo[2] ?
                "" : normalizedUrl
              );
            }
          }
        );
      }
    );
  };

  test.equal(Blaze._javascriptUrlsAllowed(), false);
  checkAttrs("http://www.meteor.com", false);
  checkAttrs("javascript:alert(1)", true);
  checkAttrs("jAvAsCrIpT:alert(1)", true);
  checkAttrs(" javascript:alert(1)", true);

  Blaze._allowJavascriptUrls();
  test.equal(Blaze._javascriptUrlsAllowed(), true);
  checkAttrs("http://www.meteor.com", false);
  checkAttrs("javascript:alert(1)", false);
  checkAttrs("jAvAsCrIpT:alert(1)", false);
  checkAttrs(" javascript:alert(1)", false);
}
);

Tinytest.add("spacebars-tests - template_tests - event handlers get cleaned up when template is removed", function (test) {
  var tmpl = Template.spacebars_test_event_handler_cleanup;
  var subtmpl = Template.spacebars_test_event_handler_cleanup_sub;
  var rv = new ReactiveVar(true);
  tmpl.helpers({foo: function () { return rv.get(); }});
  subtmpl.events({
    "click/mouseover": function () { }
  });

  var div = renderToDiv(tmpl);
  test.equal(div.$blaze_events["click"].handlers.length, 1);
  test.equal(div.$blaze_events["mouseover"].handlers.length, 1);

  rv.set(false);
  Tracker.flush();
  test.equal(div.$blaze_events["click"].handlers.length, 0);
  test.equal(div.$blaze_events["mouseover"].handlers.length, 0);
}
);

// This test makes sure that Blaze correctly finds the controller
// hierarchy surrounding an element that itself doesn't have a
// controller.
Tinytest.add(
  "spacebars-tests - template_tests - data context in event handlers on elements inside {{#if}}",
  function (test) {
    var tmpl = Template.spacebars_test_data_context_for_event_handler_in_if;
    var data = null;
    tmpl.events({
      'click span': function () {
        data = this;
      }
    });
    var div = renderToDiv(tmpl);
    document.body.appendChild(div);
    clickIt(div.querySelector('span'));
    test.equal(data, {foo: "bar"});
    document.body.removeChild(div);
  });

// https://github.com/meteor/meteor/issues/2156
Tinytest.add(
  "spacebars-tests - template_tests - each with inserts inside autorun",
  function (test) {
    var tmpl = Template.spacebars_test_each_with_autorun_insert;
    var coll = new Mongo.Collection(null);
    var rv = new ReactiveVar;
    tmpl.helpers({items: function () { return coll.find(); }});

    var div = renderToDiv(tmpl);
    Tracker.autorun(function () {
      if (rv.get()) {
        coll.insert({ name: rv.get() });
      }
    });
    rv.set("foo1");
    Tracker.flush();

    var firstId = coll.findOne()._id;
    rv.set("foo2");
    Tracker.flush();
    test.equal(canonicalizeHtml(div.innerHTML), "foo1 foo2");

    coll.update(firstId, { $set: { name: "foo3" } });
    Tracker.flush();
    test.equal(canonicalizeHtml(div.innerHTML), "foo3 foo2");
  }
);

Tinytest.add(
  "spacebars-tests - template_tests - ui hooks",
  function (test) {
    var tmpl = Template.spacebars_test_ui_hooks;
    var rv = new ReactiveVar([]);
    tmpl.helpers({items: function () { return rv.get(); }});

    var div = renderToDiv(tmpl);
    var hooks = [];
    var container = div.querySelector(".test-ui-hooks");

    // Before we attach the ui hooks, put two items in the DOM.
    var origVal = [{ _id: 'foo1' }, { _id: 'foo2' }];
    rv.set(origVal);
    Tracker.flush();

    container._uihooks = {
      insertElement: function (n, next) {
        hooks.push("insert");
        // check that the element hasn't actually been added yet
        test.isTrue((!
n.parentNode) || n.parentNode.nodeType === 11 /*DOCUMENT_FRAGMENT_NODE*/); }, removeElement: function (n) { hooks.push("remove"); // check that the element hasn't actually been removed yet test.isTrue(n.parentNode === container); }, moveElement: function (n, next) { hooks.push("move"); // check that the element hasn't actually been moved yet test.isFalse(n.nextNode === next); } }; var testDomUnchanged = function () { var items = div.querySelectorAll(".item"); test.equal(items.length, 2); test.equal(canonicalizeHtml(items[0].innerHTML), "foo1"); test.equal(canonicalizeHtml(items[1].innerHTML), "foo2"); }; var newVal = _.clone(origVal); newVal.push({ _id: 'foo3' }); rv.set(newVal); Tracker.flush(); test.equal(hooks, ['insert']); testDomUnchanged();
Tracker.flush(); test.equal(hooks, ['insert', 'move']); testDomUnchanged(); newVal = [origVal[0]]; rv.set(newVal); Tracker.flush(); test.equal(hooks, ['insert', 'move', 'remove']); testDomUnchanged(); } ); Tinytest.add( "spacebars-tests - template_tests - ui hooks - nested domranges", function (test) { var tmpl = Template.spacebars_test_ui_hooks_nested; var rv = new ReactiveVar(true); tmpl.helpers({foo: function () { return rv.get(); }}); var subtmpl = Template.spacebars_test_ui_hooks_nested_sub; var uiHookCalled = false; subtmpl.rendered = function () { this.firstNode.parentNode._uihooks = { removeElement: function (node) { uiHookCalled = true; } }; }; var div = renderToDiv(tmpl); document.body.appendChild(div); Tracker.flush(); var htmlBeforeRemove = canonicalizeHtml(div.innerHTML); rv.set(false); Tracker.flush(); test.isTrue(uiHookCalled); var htmlAfterRemove = canonicalizeHtml(div.innerHTML); test.equal(htmlBeforeRemove, htmlAfterRemove); document.body.removeChild(div); } ); Tinytest.add( "spacebars-tests - template_tests - Template.instance from helper", function (test) { // Set a property on the template instance; check that it's still // there from a helper. var tmpl = Template.spacebars_test_template_instance_helper; var value = Random.id(); var instanceFromHelper; tmpl.created = function () { this.value = value; }; tmpl.helpers({foo: function () { instanceFromHelper = Template.instance(); }}); var div = renderToDiv(tmpl); test.equal(instanceFromHelper.value, value); } ); Tinytest.add( "spacebars-tests - template_tests - Template.instance from helper, " + "template instance is kept up-to-date", function (test) { var tmpl = Template.spacebars_test_template_instance_helper; var rv = new ReactiveVar(""); var instanceFromHelper; tmpl.helpers({foo: function () { return Template.instance().data; }}); var div = renderToDiv(tmpl, function () { return rv.get(); }); rv.set("first"); divRendersTo(test, div, "first"); rv.set("second"); Tracker.flush(); divRendersTo(test, div, "second"); // Template.instance() returns null when no template instance test.isTrue(Template.instance() === null); } ); Tinytest.add( "spacebars-tests - template_tests - {{#with}} autorun is cleaned up", function (test) { var tmpl = Template.spacebars_test_with_cleanup; var rv = new ReactiveVar(""); var helperCalled = false; tmpl.helpers({foo: function () { helperCalled = true; return rv.get(); }}); var div = renderToDiv(tmpl); rv.set("first"); Tracker.flush(); test.equal(helperCalled, true); helperCalled = false; $(div).find(".test-with-cleanup").remove(); rv.set("second"); Tracker.flush(); test.equal(helperCalled, false); } ); Tinytest.add( "spacebars-tests - template_tests - Template.parentData from helpers", function (test) { var childTmpl = Template.spacebars_test_template_parent_data_helper_child; var parentTmpl = Template.spacebars_test_template_parent_data_helper; var height = new ReactiveVar(0); var bar = new ReactiveVar("bar"); childTmpl.helpers({ a: ["a"], b: function () { return bar.get(); }, c: ["c"], foo: function () { var a = Template.parentData(height.get()); var b = UI._parentData(height.get()); // back-compat test.equal(a, b); return a; } }); var div = renderToDiv(parentTmpl); test.equal(canonicalizeHtml(div.innerHTML), "d"); height.set(1); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "bar"); // Test Template.parentData() reactivity bar.set("baz"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "baz"); height.set(2); Tracker.flush(); 
test.equal(canonicalizeHtml(div.innerHTML), "a"); height.set(3); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "parent"); // Test that calling Template.parentData() without any arguments is the same as Template.parentData(1) height.set(null); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "baz"); } ); Tinytest.add( "spacebars - SVG <a> elements", function (test) { if (! document.createElementNS) { // IE 8 return; } var tmpl = Template.spacebars_test_svg_anchor; var div = renderToDiv(tmpl); var anchNamespace = $(div).find("a").get(0).namespaceURI; test.equal(anchNamespace, "http://www.w3.org/2000/svg"); } ); Tinytest.add( "spacebars-tests - template_tests - created/rendered/destroyed by each", function (test) { var outerTmpl = Template.spacebars_test_template_created_rendered_destroyed_each; var innerTmpl = Template.spacebars_test_template_created_rendered_destroyed_each_sub; var buf = ''; innerTmpl.created = function () { buf += 'C' + String(this.data).toLowerCase(); }; innerTmpl.rendered = function () { buf += 'R' + String(this.data).toLowerCase(); }; innerTmpl.destroyed = function () { buf += 'D' + String(this.data).toLowerCase(); }; var R = ReactiveVar([{_id: 'A'}]); outerTmpl.helpers({items: function () { return R.get(); }}); var div = renderToDiv(outerTmpl); divRendersTo(test, div, '<div>A</div>'); test.equal(buf, 'CaRa'); R.set([{_id: 'B'}]); divRendersTo(test, div, '<div>B</div>'); test.equal(buf, 'CaRaDaCbRb'); R.set([{_id: 'C'}]); divRendersTo(test, div, '<div>C</div>'); test.equal(buf, 'CaRaDaCbRbDbCcRc'); $(div).remove(); test.equal(buf, 'CaRaDaCbRbDbCcRcDc'); }); Tinytest.add( "spacebars-tests - template_tests - Blaze.render/Blaze.remove", function (test) { var div = document.createElement("DIV"); document.body.appendChild(div); var created = false, rendered = false, destroyed = false; var R = ReactiveVar('aaa'); var tmpl = Template.spacebars_test_ui_render; tmpl.helpers({ greeting: function () { return this.greeting || 'Hello'; }, r: function () { return R.get(); } }); tmpl.created = function () { created = true; }; tmpl.rendered = function () { rendered = true; }; tmpl.destroyed = function () { destroyed = true; }; test.equal([created, rendered, destroyed], [false, false, false]); var renderedTmpl = Blaze.render(tmpl, div); test.equal([created, rendered, destroyed], [true, false, false]); // Flush now. We fire the rendered callback in an afterFlush block, // to ensure that the DOM is completely updated. 
Tracker.flush(); test.equal([created, rendered, destroyed], [true, true, false]); var otherDiv = document.createElement("DIV"); // can run a second time without throwing var x = Blaze.render(tmpl, otherDiv); // note: we'll have clean up `x` below var renderedTmpl2 = Blaze.renderWithData( tmpl, {greeting: 'Bye'}, div); test.equal(canonicalizeHtml(div.innerHTML), "<span>Hello aaa</span><span>Bye aaa</span>"); R.set('bbb'); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "<span>Hello bbb</span><span>Bye bbb</span>"); test.equal([created, rendered, destroyed], [true, true, false]); test.equal(R._numListeners(), 3); Blaze.remove(renderedTmpl); Blaze.remove(renderedTmpl); // test that double-remove doesn't throw Blaze.remove(renderedTmpl2); Blaze.remove(x); test.equal([created, rendered, destroyed], [true, true, true]); test.equal(R._numListeners(), 0); test.equal(canonicalizeHtml(div.innerHTML), ""); }); Tinytest.add( "spacebars-tests - template_tests - Blaze.render fails on jQuery objects", function (test) { var tmpl = Template.spacebars_test_ui_render; test.throws(function () { Blaze.render(tmpl, $('body')); }, /'parentElement' must be a DOM node/); test.throws(function () { Blaze.render(tmpl, document.body, $('body')); }, /'nextNode' must be a DOM node/); }); Tinytest.add( "spacebars-tests - template_tests - UI.getElementData", function (test) { var div = document.createElement("DIV"); var tmpl = Template.spacebars_test_ui_getElementData; Blaze.renderWithData(tmpl, {foo: "bar"}, div); var span = div.querySelector('SPAN'); test.isTrue(span); test.equal(UI.getElementData(span), {foo: "bar"}); test.equal(Blaze.getData(span), {foo: "bar"}); }); Tinytest.add( "spacebars-tests - template_tests - autorun cleanup", function (test) { var tmpl = Template.spacebars_test_parent_removal; var Acalls = ''; var A = ReactiveVar('hi'); tmpl.helpers({A: function (chr) { Acalls += chr; return A.get(); }}); var Bcalls = 0; var B = ReactiveVar(['one', 'two']); tmpl.helpers({B: function () { Bcalls++; return B.get(); }}); // Assert how many times A and B were accessed (since last time) // and how many autoruns are listening to them. var assertCallsAndListeners = function (a_calls, b_calls, a_listeners, b_listeners) { test.equal('A calls: ' + Acalls.length, 'A calls: ' + a_calls, Acalls); test.equal('B calls: ' + Bcalls, 'B calls: ' + b_calls); test.equal('A listeners: ' + A._numListeners(), 'A listeners: ' + a_listeners); test.equal('B listeners: ' + B._numListeners(), 'B listeners: ' + b_listeners); Acalls = ''; Bcalls = 0; }; var div = renderToDiv(tmpl); assertCallsAndListeners(10, 1, 10, 1); A.set(''); Tracker.flush(); // Confirm that #4, #5, #6, and #9 are not re-run. // #a is newly run, for a total of 10 - 4 + 1 = 7, assertCallsAndListeners(7, 0, 7, 1); A.set('hi'); Tracker.flush(); assertCallsAndListeners(10, 0, 10, 1); // Now see that removing the DOM with jQuery, below // the level of the entire template, stops everything. 
$(div.querySelector('.toremove')).remove(); assertCallsAndListeners(0, 0, 0, 0); }); Tinytest.add( "spacebars-tests - template_tests - focus/blur with clean-up", function (test) { var tmpl = Template.spacebars_test_focus_blur_outer; var cond = ReactiveVar(true); tmpl.helpers({cond: function () { return cond.get(); }}); var buf = []; Template.spacebars_test_focus_blur_inner.events({ 'focus input': function () { buf.push('FOCUS'); }, 'blur input': function () { buf.push('BLUR'); } }); var div = renderToDiv(tmpl); document.body.appendChild(div); // check basic focus and blur to make sure // everything is sane test.equal(div.querySelectorAll('input').length, 1); var input; focusElement(input = div.querySelector('input')); // We don't get focus events when the Chrome Dev Tools are focused, // unfortunately, as of Chrome 35. I think this is a regression in // Chrome 34. So, the goal is to work whether or not focus is // "borken," where "working" means always failing if DOMBackend isn't // correctly unbinding the old event handlers when we switch the IF, // and always passing if it is. To cause the problem in DOMBackend, // delete the '**' argument to jQuery#off in // DOMBackend.Events.undelegateEvents. The only compromise we are // making here is that if some unrelated bug in Blaze makes // focus/blur not work, the failure might be masked while the Dev // Tools are open. var borken = false; if (buf.length === 0 && document.activeElement === input) { test.ok({note:"You might need to defocus the Chrome Dev Tools to get a more accurate run of this test!"}); borken = true; $(input).trigger('focus'); } test.equal(buf.join(), 'FOCUS'); blurElement(div.querySelector('input')); if (buf.length === 1) $(input).trigger('blur'); test.equal(buf.join(), 'FOCUS,BLUR'); // now switch the IF and check again. The failure mode // we observed was that DOMBackend would not correctly // unbind the old event listener at the jQuery level, // so the old event listener would fire and cause an // exception inside Blaze ("Must be attached" in // DOMRange#containsElement), which would show up in // the console and cause our handler not to fire. cond.set(false); buf.length = 0; Tracker.flush(); test.equal(div.querySelectorAll('input').length, 1); focusElement(input = div.querySelector('input')); if (borken) $(input).trigger('focus'); test.equal(buf.join(), 'FOCUS'); blurElement(div.querySelector('input')); if (! borken) test.equal(buf.join(), 'FOCUS,BLUR'); document.body.removeChild(div); }); // We used to remove event handlers on DOMRange detached, but when // tearing down a view, we don't "detach" all the DOMRanges recursively. // Mainly, we destroy the View. Destroying a View should remove its // event listeners. (In practice, however, it's hard to think of // consequences to not removing event handlers on removed DOM nodes, // which will probably be GCed anyway.) 
Tinytest.add( "spacebars-tests - template_tests - event cleanup on destroyed", function (test) { var tmpl = Template.spacebars_test_event_cleanup_on_destroyed_outer; var cond = ReactiveVar(true); tmpl.helpers({cond: function () { return cond.get(); }}); Template.spacebars_test_event_cleanup_on_destroyed_inner.events({ 'click span': function () {}}); var div = renderToDiv(tmpl); document.body.appendChild(div); var eventDiv = div.querySelector('div'); test.equal(eventDiv.$blaze_events.click.handlers.length, 1); cond.set(false); Tracker.flush(); test.equal(eventDiv.$blaze_events.click.handlers.length, 0); document.body.removeChild(div); }); _.each([1, 2, 3], function (n) { Tinytest.add( "spacebars-tests - template_tests - lookup is isolated " + n, function (test) { var buf = ""; var inclusion = Template.spacebars_test_isolated_lookup_inclusion; inclusion.created = function () { buf += 'C'; }; inclusion.destroyed = function () { buf += 'D'; }; var tmpl = Template['spacebars_test_isolated_lookup' + n]; var R = ReactiveVar(Template.spacebars_template_test_aaa); tmpl.helpers({bar: function () { return R.get(); }}); var div = renderToDiv( tmpl, function () { return { foo: R.get() }; }); test.equal(canonicalizeHtml(div.innerHTML), 'aaa--x'); test.equal(buf, 'C'); R.set(Template.spacebars_template_test_bbb); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'bbb--x'); test.equal(buf, 'C'); } ); }); Tinytest.add('spacebars-tests - template_tests - current view in event handler', function (test) { var tmpl = Template.spacebars_test_current_view_in_event; var currentView; var currentData; tmpl.events({ 'click span': function () { currentView = Blaze.getView(); currentData = Blaze.getData(); } }); var div = renderToDiv(tmpl, 'blah'); test.equal(canonicalizeHtml(div.innerHTML), '<span>blah</span>'); document.body.appendChild(div); clickElement(div.querySelector('span')); $(div).remove(); test.isTrue(currentView); test.equal(currentData, 'blah'); }); Tinytest.add('spacebars-tests - template_tests - helper invalidates self', function (test) { var tmpl = Template.spacebars_template_test_bracketed_foo; var count = new ReactiveVar(0); tmpl.helpers({ // It's unusual for a helper to have side effects, but it's possible // and people do it. Regression test for #4097. foo: function () { // Make count odd and return it. var c = count.get(); if ((c % 2) === 0) { count.set(c+1); } return c; } }); var div = renderToDiv(tmpl); divRendersTo(test, div, '[1]'); count.set(2); divRendersTo(test, div, '[3]'); }); Tinytest.add( "spacebars-tests - template_tests - textarea attrs", function (test) { var tmplNoContents = { tmpl: Template.spacebars_test_textarea_attrs, hasTextAreaContents: false }; var tmplWithContents = { tmpl: Template.spacebars_test_textarea_attrs_contents, hasTextAreaContents: true }; var tmplWithContentsAndMoreAttrs = { tmpl: Template.spacebars_test_textarea_attrs_array_contents, hasTextAreaContents: true }; _.each( [tmplNoContents, tmplWithContents, tmplWithContentsAndMoreAttrs], function (tmplInfo) { var id = new ReactiveVar("textarea-" + Random.id()); var name = new ReactiveVar("one"); var attrs = new ReactiveVar({ id: "textarea-" + Random.id() }); var div = renderToDiv(tmplInfo.tmpl, { attrs: function () { return attrs.get(); }, name: function () { return name.get(); } }); // Check that the id and value attribute are as we expect. // We can't check div.innerHTML because Chrome at least doesn't // appear to put textarea value attributes in innerHTML. 
var textarea = div.querySelector("textarea"); test.equal(textarea.id, attrs.get().id); test.equal( textarea.value, tmplInfo.hasTextAreaContents ? "Hello one" : ""); // One of the templates has a separate attribute in addition to // an attributes dictionary. if (tmplInfo === tmplWithContentsAndMoreAttrs) { test.equal($(textarea).attr("class"), "bar"); } // Change the id, check that the attribute updates reactively. attrs.set({ id: "textarea-" + Random.id() }); Tracker.flush(); test.equal(textarea.id, attrs.get().id); // Change the name variable, check that the textarea value // updates reactively. name.set("two"); Tracker.flush(); test.equal( textarea.value, tmplInfo.hasTextAreaContents ? "Hello two" : ""); if (tmplInfo === tmplWithContentsAndMoreAttrs) { test.equal($(textarea).attr("class"), "bar"); } }); }); Tinytest.add( "spacebars-tests - template_tests - this.autorun", function (test) { var tmpl = Template.spacebars_test_autorun; var tmplInner = Template.spacebars_test_autorun_inner; // Keep track of the value of `Template.instance()` inside the // autorun each time it runs. var autorunTemplateInstances = []; var actualTemplateInstance; var returnedComputation; var computationArg; var show = new ReactiveVar(true); var rv = new ReactiveVar("foo"); tmplInner.created = function () { actualTemplateInstance = this; returnedComputation = this.autorun(function (c) { // Test nested autorun. actualTemplateInstance.autorun(function (c2) { rv.get(); autorunTemplateInstances.push(Template.instance()); }); computationArg = c; rv.get(); autorunTemplateInstances.push(Template.instance()); }); }; tmpl.helpers({ show: function () { return show.get(); } }); var div = renderToDiv(tmpl); test.equal(autorunTemplateInstances.length, 2); test.equal(autorunTemplateInstances[0], actualTemplateInstance); test.equal(autorunTemplateInstances[1], actualTemplateInstance); // Test that the autorun returned a computation and received a // computation as an argument. test.isTrue(returnedComputation instanceof Tracker.Computation); test.equal(returnedComputation, computationArg); // Make sure the autorun re-runs when `rv` changes, and that it has // the correct current view. rv.set("bar"); Tracker.flush(); test.equal(autorunTemplateInstances.length, 4); test.equal(autorunTemplateInstances[1], actualTemplateInstance); test.equal(autorunTemplateInstances[2], actualTemplateInstance); // If the inner template is destroyed, the autorun should be stopped. show.set(false); Tracker.flush(); rv.set("baz"); Tracker.flush(); test.equal(autorunTemplateInstances.length, 4); test.equal(rv._numListeners(), 0); } ); // Test that argument in {{> Template.contentBlock arg}} is evaluated in // the proper data context. Tinytest.add( "spacebars-tests - template_tests - contentBlock argument", function (test) { var tmpl = Template.spacebars_test_contentBlock_arg; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'AAA BBB'); }); // Test that when Blaze sets an input field to the same value, // we don't lose the insertion point position. 
Tinytest.add( "spacebars-tests - template_tests - input field to same value", function (test) { var tmpl = Template.spacebars_template_test_input_field_to_same_value; var R = ReactiveVar("BLAH"); tmpl.helpers({foo: function () { return R.get(); }}); var div = renderToDiv(tmpl); document.body.appendChild(div); var input = div.querySelector('input'); test.equal(input.value, "BLAH"); var setSelection = function (startEnd) { startEnd = startEnd.split(' '); if (typeof input.selectionStart === 'number') { // all but IE < 9 input.selectionStart = startEnd[0]; input.selectionEnd = startEnd[1]; } else { // IE 8 input.focus(); var r = input.createTextRange(); // move the start and end of the range to the beginning // of the input field r.moveStart('textedit', -1); r.moveEnd('textedit', -1); // move the start and end a certain number of characters // (relative to their current position) r.moveEnd('character', startEnd[1]); r.moveStart('character', startEnd[0]); r.select(); } }; var getSelection = function () { if (typeof input.selectionStart === 'number') { // all but IE < 9 return input.selectionStart + " " + input.selectionEnd; } else { // IE 8 input.focus(); var r = document.selection.createRange(); var fullText = input.value; var start, end; if (r.text) { // one or more characters are selected. // this is kind of hacky! Relies on fullText // not having duplicate letters, for example. start = fullText.indexOf(r.text); end = start + r.text.length; } else { r.moveStart('textedit', -1); start = end = r.text.length; } return start + " " + end; } }; setSelection("2 3"); test.equal(getSelection(), "2 3"); // At this point, we COULD confirm that setting input.value to // the same thing as before ("BLAH") loses the insertion // point (per browser behavior). However, it doesn't on Firefox. // So we set it to something different, which verifies that our // test machinery is correct. input.value = "BLAN"; // test that insertion point is lost var selectionAfterSet = getSelection(); if (selectionAfterSet !== "0 0") // IE 8 test.equal(getSelection(), "4 4"); // now make the input say "BLAH" but the AttributeHandler // says "OTHER" (so we can make it do the no-op update) R.set("OTHER"); Tracker.flush(); test.equal(input.value, "OTHER"); input.value = "BLAH"; setSelection("2 2"); R.set("BLAH"); Tracker.flush(); test.equal(input.value, "BLAH"); // test that didn't lose insertion point! test.equal(getSelection(), "2 2"); // clean up after ourselves document.body.removeChild(div); } ); Tinytest.add("spacebars-tests - template_tests - contentBlock back-compat", function (test) { // adapted from another test, but this time make sure `UI.contentBlock` // and `UI.elseBlock` correctly behave as `Template.contentBlock` // and `Template.elseBlock`. var tmpl = Template.spacebars_template_test_content_backcompat; var R = ReactiveVar(true); tmpl.helpers({flag: function () { return R.get(); }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'world'); R.set(true); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); }); // For completeness (of coverage), make sure the code that calls // `Template.contentBlock` in the correct scope also causes // the old `UI.contentBlock` to be called in the correct scope. 
Tinytest.add("spacebars-tests - template_tests - content context back-compat", function (test) { var tmpl = Template.spacebars_template_test_content_context_backcompat; var R = ReactiveVar(true); tmpl.helpers({foo: { firstLetter: 'F', secondLetter: 'O', bar: { cond: function () { return R.get(); }, firstLetter: 'B', secondLetter: 'A' } }}); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'BO'); R.set(false); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), 'FA'); }); Tinytest.add("spacebars-tests - template_tests - falsy helper", function (test) { var tmpl = Template.spacebars_template_test_falsy_helper; tmpl.helpers({foo: 0}); Template.registerHelper('GLOBAL_ZERO', 0); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'foo:0 GLOBAL_ZERO:0'); }); Tinytest.add("spacebars-tests - template_tests - old-style helpers", function (test) { var tmpl = Template.spacebars_template_test_oldstyle_helpers; tmpl._NOWARN_OLDSTYLE_HELPERS = true; // Test old-style helper tmpl.foo = 'hello'; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'hello'); // Test that replacing a helper still works (i.e. we don't cache them). // We can change this behavior if we need to, but it is more breaking // to do so. It breaks some unit tests, for example. tmpl.foo = 'world'; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), 'world'); // Test that you can delete an old-style helper with `delete`. // As with the previous case, we can break this functionality, but // we should do it intentionally. delete tmpl.foo; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), ''); }); Tinytest.add("spacebars-tests - template_tests - with data remove (#3130)", function (test) { var tmpl = Template.spacebars_template_test_with_data_remove; var div = document.createElement("DIV"); var theWith = Blaze.renderWithData(tmpl, { foo: 3130 }, div); test.equal(canonicalizeHtml(div.innerHTML), '<b>some data - 3130</b>'); var view = Blaze.getView(div.querySelector('b')); test.isFalse(theWith.isDestroyed); Blaze.remove(view); test.isTrue(theWith.isDestroyed); test.equal(div.innerHTML, ""); }); Tinytest.add("spacebars-tests - template_tests - inclusion with data remove (#3130)", function (test) { var tmpl = Template.spacebars_template_test_inclusion_with_data_remove; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), '<span><b>stuff</b></span>'); var view = Blaze.getView(div.querySelector('b')); var parentView = view.parentView; test.isTrue(parentView.__isTemplateWith); test.isFalse(parentView.isDestroyed); Blaze.remove(view); test.isTrue(parentView.isDestroyed); test.equal(canonicalizeHtml(div.innerHTML), "<span></span>"); }); Tinytest.add("spacebars-tests - template_tests - custom block helper doesn't break Template.instance() (#3540)", function (test) { var tmpl = Template.spacebars_template_test_template_instance_wrapper_outer; tmpl.helpers({ thisShouldOutputHello: function () { return Template.instance().customProp; } }); tmpl.created = function () { this.customProp = "hello"; }; var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "hello hello"); }); testAsyncMulti("spacebars-tests - template_tests - template-level subscriptions", [ function (test, expect) { var tmpl = Template.spacebars_template_test_template_level_subscriptions; var tmplInstance; var div; // Make sure the subscriptions stop when the template is destroyed var stopCallback = expect(); var stopCallback2 = 
expect(); var futureId = Random.id(); // Make sure the HTML is what we expect when one subscription is ready var checkOneReady = expect(function () { test.equal(canonicalizeHtml(div.innerHTML), ""); Meteor.call('makeTemplateSubReady', futureId); }); // Make sure the HTML is what we expect when both subscriptions are ready var checkBothReady = expect(function () { test.equal(canonicalizeHtml(div.innerHTML), "ready! true"); // This will call the template's destroyed callback Blaze.remove(tmplInstance.view); }); var subscriptionsFinished = 0; // Manually use the subscribe ready callback to make sure the template is // doing the right thing var subscribeCallback = expect(function () { subscriptionsFinished++; if (subscriptionsFinished === 1) { // We need to use Tracker.afterFlush here and Tracker.flush doesn't work // because we need to wait for the other callback to fire (the one that // makes ready return true) _and_ we need the UI to rerender Tracker.afterFlush(checkOneReady); } if (subscriptionsFinished === 2) { Tracker.afterFlush(checkBothReady); } }); tmpl.onCreated(function () { var subHandle = this.subscribe("templateSub", subscribeCallback); var subHandle2 = this.subscribe("templateSub", futureId, { onReady: subscribeCallback, connection: Meteor.connection }); subHandle.stop = stopCallback; subHandle2.stop = stopCallback2; tmplInstance = this; }); // Insertion point div = renderToDiv(tmpl); // To start, there is no content because the template isn't ready test.equal(canonicalizeHtml(div.innerHTML), ""); } ]); testAsyncMulti("spacebars-tests - template_tests - template-level subscriptions don't resubscribe unnecessarily", [ function (test, expect) { var tmpl = Template.spacebars_template_test_template_level_subs_resubscribe; var subHandle; var trueThenFalse = new ReactiveVar(true); tmpl.helpers({ ifArg: function () { return trueThenFalse.get(); }, subscribingHelper1: expect(function () { subHandle = Template.instance().subscribe("templateSub"); }), subscribingHelper2: expect(function () { var subHandle2 = Template.instance().subscribe("templateSub"); test.isTrue(subHandle.subscriptionId === subHandle2.subscriptionId); // Make sure we didn't add two subscription handles to our internal list test.equal(_.keys(Template.instance()._subscriptionHandles).length, 1); }) }); renderToDiv(tmpl); Tracker.flush(); trueThenFalse.set(false); } ]); Tinytest.add("spacebars-tests - template_tests - old #each sets data context", function (test) { var tmpl = Template.spacebars_template_test_old_each_data_context; tmpl.helpers({ items: [{text:"a"}, {text:"b"}] }); var div = document.createElement("DIV"); var theWith = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<div>a</div><div>b</div>'); var view = Blaze.getView(div.querySelector('div')); Blaze.remove(view); }); Tinytest.add("spacebars-tests - template_tests - new #each extends data context", function (test) { var tmpl = Template.spacebars_template_test_new_each_data_context; tmpl.helpers({ dataContext: function () { return { items: [{text:"a"}, {text:"b"}], toplevel: "XYZ" }; } }); var div = document.createElement("DIV"); var theWith = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<div>a -- XYZ</div><div>b -- XYZ</div>'); var view = Blaze.getView(div.querySelector('div')); Blaze.remove(view); }); // Same as above, but now the argument to each in has a subexpression Tinytest.add("spacebars-tests - template_tests - new #each with subexpression (#5137)", function (test) { var tmpl = 
Template.spacebars_template_test_new_each_data_context_subexpr; tmpl.helpers({ dataContext: function () { return { items: [{text:"a"}, {text:"b"}], toplevel: "XYZ" }; } }); var div = document.createElement("DIV"); var theWith = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<div>a -- XYZ</div><div>b -- XYZ</div>'); var view = Blaze.getView(div.querySelector('div')); Blaze.remove(view); }); Tinytest.add("spacebars-tests - template_tests - new #each binding lookup is scoped to the template", function (test) { var tmpl = Template.spacebars_template_test_new_each_lookup_top_level; tmpl.helpers({ dataContext: function () { return { letter_a: ["a"], subcontext: { letter_b: ["b"] } }; } }); var div = document.createElement("DIV"); var theWith = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<div>a</div>'); var view = Blaze.getView(div.querySelector('div')); Blaze.remove(view); }); Tinytest.add("spacebars-tests - template_tests - let bindings", function (test) { var tmpl = Template.spacebars_template_test_let_bindings; var v = new ReactiveVar("var"); tmpl.helpers({ dataContext: function () { return { varFromContext: "from context", anotherVarFromContext: "another var from context" }; }, helper: function () { return v.get(); } }); var div = document.createElement("DIV"); var theWith = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<div>var -- var -- from context -- override</div>'); v.set("new var"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<div>new var -- new var -- from context -- override</div>'); var view = Blaze.getView(div.querySelector('div')); Blaze.remove(view); }); Tinytest.add("spacebars-tests - template_tests - #each @index", function (test) { var tmpl = Template.spacebars_template_test_each_index; var c = new Mongo.Collection(); c.insert({ num: 2 }); c.insert({ num: 4 }); tmpl.helpers({ things: function () { return c.find({}, {sort:{num: 1}}); } }); var div = document.createElement("DIV"); var theEach = Blaze.render(tmpl, div); test.equal(canonicalizeHtml(div.innerHTML), '<span>0 - 2</span><span>1 - 4</span>'); c.insert({ num: 1 }); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>0 - 1</span><span>1 - 2</span><span>2 - 4</span>'); var three = c.insert({ num: 3 }); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>0 - 1</span><span>1 - 2</span><span>2 - 3</span><span>3 - 4</span>'); c.update(three, { num: 0 }); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>0 - 0</span><span>1 - 1</span><span>2 - 2</span><span>3 - 4</span>'); c.remove(three); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), '<span>0 - 1</span><span>1 - 2</span><span>2 - 4</span>'); var view = Blaze.getView(div.querySelector('span')); Blaze.remove(view); }); Tinytest.add("spacebars-tests - template_tests - nested expressions", function (test) { var tmpl = Template.spacebars_template_test_nested_exprs; tmpl.helpers({ add: function (a, b) { return a + b; } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "6"); }); Tinytest.add("spacebars-tests - template_tests - nested sub-expressions", function (test) { var tmpl = Template.spacebars_template_test_nested_subexprs; var sentence = new ReactiveVar("can't even imagine a world without Light"); tmpl.helpers({ capitalize: function (str) { return str.charAt(0).toUpperCase() + str.substring(1); }, firstWord: function (sentence) { return sentence.split(' ')[0]; }, generateSentence: function () { 
return sentence.get(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "Can't"); sentence.set("that would be quite dark"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "That"); }); Tinytest.add("spacebars-tests - template_tests - expressions as keyword args", function (test) { var tmpl = Template.spacebars_template_test_exprs_keyword; var name = new ReactiveVar("light"); tmpl.helpers({ capitalize: function (str) { return str.charAt(0).toUpperCase() + str.substring(1); }, name: function () { return name.get(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "Light Mello"); name.set("misa"); Tracker.flush(); test.equal(canonicalizeHtml(div.innerHTML), "Misa Mello"); }); var testDoesntRerender = function (test, which) { var tmpl = ({ "WITH": Template.spacebars_template_test_with_rerender, "LET": Template.spacebars_template_test_let_rerender })[which]; var x = new ReactiveVar("aaa"); tmpl.helpers({ x: function () { return x.get(); } }); var div = renderToDiv(tmpl); var input = div.querySelector('input.foo'); var span = div.querySelector('span.bar'); test.isTrue(input && input.className === 'foo'); test.isTrue(span && span.className === 'bar'); test.equal(canonicalizeHtml(span.innerHTML), 'aaa'); x.set('bbb'); Tracker.flush(); // make sure the input and span are still the same, but the new // value of x is reflected var input2 = div.querySelector('input.foo'); var span2 = div.querySelector('span.bar'); test.isTrue(input2 === input, 'input'); test.isTrue(span2 === span, 'span'); test.equal(canonicalizeHtml(span2.innerHTML), 'bbb'); }; Tinytest.add("spacebars-tests - template_tests - #with doesn't re-render template", function (test) { testDoesntRerender(test, "WITH"); }); Tinytest.add("spacebars-tests - template_tests - #let doesn't re-render template", function (test) { testDoesntRerender(test, "LET"); }); Tinytest.add("spacebars-tests - template_tests - #each takes multiple arguments", function (test) { var tmpl = Template.spacebars_template_test_each_multiarg; tmpl.helpers({ arg: ['a', 'b', 'c'], helper: function (x) { return x; } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "<div>a</div><div>b</div><div>c</div>"); }); Tinytest.add("spacebars-tests - template_tests - lexical scope doesn't leak", function (test) { // make sure '@index' doesn't leak into subtemplates var tmpl = Template.spacebars_template_test_lexical_leakage; tmpl.helpers({ list: ['a', 'b', 'c'] }); test.throws(function () { var div = renderToDiv(tmpl); }, /Unsupported directive/); }); // PR #5138 Tinytest.add("spacebars-tests - template_tests - multiple arguments in each-in", function (test) { var tmpl = Template.spacebars_template_test_each_in_multi_args; tmpl.helpers({ list: ['a', 'b', 'c'], helper: function (list) { return list.reverse(); } }); var div = renderToDiv(tmpl); test.equal(canonicalizeHtml(div.innerHTML), "<div>c</div><div>b</div><div>a</div>"); });
newVal.reverse();
rv.set(newVal);
worker_pool.go
package octopusdeploy

import (
	"github.com/go-playground/validator/v10"
	"github.com/go-playground/validator/v10/non-standard/validators"
)

// WorkerPool is the embedded struct used for all worker pools.
type WorkerPool struct {
	CanAddWorkers bool   `json:"CanAddWorkers"`
	Description   string `json:"Description,omitempty"`
	IsDefault     bool   `json:"IsDefault"`
	Name          string `json:"Name" validate:"required,notblank"`
	SpaceID       string `json:"SpaceId,omitempty" validate:"omitempty,notblank"`
	SortOrder     int    `json:"SortOrder"`

	resource
}

type WorkerPools struct {
	Items []IWorkerPool `json:"Items"`

	PagedResults
}

// newWorkerPool creates and initializes a worker pool resource.
func newWorkerPool(name string) *WorkerPool
// GetName returns the name of the worker pool.
func (w *WorkerPool) GetName() string {
	return w.Name
}

// SetName sets the name of the worker pool.
func (w *WorkerPool) SetName(name string) {
	w.Name = name
}

// Validate checks the state of the worker pool and returns an error if
// invalid.
func (w *WorkerPool) Validate() error {
	v := validator.New()
	err := v.RegisterValidation("notblank", validators.NotBlank)
	if err != nil {
		return err
	}
	return v.Struct(w)
}

var _ IHasName = &WorkerPool{}
{
	return &WorkerPool{
		CanAddWorkers: false,
		Name:          name,
		SortOrder:     0,
		resource:      *newResource(),
	}
}
workflow_notif_test.go
package v2_test

import (
	"context"
	"reflect"
	"testing"

	v2 "github.com/ovh/cds/sdk/exportentities/v2"
	"github.com/ovh/cds/sdk/exportentities"
	"github.com/ovh/cds/engine/api/test"
	"github.com/ovh/cds/sdk"
	"github.com/stretchr/testify/assert"
	yaml "gopkg.in/yaml.v2"
)

func Test_checkWorkflowNotificationsValidity(t *testing.T) {
	type args struct {
		yaml string
	}
	tests := []struct {
		name string
		args args
		want error
	}{
		{
			name: "test multiple notifications",
			want: nil,
			args: args{
				yaml: `name: test1
version: v2.0
workflow:
  DDOS-me:
    pipeline: DDOS-me
    application: test1
    payload:
      git.author: ""
      git.branch: master
      git.hash: ""
      git.hash.before: ""
      git.message: ""
      git.repository: bnjjj/godevoxx
      git.tag: ""
  DDOS-me_2:
    depends_on:
    - DDOS-me
    when:
    - success
    pipeline: DDOS-me
metadata:
  default_tags: git.branch,git.author
notifications:
- type: jabber
  pipelines:
  - DDOS-me
  - DDOS-me_2
  settings:
    on_success: always
    on_failure: change
    on_start: true
    send_to_groups: true
    send_to_author: false
    recipients:
    - q
    template:
      subject: '{{.cds.project}}/{{.cds.application}} {{.cds.pipeline}} {{.cds.environment}}#{{.cds.version}} {{.cds.status}}'
      body: |-
        Project : {{.cds.project}}
        Application : {{.cds.application}}
        Pipeline : {{.cds.pipeline}}/{{.cds.environment}}#{{.cds.version}}
        Status : {{.cds.status}}
        Details : {{.cds.buildURL}}
        Triggered by : {{.cds.triggered_by.username}}
        Branch : {{.git.branch}}
- type: email
  pipelines:
  - DDOS-me_2
  settings:
    template:
      subject: '{{.cds.project}}/{{.cds.application}} {{.cds.pipeline}} {{.cds.environment}}#{{.cds.version}} {{.cds.status}}'
      body: |-
        Project : {{.cds.project}}
        Application : {{.cds.application}}
        Pipeline : {{.cds.pipeline}}/{{.cds.environment}}#{{.cds.version}}
        Status : {{.cds.status}}
        Details : {{.cds.buildURL}}
        Triggered by : {{.cds.triggered_by.username}}
        Branch : {{.git.branch}}
- type: vcs
  settings:
    template:
      disable_comment: true
`,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var w v2.Workflow
			test.NoError(t, yaml.Unmarshal([]byte(tt.args.yaml), &w))
			if got := v2.CheckWorkflowNotificationsValidity(w); got != tt.want {
				t.Errorf("checkWorkflowNotificationsValidity() = %#v, want %v", got, tt.want)
			}
		})
	}
}

func Test_processNotificationValues(t *testing.T) {
	type args struct {
		notif v2.NotificationEntry
	}
	tests := []struct {
		name    string
		args    args
		want    sdk.WorkflowNotification
		wantErr bool
	}{
		// TODO: Add test cases.
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := v2.ProcessNotificationValues(tt.args.notif)
			if (err != nil) != tt.wantErr {
				t.Errorf("processNotificationValues() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if !reflect.DeepEqual(got, tt.want) {
				t.Errorf("processNotificationValues() = %v, want %v", got, tt.want)
			}
		})
	}
}

func
(t *testing.T) {
	tests := []struct {
		name    string
		yaml    string
		wantErr bool
	}{
		{
			name: "two pipelines with one notif",
			yaml: `name: test-notif-1
version: v2.0
workflow:
  test:
    pipeline: test
  test_2:
    depends_on:
    - test
    when:
    - success
    pipeline: test
notifications:
- type: jabber
  pipelines:
  - test
  - test_2
`,
		},
		{
			name: "two pipelines with two notifs",
			yaml: `name: test-notif-1
version: v2.0
workflow:
  test:
    pipeline: test
  test_2:
    depends_on:
    - test
    when:
    - success
    pipeline: test
notifications:
- type: email
  pipelines:
  - test
  settings:
    on_success: always
    on_failure: change
    on_start: true
    send_to_groups: true
    send_to_author: false
    recipients:
    - a
    template:
      subject: '{{.cds.project}}/{{.cds.application}} {{.cds.pipeline}} {{.cds.environment}}#{{.cds.version}} {{.cds.status}}'
      body: |-
        Project : {{.cds.project}}
        Application : {{.cds.application}}
        Pipeline : {{.cds.pipeline}}/{{.cds.environment}}#{{.cds.version}}
        Status : {{.cds.status}}
        Details : {{.cds.buildURL}}
        Triggered by : {{.cds.triggered_by.username}}
        Branch : {{.git.branch}}
- type: jabber
  pipelines:
  - test
  - test_2
  settings:
    template:
      subject: '{{.cds.project}}/{{.cds.application}} {{.cds.pipeline}} {{.cds.environment}}#{{.cds.version}} {{.cds.status}}'
      body: |-
        Project : {{.cds.project}}
        Application : {{.cds.application}}
        Pipeline : {{.cds.pipeline}}/{{.cds.environment}}#{{.cds.version}}
        Status : {{.cds.status}}
        Details : {{.cds.buildURL}}
        Triggered by : {{.cds.triggered_by.username}}
        Branch : {{.git.branch}}
`,
		},
		{
			name: "two pipelines with one notif without node name",
			yaml: `name: test-notif-2-pipeline-no-node
version: v2.0
workflow:
  test:
    pipeline: test
  test_2:
    depends_on:
    - test
    when:
    - success
    pipeline: test
notifications:
- type: jabber
`,
		},
	}
	for _, tst := range tests {
		t.Run(tst.name, func(t *testing.T) {
			yamlWorkflow, err := exportentities.UnmarshalWorkflow([]byte(tst.yaml), exportentities.FormatYAML)
			if err != nil {
				if !tst.wantErr {
					t.Error("Unmarshal raised an error", err)
					return
				}
			}
			if tst.wantErr {
				t.Error("Unmarshal should return an error but it doesn't")
				return
			}
			w, err := exportentities.ParseWorkflow(yamlWorkflow)
			if err != nil {
				if !tst.wantErr {
					t.Error("GetWorkflow raised an error", err)
					return
				}
			}
			if tst.wantErr {
				t.Error("GetWorkflow should return an error but it doesn't")
				return
			}

			// Set the hook and outgoing hook models properly before export all the things
			w.VisitNode(func(n *sdk.Node, w *sdk.Workflow) {
				for i := range n.Hooks {
					for _, m := range sdk.BuiltinHookModels {
						if n.Hooks[i].HookModelName == m.Name {
							break
						}
					}
				}
				if n.OutGoingHookContext != nil {
					for _, m := range sdk.BuiltinOutgoingHookModels {
						if n.OutGoingHookContext.HookModelName == m.Name {
							n.OutGoingHookContext.HookModelID = m.ID
							break
						}
					}
				}
			})

			exportedWorkflow, err := exportentities.NewWorkflow(context.TODO(), *w)
			if err != nil {
				if !tst.wantErr {
					t.Error("NewWorkflow raised an error", err)
					return
				}
			}
			if tst.wantErr {
				t.Error("NewWorkflow should return an error but it doesn't")
				return
			}
			b, err := yaml.Marshal(exportedWorkflow)
			if err != nil {
				if !tst.wantErr {
					t.Error("Marshal raised an error", err)
					return
				}
			}
			if tst.wantErr {
				t.Error("Marshal should return an error but it doesn't")
				return
			}
			assert.Equal(t, tst.yaml, string(b))
		})
	}
}
TestFromYAMLToYAMLWithNotif