file_name
stringlengths
3
137
prefix
stringlengths
0
918k
suffix
stringlengths
0
962k
middle
stringlengths
0
812k
64-web-servers.go
package main import ( "fmt" "log" "net/http" ) type Hello struct {} func (h Hello) ServeHTTP( w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "Hello!") } func
() { var h Hello err := http.ListenAndServe("localhost:4000", h) if err != nil { log.Fatal(err) } }
main
csrverifier.go
// Package csrverifier defines an interface for CSR verification. package csrverifier import ( "crypto/x509" "errors" "github.com/fdurand/scep/scep" scepserver "github.com/fdurand/scep/server" ) // CSRVerifier verifies the raw decrypted CSR. type CSRVerifier interface { Verify(*scep.CSRReqMessage) (bool, error) } // Middleware wraps next in a CSRSigner that runs verifier func Middleware(verifier CSRVerifier, next scepserver.CSRSigner) scepserver.CSRSignerFunc { return func(m *scep.CSRReqMessage) (*x509.Certificate, error) { ok, err := verifier.Verify(m) if err != nil { return nil, err } if !ok
return next.SignCSR(m) } }
{ return nil, errors.New("CSR verify failed") }
single_stepping.py
from debugprov.navgiation_strategy import NavigationStrategy from debugprov.node import Node from debugprov.validity import Validity class SingleStepping(NavigationStrategy):
def recursive_navigate(self, current_node: Node): if self.there_are_nodes_with_unknown_validity(): if current_node.has_childrens(): for c in current_node.childrens: self.recursive_navigate(c) self.evaluate(current_node)
def navigate(self): self.recursive_navigate(self.exec_tree.root_node) self.finish_navigation() return self.exec_tree
model_test.go
package model import ( "strings" "testing" "github.com/gobuffalo/attrs" "github.com/gobuffalo/genny/gentest" "github.com/gobuffalo/gogen" packr "github.com/gobuffalo/packr/v2" "github.com/stretchr/testify/require" ) func clean(s string) string { s = strings.TrimSpace(s) s = strings.Replace(s, "\r\n", "\n", -1) s = strings.Replace(s, "\r", "\n", -1) s = strings.Replace(s, "\t", "", -1) return s } func Test_New(t *testing.T) { r := require.New(t) g, err := New(&Options{ Name: "widget", }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.Len(res.Files, 2) r.NoError(gentest.CompareFiles([]string{"models/widget.go", "models/widget_test.go"}, res.Files)) } func Test_New_Standard(t *testing.T) { r := require.New(t) ats, err := attrs.ParseArgs("id:uuid", "created_at:timestamp", "updated_at:timestamp", "name", "description:text", "age:int", "bar:nulls.String") r.NoError(err) g, err := New(&Options{ Name: "widget", Attrs: ats, }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.NoError(gentest.CompareFiles([]string{"models/widget.go", "models/widget_test.go"}, res.Files)) box := packr.New("Test_New_Standard", "../model/_fixtures") f, err := res.Find("models/widget_test.go") r.NoError(err) bf, err := box.FindString(f.Name()) r.NoError(err) r.Equal(bf, f.String()) f, err = res.Find("models/widget.go") tf := gogen.FmtTransformer() f, err = tf.Transform(f) r.NoError(err) bf, err = box.FindString(f.Name()) r.NoError(err) r.Equal(clean(bf), clean(f.String())) } func Test_New_No_Attrs(t *testing.T) { r := require.New(t) g, err := New(&Options{ Name: "widget", }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() f, err := res.Find("models/widget.go") r.NoError(err) tf := gogen.FmtTransformer() f, err = tf.Transform(f) r.NoError(err) box := packr.New("Test_New_No_Attrs", "../model/_fixtures") bf, 
err := box.FindString("models/widget_empty.go") r.NoError(err) r.Equal(clean(bf), clean(f.String())) } func Test_New_XML(t *testing.T) { r := require.New(t) ats, err := attrs.ParseArgs("id:uuid", "created_at:timestamp", "updated_at:timestamp", "name", "description:text", "age:int", "bar:nulls.String") r.NoError(err) g, err := New(&Options{ Name: "widget", Encoding: "xml", Attrs: ats, }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.NoError(gentest.CompareFiles([]string{"models/widget.go", "models/widget_test.go"}, res.Files)) box := packr.New("Test_New_XML", "../model/_fixtures") f, err := res.Find("models/widget_test.go") r.NoError(err) bf, err := box.FindString(f.Name()) r.NoError(err) r.Equal(bf, f.String()) f, err = res.Find("models/widget.go") tf := gogen.FmtTransformer() f, err = tf.Transform(f) r.NoError(err) bf, err = box.FindString("models/widget_xml.go") r.NoError(err) r.Equal(clean(bf), clean(f.String())) } func
(t *testing.T) { r := require.New(t) ats, err := attrs.ParseArgs("id:uuid", "created_at:timestamp", "updated_at:timestamp", "name", "description:text", "age:int", "bar:nulls.String") r.NoError(err) g, err := New(&Options{ Name: "widget", Encoding: "jsonapi", Attrs: ats, }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.NoError(gentest.CompareFiles([]string{"models/widget.go", "models/widget_test.go"}, res.Files)) box := packr.New("Test_New_JSONAPI", "../model/_fixtures") f, err := res.Find("models/widget_test.go") r.NoError(err) bf, err := box.FindString(f.Name()) r.NoError(err) r.Equal(bf, f.String()) f, err = res.Find("models/widget.go") tf := gogen.FmtTransformer() f, err = tf.Transform(f) r.NoError(err) bf, err = box.FindString("models/widget_jsonapi.go") r.NoError(err) r.Equal(clean(bf), clean(f.String())) } func Test_New_Package(t *testing.T) { r := require.New(t) g, err := New(&Options{ Name: "widget", Path: "models/admin", }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.Len(res.Files, 2) f, err := res.Find("models/admin/widget.go") r.NoError(err) r.Contains(f.String(), "package admin") }
Test_New_JSONAPI
transaction_comment_create.py
# coding: utf-8 import pprint import six from enum import Enum from . import AbstractTransactionCommentActive class TransactionCommentCreate(AbstractTransactionCommentActive): swagger_types = { 'transaction': 'int', } attribute_map = { 'transaction': 'transaction', } _transaction = None def __init__(self, **kwargs): self.discriminator = None self.transaction = kwargs.get('transaction') super().__init__(**kwargs) self.swagger_types.update(super().swagger_types) self.attribute_map.update(super().attribute_map) @property def transaction(self): """Gets the transaction of this TransactionCommentCreate. :return: The transaction of this TransactionCommentCreate. :rtype: int """ return self._transaction @transaction.setter def transaction(self, transaction): """Sets the transaction of this TransactionCommentCreate. :param transaction: The transaction of this TransactionCommentCreate. :type: int """ if transaction is None: raise ValueError("Invalid value for `transaction`, must not be `None`") self._transaction = transaction def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"):
elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) elif isinstance(value, Enum): result[attr] = value.value else: result[attr] = value if issubclass(TransactionCommentCreate, dict): for key, value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, TransactionCommentCreate): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
result[attr] = value.to_dict()
test.py
''' a = [[1 for j in range(5)], [2 for j in range(5)], [3 for j in range(5)], [4 for j in range(5)], [5 for j in range(5)], [6 for j in range(5)]] for i in range(6): for j in range(5): print(a[i][j], end='') print() print() x, y = 0, 0 while True: if x == 6: print() x = 0 y += 1 print(a[x][y] ,end='') x += 1 if x==6 and y==4: break
''' a = 1 a = b print(b) for i in range(5): if a == 1: print('1') pass elif a == 1: print('2') else: print('x') ''' ''' a, b,c,d = 0, 0 ,0 ,1 for i in range(4): c, d, a, b = b, c, d, a print(a,b,c,d) '''
'''
metrics.go
// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package rafthttp import ( "time" "github.com/coreos/etcd-starter/Godeps/_workspace/src/github.com/coreos/etcd/pkg/types" "github.com/coreos/etcd-starter/Godeps/_workspace/src/github.com/coreos/etcd/raft/raftpb" "github.com/coreos/etcd-starter/Godeps/_workspace/src/github.com/prometheus/client_golang/prometheus" ) var ( msgSentDuration = prometheus.NewSummaryVec( prometheus.SummaryOpts{ Name: "rafthttp_message_sent_latency_microseconds", Help: "message sent latency distributions.", }, []string{"channel", "remoteID", "msgType"}, ) msgSentFailed = prometheus.NewCounterVec(prometheus.CounterOpts{ Name: "rafthttp_message_sent_failed_total", Help: "The total number of failed messages sent.", }, []string{"channel", "remoteID", "msgType"}, ) ) func init()
func reportSentDuration(channel string, m raftpb.Message, duration time.Duration) { typ := m.Type.String() if isLinkHeartbeatMessage(m) { typ = "MsgLinkHeartbeat" } msgSentDuration.WithLabelValues(channel, types.ID(m.To).String(), typ).Observe(float64(duration.Nanoseconds() / int64(time.Microsecond))) } func reportSentFailure(channel string, m raftpb.Message) { typ := m.Type.String() if isLinkHeartbeatMessage(m) { typ = "MsgLinkHeartbeat" } msgSentFailed.WithLabelValues(channel, types.ID(m.To).String(), typ).Inc() }
{ prometheus.MustRegister(msgSentDuration) prometheus.MustRegister(msgSentFailed) }
CheckOrders.js
export const GET_TYPPES = "GET_TYPPES" export const CHANGE_TYPES = "CHANGE_TYPES" import {api} from "./AY_API" export function getTypes(){ return (dispatch)=>{
dispatch({ type:GET_TYPPES, types:e.types }); }); } } export function changeTypes(item_i,ele_i){ return (dispatch,getState)=>{ let types = getState().CheckOrders.types; types[item_i]["options"][ele_i].checked = types[item_i]["options"][ele_i].checked ? false:true; let args = { "types":JSON.stringify(types) }; api("ebs.setting.saveset",args,function(e){ console.log(e); }); } }
api("ebs.setting.getcheckset","",function(e){
users_permissions_read_responses.go
// Code generated by go-swagger; DO NOT EDIT. // Copyright 2020 The go-netbox Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package users // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "io" "github.com/go-openapi/runtime" "github.com/go-openapi/strfmt" "github.com/nzions/not-netbox/netbox/models" ) // UsersPermissionsReadReader is a Reader for the UsersPermissionsRead structure. type UsersPermissionsReadReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the received o. func (o *UsersPermissionsReadReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewUsersPermissionsReadOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil default: return nil, runtime.NewAPIError("response status code does not match any response statuses defined for this endpoint in the swagger spec", response, response.Code()) } } // NewUsersPermissionsReadOK creates a UsersPermissionsReadOK with default headers values func
() *UsersPermissionsReadOK { return &UsersPermissionsReadOK{} } /*UsersPermissionsReadOK handles this case with default header values. UsersPermissionsReadOK users permissions read o k */ type UsersPermissionsReadOK struct { Payload *models.ObjectPermission } func (o *UsersPermissionsReadOK) Error() string { return fmt.Sprintf("[GET /users/permissions/{id}/][%d] usersPermissionsReadOK %+v", 200, o.Payload) } func (o *UsersPermissionsReadOK) GetPayload() *models.ObjectPermission { return o.Payload } func (o *UsersPermissionsReadOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { o.Payload = new(models.ObjectPermission) // response payload if err := consumer.Consume(response.Body(), o.Payload); err != nil && err != io.EOF { return err } return nil }
NewUsersPermissionsReadOK
unregister.js
const { createHash } = require('crypto') let handler = async function (m, { args }) { if (!args[0]) throw '*You must enter your serial number to unregister! 😿*' let name = conn.getName(m.sender) let user = global.db.data.users[m.sender] let sn = createHash('md5').update(m.sender).digest('hex') if (args[0] !== sn) throw '*Incorrect Serial Number 😼*\n*You must enter correct serial number! 😼*' user.registered = false m.reply(`😽 Successfully Unregistered From Bot Database! RIP ${name} 😼`) } handler.command = /^unreg(ister)?$/i handler.register = true
module.exports = handler
mealbox-detail.page.ts
import { Component, OnInit } from '@angular/core'; import { NavController } from '@ionic/angular'; import { PhotoViewer } from '@ionic-native/photo-viewer/ngx'; import { CartMealboxService } from '../service/cart-mealbox.service'; @Component({ selector: 'app-mealbox-detail', templateUrl: './mealbox-detail.page.html', styleUrls: ['./mealbox-detail.page.scss'], }) export class MealboxDetailPage implements OnInit { item: any = {}; show: boolean = true; constructor( private cartService: CartMealboxService, private navCtrl: NavController, private photoViewer: PhotoViewer ) { } ngOnInit() { } ionViewWillEnter() { this.item = JSON.parse(localStorage.getItem("passing"));
addProduct() { this.cartService.addItem(this.item); this.navCtrl.back(); } imageClick() { this.photoViewer.show(this.item.image); } showMore(){ this.show = false; } }
console.log(this.item); }
conftest.py
from unittest.mock import patch import os from chapter10 import C10 import pytest TESTDIR = os.path.join(os.path.dirname(__file__), 'tests') def pytest_configure(): pytest.SAMPLE = os.path.join(TESTDIR, '1.c10') pytest.EVENTS = os.path.join(TESTDIR, 'event.c10') pytest.ETHERNET = os.path.join(TESTDIR, 'ethernet.c10') pytest.ERR = os.path.join(TESTDIR, 'err.c10') pytest.BAD = os.path.join(TESTDIR, 'bad.c10') pytest.PCAP = os.path.join(TESTDIR, 'test.pcap') pytest.TMATS = os.path.join(TESTDIR, 'test.tmt') class MockC10(C10): def
(self, packets): self.packets = packets def __iter__(self): return iter(self.packets) @pytest.fixture def c10(): return MockC10 @pytest.fixture(scope='session') def fake_progress(): with patch('c10_tools.common.FileProgress'): yield
__init__
fake_globaltrafficpolicy.go
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package fake import ( admiralv1 "github.com/istio-ecosystem/admiral/admiral/pkg/apis/admiral/v1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" labels "k8s.io/apimachinery/pkg/labels" schema "k8s.io/apimachinery/pkg/runtime/schema" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" testing "k8s.io/client-go/testing" ) // FakeGlobalTrafficPolicies implements GlobalTrafficPolicyInterface type FakeGlobalTrafficPolicies struct { Fake *FakeAdmiralV1 ns string } var globaltrafficpoliciesResource = schema.GroupVersionResource{Group: "admiral.io", Version: "v1", Resource: "globaltrafficpolicies"} var globaltrafficpoliciesKind = schema.GroupVersionKind{Group: "admiral.io", Version: "v1", Kind: "GlobalTrafficPolicy"} // Get takes name of the globalTrafficPolicy, and returns the corresponding globalTrafficPolicy object, and an error if there is any. func (c *FakeGlobalTrafficPolicies) Get(name string, options v1.GetOptions) (result *admiralv1.GlobalTrafficPolicy, err error) { obj, err := c.Fake. Invokes(testing.NewGetAction(globaltrafficpoliciesResource, c.ns, name), &admiralv1.GlobalTrafficPolicy{}) if obj == nil { return nil, err } return obj.(*admiralv1.GlobalTrafficPolicy), err } // List takes label and field selectors, and returns the list of GlobalTrafficPolicies that match those selectors. 
func (c *FakeGlobalTrafficPolicies) List(opts v1.ListOptions) (result *admiralv1.GlobalTrafficPolicyList, err error) { obj, err := c.Fake. Invokes(testing.NewListAction(globaltrafficpoliciesResource, globaltrafficpoliciesKind, c.ns, opts), &admiralv1.GlobalTrafficPolicyList{}) if obj == nil { return nil, err } label, _, _ := testing.ExtractFromListOptions(opts) if label == nil { label = labels.Everything() } list := &admiralv1.GlobalTrafficPolicyList{ListMeta: obj.(*admiralv1.GlobalTrafficPolicyList).ListMeta} for _, item := range obj.(*admiralv1.GlobalTrafficPolicyList).Items { if label.Matches(labels.Set(item.Labels)) { list.Items = append(list.Items, item) } } return list, err } // Watch returns a watch.Interface that watches the requested globalTrafficPolicies. func (c *FakeGlobalTrafficPolicies) Watch(opts v1.ListOptions) (watch.Interface, error) { return c.Fake. InvokesWatch(testing.NewWatchAction(globaltrafficpoliciesResource, c.ns, opts)) } // Create takes the representation of a globalTrafficPolicy and creates it. Returns the server's representation of the globalTrafficPolicy, and an error, if there is any. func (c *FakeGlobalTrafficPolicies) Create(globalTrafficPolicy *admiralv1.GlobalTrafficPolicy) (result *admiralv1.GlobalTrafficPolicy, err error) { obj, err := c.Fake. Invokes(testing.NewCreateAction(globaltrafficpoliciesResource, c.ns, globalTrafficPolicy), &admiralv1.GlobalTrafficPolicy{}) if obj == nil { return nil, err } return obj.(*admiralv1.GlobalTrafficPolicy), err } // Update takes the representation of a globalTrafficPolicy and updates it. Returns the server's representation of the globalTrafficPolicy, and an error, if there is any. func (c *FakeGlobalTrafficPolicies) Update(globalTrafficPolicy *admiralv1.GlobalTrafficPolicy) (result *admiralv1.GlobalTrafficPolicy, err error) { obj, err := c.Fake. 
Invokes(testing.NewUpdateAction(globaltrafficpoliciesResource, c.ns, globalTrafficPolicy), &admiralv1.GlobalTrafficPolicy{}) if obj == nil { return nil, err } return obj.(*admiralv1.GlobalTrafficPolicy), err } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *FakeGlobalTrafficPolicies) UpdateStatus(globalTrafficPolicy *admiralv1.GlobalTrafficPolicy) (*admiralv1.GlobalTrafficPolicy, error) { obj, err := c.Fake. Invokes(testing.NewUpdateSubresourceAction(globaltrafficpoliciesResource, "status", c.ns, globalTrafficPolicy), &admiralv1.GlobalTrafficPolicy{}) if obj == nil { return nil, err } return obj.(*admiralv1.GlobalTrafficPolicy), err } // Delete takes name of the globalTrafficPolicy and deletes it. Returns an error if one occurs. func (c *FakeGlobalTrafficPolicies) Delete(name string, options *v1.DeleteOptions) error { _, err := c.Fake. Invokes(testing.NewDeleteAction(globaltrafficpoliciesResource, c.ns, name), &admiralv1.GlobalTrafficPolicy{}) return err }
action := testing.NewDeleteCollectionAction(globaltrafficpoliciesResource, c.ns, listOptions) _, err := c.Fake.Invokes(action, &admiralv1.GlobalTrafficPolicyList{}) return err } // Patch applies the patch and returns the patched globalTrafficPolicy. func (c *FakeGlobalTrafficPolicies) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *admiralv1.GlobalTrafficPolicy, err error) { obj, err := c.Fake. Invokes(testing.NewPatchSubresourceAction(globaltrafficpoliciesResource, c.ns, name, pt, data, subresources...), &admiralv1.GlobalTrafficPolicy{}) if obj == nil { return nil, err } return obj.(*admiralv1.GlobalTrafficPolicy), err }
// DeleteCollection deletes a collection of objects. func (c *FakeGlobalTrafficPolicies) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {
version.go
/* Copyright 2018 The Knative Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "fmt" "github.com/cppforlife/go-cli-ui/ui" "github.com/spf13/cobra" ) const ( Version = "0.0.9-dev.1" ) type VersionOptions struct { ui ui.UI } func NewVersionOptions(ui ui.UI) *VersionOptions { return &VersionOptions{ui} } func NewVersionCmd(o *VersionOptions, flagsFactory FlagsFactory) *cobra.Command { cmd := &cobra.Command{ Use: "version", Short: "Print client version", Annotations: map[string]string{ systemGroup.Key: systemGroup.Value, }, RunE: func(_ *cobra.Command, _ []string) error { return o.Run() }, } return cmd } func (o *VersionOptions) Run() error {
o.ui.PrintBlock([]byte(fmt.Sprintf("Client Version: %s\n", Version))) return nil }
listItemIconClasses.js
import { generateUtilityClass, generateUtilityClasses } from '@mui/base'; export function getListItemIconUtilityClass(slot) { return generateUtilityClass('MuiListItemIcon', slot);
const listItemIconClasses = generateUtilityClasses('MuiListItemIcon', ['root', 'alignItemsFlexStart']); export default listItemIconClasses;
}
1196B-4.py
# -*- coding: utf-8 -*- #!/usr/bin/python false = False true = True null = None # import math TEST = false try: import sys for arg in sys.argv: if(arg == 'test'): print('test mode') TEST = True pass except: pass def AddImports(libraryNames):
# libnames = ['fileinput', 'codecs', 'operator', 'functools', 'math', # 'io', 'platform', 'collections', 'mmap', 'logging', 'logging.handlers'] libnames = ['functools', 'math', 'collections'] # libnames = ['math'] AddImports(libnames) IntellisenseHint = False if IntellisenseHint: import functools import math import collections # import mmap # import logging # import logging.handlers # import defs # class memoized(object, ): # "Decorator. Caches a function's return value each time it is called.\n\tIf called later with the same arguments, the cached value is returned\n\t(not reevaluated).\n\t" # def __init__(self, func): # self.func = func # self.cache = {} # def __call__(self, *args): # if (not isinstance(args, collections.Hashable)): # return self.func(*args) # if (args in self.cache): # return self.cache[args] # else: # value = self.func(*args) # self.cache[args] = value # return value # def __repr__(self): # "Return the function's docstring." # return self.func.__doc__ # def __get__(self, obj, objtype): # 'Support instance methods.' # return functools.partial(self.__call__, obj) def it(args, *arg): if(TEST): print(args, *arg) # print(args, vargs) def floatEqual(a, b): diff = math.fabs(a-b) if(diff < 1e-10): return True else: return diff <= 1e-8 * max(math.fabs(a), math.fabs(b)) def ria(): return list(map(int, input().strip(' ').split(' '))) def solve(): q = ria()[0] for i in range(q): it('stepstepstepstepstep') [n,k] = ria() # it(n,k) arr = ria() odds = [] cand = [] ans = '' for j in range(len(arr)): if(arr[j] % 2 == 1): odds.append(j) if(len(cand)<k-1): cand.append(j+1) ans = ans + str(j+1) + ' ' pass cand.append(n) ans = ans + str(n) if(k <= len(odds) and (len(odds)-k) % 2 == 0): print('YES') # print(' '.join(map(str, cand))) print(ans) else: print('NO') pass pass pass solve()
for libname in libraryNames: if (type(libname) == type(tuple())): short = libname[1] libname = libname[0] else: short = None try: lib = __import__(libname) except ImportError: pass else: if short: globals()[short] = lib else: globals()[libname] = lib return True
lime_tabular.py
""" Functions for explaining classifiers that use tabular data (matrices). """ import collections import copy from functools import partial import json import warnings import numpy as np import sklearn import sklearn.preprocessing from sklearn.utils import check_random_state from lime.discretize import QuartileDiscretizer from lime.discretize import DecileDiscretizer from lime.discretize import EntropyDiscretizer from lime.discretize import BaseDiscretizer from . import explanation from . import lime_base class TableDomainMapper(explanation.DomainMapper): """Maps feature ids to names, generates table views, etc""" def __init__(self, feature_names, feature_values, scaled_row, categorical_features, discretized_feature_names=None): """Init. Args: feature_names: list of feature names, in order feature_values: list of strings with the values of the original row scaled_row: scaled row categorical_features: list of categorical features ids (ints) """ self.exp_feature_names = feature_names self.discretized_feature_names = discretized_feature_names self.feature_names = feature_names self.feature_values = feature_values self.scaled_row = scaled_row self.all_categorical = len(categorical_features) == len(scaled_row) self.categorical_features = categorical_features def map_exp_ids(self, exp): """Maps ids to feature names. Args: exp: list of tuples [(id, weight), (id,weight)] Returns: list of tuples (feature_name, weight) """ names = self.exp_feature_names if self.discretized_feature_names is not None: names = self.discretized_feature_names return [(names[x[0]], x[1]) for x in exp] def visualize_instance_html(self, exp, label, div_name, exp_object_name, show_table=True, show_all=False): """Shows the current example in a table format. Args: exp: list of tuples [(id, weight), (id,weight)] label: label id (integer) div_name: name of div object to be used for rendering(in js) exp_object_name: name of js explanation object show_table: if False, don't show table visualization. 
show_all: if True, show zero-weighted features in the table. """ if not show_table: return '' weights = [0] * len(self.feature_names) for x in exp: weights[x[0]] = x[1] out_list = list(zip(self.exp_feature_names, self.feature_values, weights)) if not show_all: out_list = [out_list[x[0]] for x in exp] ret = u''' %s.show_raw_tabular(%s, %d, %s); ''' % (exp_object_name, json.dumps(out_list, ensure_ascii=False), label, div_name) return ret class LimeTabularExplainer(object): """Explains predictions on tabular (i.e. matrix) data. For numerical features, perturb them by sampling from a Normal(0,1) and doing the inverse operation of mean-centering and scaling, according to the means and stds in the training data. For categorical features, perturb by sampling according to the training distribution, and making a binary feature that is 1 when the value is the same as the instance being explained.""" def __init__(self, training_data, mode="classification", training_labels=None, feature_names=None, categorical_features=None, categorical_names=None, kernel_width=None, kernel=None, verbose=False, class_names=None, feature_selection='auto', discretize_continuous=True, discretizer='quartile', sample_around_instance=False, random_state=None): """Init function. Args: training_data: numpy 2d array mode: "classification" or "regression" training_labels: labels for training data. Not required, but may be used by discretizer. feature_names: list of names (strings) corresponding to the columns in the training data. categorical_features: list of indices (ints) corresponding to the categorical columns. Everything else will be considered continuous. Values in these columns MUST be integers. categorical_names: map from int to list of names, where categorical_names[x][y] represents the name of the yth value of column x. kernel_width: kernel width for the exponential kernel. 
If None, defaults to sqrt (number of columns) * 0.75 kernel: similarity kernel that takes euclidean distances and kernel width as input and outputs weights in (0,1). If None, defaults to an exponential kernel. verbose: if true, print local prediction values from linear model class_names: list of class names, ordered according to whatever the classifier is using. If not present, class names will be '0', '1', ... feature_selection: feature selection method. can be 'forward_selection', 'lasso_path', 'none' or 'auto'. See function 'explain_instance_with_data' in lime_base.py for details on what each of the options does. discretize_continuous: if True, all non-categorical features will be discretized into quartiles. discretizer: only matters if discretize_continuous is True. Options are 'quartile', 'decile', 'entropy' or a BaseDiscretizer instance. sample_around_instance: if True, will sample continuous features in perturbed samples from a normal centered at the instance being explained. Otherwise, the normal is centered on the mean of the feature data. random_state: an integer or numpy.RandomState that will be used to generate random numbers. If None, the random state will be initialized using the internal numpy seed. 
""" self.random_state = check_random_state(random_state) self.mode = mode self.categorical_names = categorical_names or {} self.sample_around_instance = sample_around_instance if categorical_features is None: categorical_features = [] if feature_names is None: feature_names = [str(i) for i in range(training_data.shape[1])] self.categorical_features = list(categorical_features) self.feature_names = list(feature_names) self.discretizer = None if discretize_continuous: if discretizer == 'quartile': self.discretizer = QuartileDiscretizer( training_data, self.categorical_features, self.feature_names, labels=training_labels) elif discretizer == 'decile': self.discretizer = DecileDiscretizer( training_data, self.categorical_features, self.feature_names, labels=training_labels) elif discretizer == 'entropy': self.discretizer = EntropyDiscretizer( training_data, self.categorical_features, self.feature_names, labels=training_labels) elif isinstance(discretizer, BaseDiscretizer): self.discretizer = discretizer else: raise ValueError('''Discretizer must be 'quartile',''' + ''' 'decile', 'entropy' or a''' + ''' BaseDiscretizer instance''') self.categorical_features = list(range(training_data.shape[1])) discretized_training_data = self.discretizer.discretize( training_data) if kernel_width is None: kernel_width = np.sqrt(training_data.shape[1]) * .75 kernel_width = float(kernel_width) if kernel is None: def kernel(d, kernel_width): return np.sqrt(np.exp(-(d ** 2) / kernel_width ** 2)) kernel_fn = partial(kernel, kernel_width=kernel_width) self.feature_selection = feature_selection self.base = lime_base.LimeBase(kernel_fn, verbose, random_state=self.random_state) self.scaler = None self.class_names = class_names self.scaler = sklearn.preprocessing.StandardScaler(with_mean=False) self.scaler.fit(training_data) self.feature_values = {} self.feature_frequencies = {} for feature in self.categorical_features: if self.discretizer is not None: column = discretized_training_data[:, 
feature] else: column = training_data[:, feature] feature_count = collections.Counter(column) values, frequencies = map(list, zip(*(sorted(feature_count.items())))) self.feature_values[feature] = values self.feature_frequencies[feature] = (np.array(frequencies) / float(sum(frequencies))) self.scaler.mean_[feature] = 0 self.scaler.scale_[feature] = 1 @staticmethod def convert_and_round(values): return ['%.2f' % v for v in values] def explain_instance(self, data_row, predict_fn, labels=(1,), top_labels=None, num_features=10, num_samples=5000, distance_metric='euclidean', model_regressor=None): """Generates explanations for a prediction. First, we generate neighborhood data by randomly perturbing features from the instance (see __data_inverse). We then learn locally weighted linear models on this neighborhood data to explain each of the classes in an interpretable way (see lime_base.py). Args: data_row: 1d numpy array, corresponding to a row predict_fn: prediction function. For classifiers, this should be a function that takes a numpy array and outputs prediction probabilities. For regressors, this takes a numpy array and returns the predictions. For ScikitClassifiers, this is `classifier.predict_proba()`. For ScikitRegressors, this is `regressor.predict()`. The prediction function needs to work on multiple feature vectors (the vectors randomly perturbed from the data_row). labels: iterable with labels to be explained. top_labels: if not None, ignore labels and produce explanations for the K labels with highest prediction probabilities, where K is this parameter. num_features: maximum number of features present in explanation num_samples: size of the neighborhood to learn the linear model distance_metric: the distance metric to use for weights. model_regressor: sklearn regressor to use in explanation. Defaults to Ridge regression in LimeBase. 
Must have model_regressor.coef_ and 'sample_weight' as a parameter to model_regressor.fit() Returns: An Explanation object (see explanation.py) with the corresponding explanations. """ data, inverse = self.__data_inverse(data_row, num_samples) scaled_data = (data - self.scaler.mean_) / self.scaler.scale_ distances = sklearn.metrics.pairwise_distances( scaled_data, scaled_data[0].reshape(1, -1), metric=distance_metric ).ravel() yss = predict_fn(inverse) # for classification, the model needs to provide a list of tuples - classes # along with prediction probabilities if self.mode == "classification": if len(yss.shape) == 1: raise NotImplementedError("LIME does not currently support " "classifier models without probability " "scores. If this conflicts with your " "use case, please let us know: " "https://github.com/datascienceinc/lime/issues/16") elif len(yss.shape) == 2: if self.class_names is None: self.class_names = [str(x) for x in range(yss[0].shape[0])] else: self.class_names = list(self.class_names) if not np.allclose(yss.sum(axis=1), 1.0): warnings.warn(""" Prediction probabilties do not sum to 1, and thus does not constitute a probability space. Check that you classifier outputs probabilities (Not log probabilities, or actual class predictions). 
""") else: raise ValueError("Your model outputs " "arrays with {} dimensions".format(len(yss.shape))) # for regression, the output should be a one-dimensional array of predictions else: try: assert isinstance(yss, np.ndarray) and len(yss.shape) == 1 except AssertionError: raise ValueError("Your model needs to output single-dimensional \ numpyarrays, not arrays of {} dimensions".format(yss.shape)) predicted_value = yss[0] min_y = min(yss) max_y = max(yss) # add a dimension to be compatible with downstream machinery yss = yss[:, np.newaxis] feature_names = copy.deepcopy(self.feature_names) if feature_names is None: feature_names = [str(x) for x in range(data_row.shape[0])] values = self.convert_and_round(data_row) for i in self.categorical_features: if self.discretizer is not None and i in self.discretizer.lambdas: continue name = int(data_row[i]) if i in self.categorical_names: name = self.categorical_names[i][name] feature_names[i] = '%s=%s' % (feature_names[i], name) values[i] = 'True' categorical_features = self.categorical_features discretized_feature_names = None if self.discretizer is not None: categorical_features = range(data.shape[1]) discretized_instance = self.discretizer.discretize(data_row) discretized_feature_names = copy.deepcopy(feature_names) for f in self.discretizer.names: discretized_feature_names[f] = self.discretizer.names[f][int( discretized_instance[f])] domain_mapper = TableDomainMapper(feature_names, values, scaled_data[0], categorical_features=categorical_features, discretized_feature_names=discretized_feature_names) ret_exp = explanation.Explanation(domain_mapper, mode=self.mode, class_names=self.class_names) ret_exp.scaled_data = scaled_data if self.mode == "classification": ret_exp.predict_proba = yss[0] if top_labels: labels = np.argsort(yss[0])[-top_labels:] ret_exp.top_labels = list(labels) ret_exp.top_labels.reverse() else: ret_exp.predicted_value = predicted_value ret_exp.min_value = min_y ret_exp.max_value = max_y labels = [0] for 
label in labels: (ret_exp.intercept[label], ret_exp.local_exp[label], ret_exp.score, ret_exp.local_pred) = self.base.explain_instance_with_data( scaled_data, yss, distances, label, num_features, model_regressor=model_regressor, feature_selection=self.feature_selection) if self.mode == "regression": ret_exp.intercept[1] = ret_exp.intercept[0] ret_exp.local_exp[1] = [x for x in ret_exp.local_exp[0]] ret_exp.local_exp[0] = [(i, -1 * j) for i, j in ret_exp.local_exp[1]] return ret_exp def __data_inverse(self, data_row, num_samples): """Generates a neighborhood around a prediction. For numerical features, perturb them by sampling from a Normal(0,1) and doing the inverse operation of mean-centering and scaling, according to the means and stds in the training data. For categorical features, perturb by sampling according to the training distribution, and making a binary feature that is 1 when the value is the same as the instance being explained. Args: data_row: 1d numpy array, corresponding to a row num_samples: size of the neighborhood to learn the linear model Returns: A tuple (data, inverse), where: data: dense num_samples * K matrix, where categorical features are encoded with either 0 (not equal to the corresponding value in data_row) or 1. The first row is the original instance. 
inverse: same as data, except the categorical features are not binary, but categorical (as the original data) """ data = np.zeros((num_samples, data_row.shape[0])) categorical_features = range(data_row.shape[0]) if self.discretizer is None: data = self.random_state.normal( 0, 1, num_samples * data_row.shape[0]).reshape( num_samples, data_row.shape[0]) if self.sample_around_instance: data = data * self.scaler.scale_ + data_row else: data = data * self.scaler.scale_ + self.scaler.mean_ categorical_features = self.categorical_features first_row = data_row else: first_row = self.discretizer.discretize(data_row) data[0] = data_row.copy() inverse = data.copy() for column in categorical_features: values = self.feature_values[column] freqs = self.feature_frequencies[column] inverse_column = self.random_state.choice(values, size=num_samples, replace=True, p=freqs) binary_column = np.array([1 if x == first_row[column] else 0 for x in inverse_column]) binary_column[0] = 1 inverse_column[0] = data[0, column] data[:, column] = binary_column inverse[:, column] = inverse_column if self.discretizer is not None: inverse[1:] = self.discretizer.undiscretize(inverse[1:]) inverse[0] = data_row return data, inverse class
(LimeTabularExplainer): """ An explainer for keras-style recurrent neural networks, where the input shape is (n_samples, n_timesteps, n_features). This class just extends the LimeTabularExplainer class and reshapes the training data and feature names such that they become something like (val1_t1, val1_t2, val1_t3, ..., val2_t1, ..., valn_tn) Each of the methods that take data reshape it appropriately, so you can pass in the training/testing data exactly as you would to the recurrent neural network. """ def __init__(self, training_data, mode="classification", training_labels=None, feature_names=None, categorical_features=None, categorical_names=None, kernel_width=None, kernel=None, verbose=False, class_names=None, feature_selection='auto', discretize_continuous=True, discretizer='quartile', random_state=None): """ Args: training_data: numpy 3d array with shape (n_samples, n_timesteps, n_features) mode: "classification" or "regression" training_labels: labels for training data. Not required, but may be used by discretizer. feature_names: list of names (strings) corresponding to the columns in the training data. categorical_features: list of indices (ints) corresponding to the categorical columns. Everything else will be considered continuous. Values in these columns MUST be integers. categorical_names: map from int to list of names, where categorical_names[x][y] represents the name of the yth value of column x. kernel_width: kernel width for the exponential kernel. If None, defaults to sqrt(number of columns) * 0.75 kernel: similarity kernel that takes euclidean distances and kernel width as input and outputs weights in (0,1). If None, defaults to an exponential kernel. verbose: if true, print local prediction values from linear model class_names: list of class names, ordered according to whatever the classifier is using. If not present, class names will be '0', '1', ... feature_selection: feature selection method. 
can be 'forward_selection', 'lasso_path', 'none' or 'auto'. See function 'explain_instance_with_data' in lime_base.py for details on what each of the options does. discretize_continuous: if True, all non-categorical features will be discretized into quartiles. discretizer: only matters if discretize_continuous is True. Options are 'quartile', 'decile', 'entropy' or a BaseDiscretizer instance. random_state: an integer or numpy.RandomState that will be used to generate random numbers. If None, the random state will be initialized using the internal numpy seed. """ # Reshape X n_samples, n_timesteps, n_features = training_data.shape training_data = np.transpose(training_data, axes=(0, 2, 1)).reshape( n_samples, n_timesteps * n_features) self.n_timesteps = n_timesteps self.n_features = n_features # Update the feature names feature_names = ['{}_t-{}'.format(n, n_timesteps - (i + 1)) for n in feature_names for i in range(n_timesteps)] # Send off the the super class to do its magic. super(RecurrentTabularExplainer, self).__init__( training_data, mode=mode, training_labels=training_labels, feature_names=feature_names, categorical_features=categorical_features, categorical_names=categorical_names, kernel_width=kernel_width, kernel=kernel, verbose=verbose, class_names=class_names, feature_selection=feature_selection, discretize_continuous=discretize_continuous, discretizer=discretizer, random_state=random_state) def _make_predict_proba(self, func): """ The predict_proba method will expect 3d arrays, but we are reshaping them to 2D so that LIME works correctly. This wraps the function you give in explain_instance to first reshape the data to have the shape the the keras-style network expects. 
""" def predict_proba(X): n_samples = X.shape[0] new_shape = (n_samples, self.n_features, self.n_timesteps) X = np.transpose(X.reshape(new_shape), axes=(0, 2, 1)) return func(X) return predict_proba def explain_instance(self, data_row, classifier_fn, labels=(1,), top_labels=None, num_features=10, num_samples=5000, distance_metric='euclidean', model_regressor=None): """Generates explanations for a prediction. First, we generate neighborhood data by randomly perturbing features from the instance (see __data_inverse). We then learn locally weighted linear models on this neighborhood data to explain each of the classes in an interpretable way (see lime_base.py). Args: data_row: 2d numpy array, corresponding to a row classifier_fn: classifier prediction probability function, which takes a numpy array and outputs prediction probabilities. For ScikitClassifiers , this is classifier.predict_proba. labels: iterable with labels to be explained. top_labels: if not None, ignore labels and produce explanations for the K labels with highest prediction probabilities, where K is this parameter. num_features: maximum number of features present in explanation num_samples: size of the neighborhood to learn the linear model distance_metric: the distance metric to use for weights. model_regressor: sklearn regressor to use in explanation. Defaults to Ridge regression in LimeBase. Must have model_regressor.coef_ and 'sample_weight' as a parameter to model_regressor.fit() Returns: An Explanation object (see explanation.py) with the corresponding explanations. 
""" # Flatten input so that the normal explainer can handle it data_row = data_row.T.reshape(self.n_timesteps * self.n_features) # Wrap the classifier to reshape input classifier_fn = self._make_predict_proba(classifier_fn) return super(RecurrentTabularExplainer, self).explain_instance( data_row, classifier_fn, labels=labels, top_labels=top_labels, num_features=num_features, num_samples=num_samples, distance_metric=distance_metric, model_regressor=model_regressor)
RecurrentTabularExplainer
model_template_query_v2.go
package model

import (
	"github.com/huaweicloud/huaweicloud-sdk-go-v3/core/utils"
	"strings"
)

// TemplateQueryV2 is the v2 template query request body.
type TemplateQueryV2 struct {
	// Template category array.
	Category *[]string `json:"category,omitempty"`

	// Search keyword; matches template name and description. Default: null.
	Keyword *string `json:"keyword,omitempty"`

	// Sort field and direction, e.g.: - desc(created_at): descending by creation time - desc(usage_count): descending by usage count.
	SortBy *string `json:"sort_by,omitempty"`

	// Label: - all: everything - new: latest - hot: popular - recommend: recommended.
	Label *string `json:"label,omitempty"`

	// Whether to query only templates created by the current user. Defaults to querying all templates.
	MyTemplates *bool `json:"my_templates,omitempty"`

	// When querying all templates only published ones are handled; when querying the user's own templates this filters by status: - 0: in review - 1: published - 2: unpublished. Omitted means all (default).
	Status *int32 `json:"status,omitempty"`

	// Whether to query only templates that have notices. Defaults to querying all templates.
	HasNotices *bool `json:"has_notices,omitempty"`

	// List of cloud products (short names) associated with the template.
	Productshorts *[]string `json:"productshorts,omitempty"`

	// Offset at which the query starts; offset >= 0.
	Offset *int32 `json:"offset,omitempty"`

	// Number of templates per page.
	Limit *int32 `json:"limit,omitempty"`

	// List of custom tag IDs associated with the template.
	TagIds *[]string `json:"tag_ids,omitempty"`

	// Template type: - 0: doc - 1: code - 2: pipeline - 3: devops.
	Types *[]int32 `json:"types,omitempty"`

	// Dynamic/static code template flag: - 0: dynamic template (codetemplate) - 1: static template (codesample).
	IsStatic *int32 `json:"is_static,omitempty"`

	// Platform source: - 0: codelabs - 1: devstar.
	PlatformSource *[]int32 `json:"platform_source,omitempty"`

	// List of tag names associated with the template.
	TagNames *[]string `json:"tag_names,omitempty"`
}

// String returns a human-readable representation of the struct for logging;
// falls back to a fixed placeholder if marshaling fails.
func (o TemplateQueryV2) String() string {
	data, err := utils.Marshal(o)
	if err != nil {
		return "TemplateQueryV2 struct{}"
	}

	return strings.Join([]string{"TemplateQueryV2", string(data)}, " ")
}
// 模板状态数组。 StatusArray *[]int32 `json:"status_array,omitempty"`
model_adapter_ext_eth_interface_list.go
/* Cisco Intersight
API version: 1.0.9-5517 Contact: [email protected] */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. package intersight import ( "encoding/json" "reflect" "strings" ) // AdapterExtEthInterfaceList This resource list is returned as a response to a HTTP GET request that does not include a specific resource identifier. type AdapterExtEthInterfaceList struct { MoBaseResponse // The total number of 'adapter.ExtEthInterface' resources matching the request, accross all pages. The 'Count' attribute is included when the HTTP GET request includes the '$inlinecount' parameter. Count *int32 `json:"Count,omitempty"` // The array of 'adapter.ExtEthInterface' resources matching the request. Results []AdapterExtEthInterface `json:"Results,omitempty"` AdditionalProperties map[string]interface{} } type _AdapterExtEthInterfaceList AdapterExtEthInterfaceList // NewAdapterExtEthInterfaceList instantiates a new AdapterExtEthInterfaceList object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed func NewAdapterExtEthInterfaceList(objectType string) *AdapterExtEthInterfaceList { this := AdapterExtEthInterfaceList{} this.ObjectType = objectType return &this } // NewAdapterExtEthInterfaceListWithDefaults instantiates a new AdapterExtEthInterfaceList object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set func NewAdapterExtEthInterfaceListWithDefaults() *AdapterExtEthInterfaceList { this := AdapterExtEthInterfaceList{} return &this } // GetCount returns the Count field value if set, zero value otherwise. 
func (o *AdapterExtEthInterfaceList) GetCount() int32 { if o == nil || o.Count == nil { var ret int32 return ret } return *o.Count } // GetCountOk returns a tuple with the Count field value if set, nil otherwise // and a boolean to check if the value has been set. func (o *AdapterExtEthInterfaceList) GetCountOk() (*int32, bool) { if o == nil || o.Count == nil { return nil, false } return o.Count, true } // HasCount returns a boolean if a field has been set. func (o *AdapterExtEthInterfaceList) HasCount() bool { if o != nil && o.Count != nil { return true } return false } // SetCount gets a reference to the given int32 and assigns it to the Count field. func (o *AdapterExtEthInterfaceList) SetCount(v int32) { o.Count = &v } // GetResults returns the Results field value if set, zero value otherwise (both if not set or set to explicit null). func (o *AdapterExtEthInterfaceList) GetResults() []AdapterExtEthInterface { if o == nil { var ret []AdapterExtEthInterface return ret } return o.Results } // GetResultsOk returns a tuple with the Results field value if set, nil otherwise // and a boolean to check if the value has been set. // NOTE: If the value is an explicit nil, `nil, true` will be returned func (o *AdapterExtEthInterfaceList) GetResultsOk() (*[]AdapterExtEthInterface, bool) { if o == nil || o.Results == nil { return nil, false } return &o.Results, true } // HasResults returns a boolean if a field has been set. func (o *AdapterExtEthInterfaceList) HasResults() bool { if o != nil && o.Results != nil { return true } return false } // SetResults gets a reference to the given []AdapterExtEthInterface and assigns it to the Results field. 
func (o *AdapterExtEthInterfaceList) SetResults(v []AdapterExtEthInterface) {
	o.Results = v
}

// MarshalJSON flattens the embedded MoBaseResponse into a single JSON object,
// then overlays this type's own optional fields and any AdditionalProperties.
func (o AdapterExtEthInterfaceList) MarshalJSON() ([]byte, error) {
	toSerialize := map[string]interface{}{}
	// Serialize the embedded struct and re-read it into a generic map so its
	// fields appear at the top level of the resulting JSON object.
	serializedMoBaseResponse, errMoBaseResponse := json.Marshal(o.MoBaseResponse)
	if errMoBaseResponse != nil {
		return []byte{}, errMoBaseResponse
	}
	errMoBaseResponse = json.Unmarshal([]byte(serializedMoBaseResponse), &toSerialize)
	if errMoBaseResponse != nil {
		return []byte{}, errMoBaseResponse
	}
	if o.Count != nil {
		toSerialize["Count"] = o.Count
	}
	if o.Results != nil {
		toSerialize["Results"] = o.Results
	}
	// Unknown keys captured during unmarshaling are round-tripped verbatim.
	for key, value := range o.AdditionalProperties {
		toSerialize[key] = value
	}

	return json.Marshal(toSerialize)
}

// UnmarshalJSON decodes in three passes: (1) this type's own fields via a
// shadow struct without the embedded type, (2) the embedded MoBaseResponse via
// the alias type (which has no custom UnmarshalJSON), and (3) all remaining
// keys into AdditionalProperties, pruning every key that maps to a tagged
// field of the embedded struct.
func (o *AdapterExtEthInterfaceList) UnmarshalJSON(bytes []byte) (err error) {
	// Shadow struct holding only the fields declared directly on this type.
	type AdapterExtEthInterfaceListWithoutEmbeddedStruct struct {
		// The total number of 'adapter.ExtEthInterface' resources matching the request, across all pages. The 'Count' attribute is included when the HTTP GET request includes the '$inlinecount' parameter.
		Count *int32 `json:"Count,omitempty"`
		// The array of 'adapter.ExtEthInterface' resources matching the request.
		Results []AdapterExtEthInterface `json:"Results,omitempty"`
	}

	varAdapterExtEthInterfaceListWithoutEmbeddedStruct := AdapterExtEthInterfaceListWithoutEmbeddedStruct{}

	err = json.Unmarshal(bytes, &varAdapterExtEthInterfaceListWithoutEmbeddedStruct)
	if err == nil {
		varAdapterExtEthInterfaceList := _AdapterExtEthInterfaceList{}
		varAdapterExtEthInterfaceList.Count = varAdapterExtEthInterfaceListWithoutEmbeddedStruct.Count
		varAdapterExtEthInterfaceList.Results = varAdapterExtEthInterfaceListWithoutEmbeddedStruct.Results
		*o = AdapterExtEthInterfaceList(varAdapterExtEthInterfaceList)
	} else {
		return err
	}

	// Second pass: populate the embedded MoBaseResponse.
	varAdapterExtEthInterfaceList := _AdapterExtEthInterfaceList{}

	err = json.Unmarshal(bytes, &varAdapterExtEthInterfaceList)
	if err == nil {
		o.MoBaseResponse = varAdapterExtEthInterfaceList.MoBaseResponse
	} else {
		return err
	}

	// Third pass: collect leftover keys as AdditionalProperties.
	additionalProperties := make(map[string]interface{})

	if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
		delete(additionalProperties, "Count")
		delete(additionalProperties, "Results")

		// remove fields from embedded structs
		reflectMoBaseResponse := reflect.ValueOf(o.MoBaseResponse)
		for i := 0; i < reflectMoBaseResponse.Type().NumField(); i++ {
			t := reflectMoBaseResponse.Type().Field(i)

			if jsonTag := t.Tag.Get("json"); jsonTag != "" {
				fieldName := ""
				if commaIdx := strings.Index(jsonTag, ","); commaIdx > 0 {
					// Strip tag options such as ",omitempty" to get the key name.
					fieldName = jsonTag[:commaIdx]
				} else {
					fieldName = jsonTag
				}

				if fieldName != "AdditionalProperties" {
					delete(additionalProperties, fieldName)
				}
			}
		}

		o.AdditionalProperties = additionalProperties
	}

	return err
}

// NullableAdapterExtEthInterfaceList distinguishes an unset value from an
// explicit JSON null for optional AdapterExtEthInterfaceList fields.
type NullableAdapterExtEthInterfaceList struct {
	value *AdapterExtEthInterfaceList
	isSet bool
}

// Get returns the wrapped value (may be nil).
func (v NullableAdapterExtEthInterfaceList) Get() *AdapterExtEthInterfaceList {
	return v.value
}

// Set stores val and marks the wrapper as set.
func (v *NullableAdapterExtEthInterfaceList) Set(val *AdapterExtEthInterfaceList) {
	v.value = val
	v.isSet = true
}

// IsSet reports whether a value (possibly nil) has been assigned.
func (v NullableAdapterExtEthInterfaceList) IsSet() bool {
	return v.isSet
}

// Unset clears the value and marks the wrapper as not set.
func (v *NullableAdapterExtEthInterfaceList) Unset() {
	v.value = nil
	v.isSet = false
}

// NewNullableAdapterExtEthInterfaceList wraps val in an already-set wrapper.
func NewNullableAdapterExtEthInterfaceList(val *AdapterExtEthInterfaceList) *NullableAdapterExtEthInterfaceList {
	return &NullableAdapterExtEthInterfaceList{value: val, isSet: true}
}

// MarshalJSON encodes the wrapped value (nil encodes as JSON null).
func (v NullableAdapterExtEthInterfaceList) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

// UnmarshalJSON decodes into the wrapped value and marks the wrapper as set.
func (v *NullableAdapterExtEthInterfaceList) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document.
main.go
package main import ( "fmt" "os" "go.m3o.com" "go.m3o.com/comments" ) func main() { client := m3o.New(os.Getenv("M3O_API_TOKEN")) rsp, err := client.Comments.Update(&comments.UpdateRequest{ Comment: &comments.Comment{ Id: "63c0cdf8-2121-11ec-a881-0242e36f037a", Subject: "Update Comment", Text: "Updated comment text", }, }) fmt.Println(rsp, err)
}
stock_trading_visual_continuous_env.py
#!/usr/bin/env python
# Visual stock/share trading RL environment with continuous trade actions
# Chapter 5, TensorFlow 2 Reinforcement Learning Cookbook | Praveen Palanisamy

import os
import random
from typing import Dict

import cv2
import gym
import numpy as np
import pandas as pd
from gym import spaces

from trading_utils import TradeVisualizer

env_config = {
    "ticker": "MSFT",
    "opening_account_balance": 1000,
    # Number of steps (days) of data provided to the agent in one observation
    "observation_horizon_sequence_length": 30,
}


class StockTradingVisualContinuousEnv(gym.Env):
    def __init__(self, env_config: Dict = env_config):
        """Stock trading environment for RL agents with continuous action space.

        Observations are candlestick-chart images; the action is a single
        float in [-1, 1]: negative sells, positive buys, magnitude is the
        fraction of allowable shares.

        Args:
            env_config (Dict): Env configuration values:
                - ticker (str, optional): Ticker symbol. Defaults to "MSFT".
                - opening_account_balance: starting cash balance.
                - observation_horizon_sequence_length: days per observation.
        """
        super(StockTradingVisualContinuousEnv, self).__init__()
        self.ticker = env_config.get("ticker", "MSFT")
        data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
        self.ticker_file_stream = os.path.join(f"{data_dir}", f"{self.ticker}.csv")
        assert os.path.isfile(
            self.ticker_file_stream
        ), f"Historical stock data file stream not found at: data/{self.ticker}.csv"
        # Stock market data stream. An offline file stream is used. Alternatively, a web
        # API can be used to pull live data.
        # Data-Frame: Date Open High Low Close Adj-Close Volume
        self.ohlcv_df = pd.read_csv(self.ticker_file_stream)

        self.opening_account_balance = env_config["opening_account_balance"]
        # Action: 1-dim value indicating a fraction amount of shares to Buy (0 to 1) or
        # sell (-1 to 0). The fraction is taken on the allowable number of
        # shares that can be bought or sold based on the account balance (no margin).
        # BUGFIX: dtype was np.float, an alias removed in NumPy 1.24.
        self.action_space = spaces.Box(
            low=np.array([-1]), high=np.array([1]), dtype=np.float32
        )

        self.observation_features = [
            "Open",
            "High",
            "Low",
            "Close",
            "Adj Close",
            "Volume",
        ]
        self.obs_width, self.obs_height = 128, 128
        self.horizon = env_config.get("observation_horizon_sequence_length")
        # Image observation (height, width, RGB); derived from
        # obs_width/obs_height instead of repeating the literal 128s.
        self.observation_space = spaces.Box(
            low=0,
            high=255,
            shape=(self.obs_height, self.obs_width, 3),
            dtype=np.uint8,
        )
        self.viz = None  # Visualizer

    def step(self, action):
        """Execute one trade action and advance the data stream by one day.

        Returns:
            (obs, reward, done, info): reward is the cumulative profit/loss
            vs the opening balance; done when broke or out of data.
        """
        self.execute_trade_action(action)

        self.current_step += 1

        reward = self.account_value - self.opening_account_balance  # Profit (loss)
        done = self.account_value <= 0 or self.current_step >= len(
            self.ohlcv_df.loc[:, "Open"].values
        )

        obs = self.get_observation()

        return obs, reward, done, {}

    def reset(self):
        """Reset the environment to an initial state; return first observation."""
        self.cash_balance = self.opening_account_balance
        self.account_value = self.opening_account_balance
        self.num_shares_held = 0
        self.cost_basis = 0
        self.current_step = 0
        self.trades = []
        if self.viz is None:
            self.viz = TradeVisualizer(
                self.ticker,
                self.ticker_file_stream,
                "TFRL-Cookbook Ch4-StockTradingVisualContinuousEnv",
            )
        return self.get_observation()

    def render(self, **kwargs):
        """Render the environment to the screen once enough history exists."""
        # NOTE(review): this method was split by the fragment boundary in the
        # original dump; reassembled here with its recorded name `render`.
        if self.current_step > self.horizon:
            self.viz.render(
                self.current_step,
                self.account_value,
                self.trades,
                window_size=self.horizon,
            )

    def close(self):
        """Release visualizer resources."""
        if self.viz is not None:
            self.viz.close()
            self.viz = None

    def get_observation(self):
        """Return a view of the Ticker price chart as image observation

        Returns:
            img_observation (np.ndarray): Image of ticker candle stick plot
                with volume bars as observation, resized to
                (obs_height, obs_width, 3)
        """
        img_observation = self.viz.render_image_observation(
            self.current_step, self.horizon
        )
        img_observation = cv2.resize(
            img_observation,
            dsize=(self.obs_width, self.obs_height),
            interpolation=cv2.INTER_CUBIC,
        )
        return img_observation

    def execute_trade_action(self, action):
        """Simulate a buy/sell order for a fraction of the allowable shares.

        Args:
            action: value in [-1, 1]; sign selects buy/sell, magnitude the
                fraction of allowable shares. 0 means hold.
        """
        if action == 0:  # Indicates "Hold" action
            # Hold position; No trade to be executed
            return

        order_type = "buy" if action > 0 else "sell"

        order_fraction_of_allowable_shares = abs(action)

        # Stochastically determine the current stock price based on Market Open & Close
        current_price = random.uniform(
            self.ohlcv_df.loc[self.current_step, "Open"],
            self.ohlcv_df.loc[self.current_step, "Close"],
        )
        if order_type == "buy":
            allowable_shares = int(self.cash_balance / current_price)
            # Simulate a BUY order and execute it at current_price
            num_shares_bought = int(
                allowable_shares * order_fraction_of_allowable_shares
            )
            current_cost = self.cost_basis * self.num_shares_held
            additional_cost = num_shares_bought * current_price

            self.cash_balance -= additional_cost
            total_shares = self.num_shares_held + num_shares_bought
            if total_shares > 0:
                # BUGFIX: the original divided unconditionally, raising
                # ZeroDivisionError (0 / 0) when no shares were held and the
                # balance could not afford a single share.
                self.cost_basis = (current_cost + additional_cost) / total_shares
            self.num_shares_held += num_shares_bought

            if num_shares_bought > 0:
                self.trades.append(
                    {
                        "type": "buy",
                        "step": self.current_step,
                        "shares": num_shares_bought,
                        "proceeds": additional_cost,
                    }
                )

        elif order_type == "sell":
            # Simulate a SELL order and execute it at current_price
            num_shares_sold = int(
                self.num_shares_held * order_fraction_of_allowable_shares
            )
            self.cash_balance += num_shares_sold * current_price
            self.num_shares_held -= num_shares_sold
            sale_proceeds = num_shares_sold * current_price

            if num_shares_sold > 0:
                self.trades.append(
                    {
                        "type": "sell",
                        "step": self.current_step,
                        "shares": num_shares_sold,
                        "proceeds": sale_proceeds,
                    }
                )

        if self.num_shares_held == 0:
            self.cost_basis = 0

        # Update account value
        self.account_value = self.cash_balance + self.num_shares_held * current_price


if __name__ == "__main__":
    env = StockTradingVisualContinuousEnv()
    obs = env.reset()
    for _ in range(600):
        action = env.action_space.sample()
        next_obs, reward, done, _ = env.step(action)
        env.render()
import-wallet.ts
import { Component } from '@angular/core'; import { FormBuilder, FormGroup, Validators } from '@angular/forms'; import { TranslateService } from '@ngx-translate/core'; import { App, Events, NavController, NavParams } from 'ionic-angular'; import { Logger } from '../../../providers/logger/logger'; // Pages import { DisclaimerPage } from '../../onboarding/disclaimer/disclaimer'; import { ScanPage } from '../../scan/scan'; import { TabsPage } from '../../tabs/tabs'; // Providers import { ActionSheetProvider } from '../../../providers/action-sheet/action-sheet'; import { BwcProvider } from '../../../providers/bwc/bwc'; import { ConfigProvider } from '../../../providers/config/config'; import { DerivationPathHelperProvider } from '../../../providers/derivation-path-helper/derivation-path-helper'; import { OnGoingProcessProvider } from '../../../providers/on-going-process/on-going-process'; import { PlatformProvider } from '../../../providers/platform/platform'; import { PopupProvider } from '../../../providers/popup/popup'; import { ProfileProvider } from '../../../providers/profile/profile'; import { PushNotificationsProvider } from '../../../providers/push-notifications/push-notifications'; import { WalletOptions, WalletProvider } from '../../../providers/wallet/wallet'; @Component({ selector: 'page-import-wallet', templateUrl: 'import-wallet.html' }) export class ImportWalletPage { private derivationPathByDefault: string; private derivationPathForTestnet: string; private importForm: FormGroup; private reader: FileReader; private defaults; private errors; public prettyFileName: string; public importErr: boolean; public fromOnboarding: boolean; public formFile; public showAdvOpts: boolean; public selectedTab: string; public isCordova: boolean; public isSafari: boolean; public isIOS: boolean; public file: File; public code; public okText: string; public cancelText: string; constructor( private app: App, private navCtrl: NavController, private navParams: NavParams, 
private form: FormBuilder, private bwcProvider: BwcProvider, private derivationPathHelperProvider: DerivationPathHelperProvider, private walletProvider: WalletProvider, private configProvider: ConfigProvider, private popupProvider: PopupProvider, private platformProvider: PlatformProvider, private logger: Logger, private onGoingProcessProvider: OnGoingProcessProvider, private profileProvider: ProfileProvider, private translate: TranslateService, private events: Events, private pushNotificationsProvider: PushNotificationsProvider, private actionSheetProvider: ActionSheetProvider ) { this.okText = this.translate.instant('Ok'); this.cancelText = this.translate.instant('Cancel'); this.reader = new FileReader(); this.defaults = this.configProvider.getDefaults(); this.errors = bwcProvider.getErrors(); this.isCordova = this.platformProvider.isCordova; this.isSafari = this.platformProvider.isSafari; this.isIOS = this.platformProvider.isIOS; this.importErr = false; this.fromOnboarding = this.navParams.data.fromOnboarding; this.code = this.navParams.data.code; this.selectedTab = 'words'; this.derivationPathByDefault = this.derivationPathHelperProvider.default; this.derivationPathForTestnet = this.derivationPathHelperProvider.defaultTestnet; this.showAdvOpts = false; this.formFile = null; this.importForm = this.form.group({ words: [null, Validators.required], backupText: [null], passphrase: [null], file: [null], filePassword: [null], derivationPath: [this.derivationPathByDefault, Validators.required], testnetEnabled: [false], bwsURL: [this.defaults.bws.url], coin: [null, Validators.required] }); this.events.subscribe('update:words', data => { this.processWalletInfo(data.value); }); } ionViewWillEnter() { if (this.code) { this.processWalletInfo(this.code); } } ngOnDestroy() { this.events.unsubscribe('update:words'); } selectTab(tab: string) { this.selectedTab = tab; switch (tab) { case 'words': this.file = null; this.formFile = null; 
this.importForm.get('words').setValidators([Validators.required]); this.importForm.get('coin').setValidators([Validators.required]); this.importForm.get('filePassword').clearValidators(); if (this.isCordova || this.isSafari) this.importForm.get('backupText').clearValidators(); else this.importForm.get('file').clearValidators(); break; case 'file': if (this.isCordova || this.isSafari) this.importForm .get('backupText') .setValidators([Validators.required]); else this.importForm.get('file').setValidators([Validators.required]); this.importForm .get('filePassword') .setValidators([Validators.required]); this.importForm.get('words').clearValidators(); this.importForm.get('coin').clearValidators(); break; default: this.importForm.get('words').clearValidators(); this.importForm.get('file').clearValidators(); this.importForm.get('filePassword').clearValidators(); break; } this.importForm.get('words').updateValueAndValidity(); this.importForm.get('file').updateValueAndValidity(); this.importForm.get('filePassword').updateValueAndValidity(); this.importForm.get('backupText').updateValueAndValidity(); this.importForm.get('coin').updateValueAndValidity(); } normalizeMnemonic(words: string) { if (!words || !words.indexOf) return words; var isJA = words.indexOf('\u3000') > -1; var wordList = words.split(/[\u3000\s]+/); return wordList.join(isJA ? '\u3000' : ' '); } private processWalletInfo(code: string): void { if (!code) return; this.importErr = false; let parsedCode = code.split('|'); let info = { type: parsedCode[0], data: parsedCode[1], network: parsedCode[2], derivationPath: parsedCode[3], hasPassphrase: parsedCode[4] == 'true' ? 
true : false, coin: parsedCode[5] }; if (!info.data) { const errorInfoSheet = this.actionSheetProvider.createInfoSheet( 'default-error', { msg: this.translate.instant('Invalid data'), title: this.translate.instant('Error') } ); errorInfoSheet.present(); } if (info.type == '1' && info.hasPassphrase) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant( 'Password required. Make sure to enter your password in advanced options' ); this.popupProvider.ionicAlert(title, subtitle); } let isTestnet = info.network == 'testnet' ? true : false; this.importForm.controls['testnetEnabled'].setValue(isTestnet); this.importForm.controls['derivationPath'].setValue(info.derivationPath); this.importForm.controls['words'].setValue(info.data); this.importForm.controls['coin'].setValue(info.coin); } public setDerivationPath(): void { let path = this.importForm.value.testnetEnabled ? this.derivationPathForTestnet : this.derivationPathByDefault; this.importForm.controls['derivationPath'].setValue(path); } private importBlob(str: string, opts): void { let str2: string;
try { str2 = this.bwcProvider .getSJCL() .decrypt(this.importForm.value.filePassword, str); } catch (e) { err = this.translate.instant( 'Could not decrypt file, check your password' ); this.logger.error('Import: could not decrypt file', e); } if (err) { let title = this.translate.instant('Error'); this.popupProvider.ionicAlert(title, err); return; } this.onGoingProcessProvider.set('importingWallet'); opts.compressed = null; opts.password = null; setTimeout(() => { this.profileProvider .importWallet(str2, opts) .then(wallet => { this.onGoingProcessProvider.clear(); this.finish(wallet); }) .catch(err => { this.onGoingProcessProvider.clear(); let title = this.translate.instant('Error'); this.popupProvider.ionicAlert(title, err); return; }); }, 100); } private finish(wallet): void { this.walletProvider .updateRemotePreferences(wallet) .then(() => { this.profileProvider.setBackupFlag(wallet.credentials.walletId); this.events.publish('status:updated'); this.pushNotificationsProvider.updateSubscription(wallet); if (this.fromOnboarding) { this.profileProvider.setOnboardingCompleted(); this.navCtrl.push(DisclaimerPage); } else { this.app .getRootNavs()[0] .setRoot(TabsPage) .then(() => { this.events.publish('OpenWallet', wallet); }); } }) .catch(err => { this.logger.error('Import: could not updateRemotePreferences', err); }); } private importExtendedPrivateKey(xPrivKey, opts) { this.onGoingProcessProvider.set('importingWallet'); setTimeout(() => { this.profileProvider .importExtendedPrivateKey(xPrivKey, opts) .then(wallet => { this.onGoingProcessProvider.clear(); this.finish(wallet); }) .catch(err => { if (err instanceof this.errors.NOT_AUTHORIZED) { this.importErr = true; } else { let title = this.translate.instant('Error'); this.popupProvider.ionicAlert(title, err); } this.onGoingProcessProvider.clear(); return; }); }, 100); } private importMnemonic(words: string, opts): void { this.onGoingProcessProvider.set('importingWallet'); setTimeout(() => { this.profileProvider 
.importMnemonic(words, opts) .then(wallet => { this.onGoingProcessProvider.clear(); this.finish(wallet); }) .catch(err => { if (err instanceof this.errors.NOT_AUTHORIZED) { this.importErr = true; } else { let title = this.translate.instant('Error'); this.popupProvider.ionicAlert(title, err); } this.onGoingProcessProvider.clear(); return; }); }, 100); } import() { if (this.selectedTab === 'file') { this.importFromFile(); } else { this.importFromMnemonic(); } } public importFromFile(): void { if (!this.importForm.valid) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant('There is an error in the form'); this.popupProvider.ionicAlert(title, subtitle); return; } let backupFile = this.file; let backupText = this.importForm.value.backupText; if (!backupFile && !backupText) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant('Please, select your backup file'); this.popupProvider.ionicAlert(title, subtitle); return; } if (backupFile) { this.reader.readAsBinaryString(backupFile); } else { let opts: Partial<WalletOptions> = {}; opts.bwsurl = this.importForm.value.bwsURL; opts.coin = this.importForm.value.coin; this.importBlob(backupText, opts); } } public importFromMnemonic(): void { if (!this.importForm.valid) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant('There is an error in the form'); this.popupProvider.ionicAlert(title, subtitle); return; } let opts: Partial<WalletOptions> = {}; if (this.importForm.value.bwsURL) opts.bwsurl = this.importForm.value.bwsURL; let pathData = this.derivationPathHelperProvider.parse( this.importForm.value.derivationPath ); if (!pathData) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant('Invalid derivation path'); this.popupProvider.ionicAlert(title, subtitle); return; } opts.account = pathData.account; opts.networkName = pathData.networkName; opts.derivationStrategy = pathData.derivationStrategy; 
opts.coin = this.importForm.value.coin; let words: string = this.importForm.value.words || null; if (!words) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant('Please enter the recovery phrase'); this.popupProvider.ionicAlert(title, subtitle); return; } else if (words.indexOf('xprv') == 0 || words.indexOf('tprv') == 0) { return this.importExtendedPrivateKey(words, opts); } else { let wordList = words.split(/[\u3000\s]+/); if (wordList.length % 3 != 0) { let title = this.translate.instant('Error'); let subtitle = this.translate.instant( 'Wrong number of recovery words:' ); this.popupProvider.ionicAlert(title, subtitle + ' ' + wordList.length); return; } } opts.passphrase = this.importForm.value.passphrase || null; this.importMnemonic(words, opts); } public toggleShowAdvOpts(): void { this.showAdvOpts = !this.showAdvOpts; } public fileChangeEvent($event) { this.file = $event.target ? $event.target.files[0] : $event.srcElement.files[0]; this.formFile = $event.target.value; // Most browsers return `C:\fakepath\FILENAME` this.prettyFileName = this.formFile.split('\\').pop(); this.getFile(); } private getFile() { // If we use onloadend, we need to check the readyState. this.reader.onloadend = () => { if (this.reader.readyState === 2) { // DONE === 2 let opts: Partial<WalletOptions> = {}; opts.bwsurl = this.importForm.value.bwsURL; opts.coin = this.importForm.value.coin; this.importBlob(this.reader.result, opts); } }; } public openScanner(): void { this.navCtrl.push(ScanPage, { fromImport: true }); } }
let err = null;
update-sprite.js
/* * Contains functions for moving sprites. */ var updateSprite = { screenWidth: 0, screenHeight: 0 }; updateSprite.setCanvasDimensions = function (width, height) { updateSprite.screenWidth = width; updateSprite.screenHeight = height; }; /* Determine the directional movement of a sprite by examining its * vx/vy properties. Use the sprite's bounding-box edges to test * for collision with the edge of the canvas. * If it collides, apply a small amount of reverse velocity to the * sprite to bounce it off the wall. */ updateSprite.checkCollision = function (sprite) { //test right boundary if (sprite.vx > 0 && (sprite.x + sprite.width) >= updateSprite.screenWidth) { sprite.vx = -1; //test left boundary } else if (sprite.vx < 0 && sprite.x < 0) { sprite.vx = 1; } //test bottom boundary if (sprite.vy > 0 && (sprite.y + sprite.height) >= updateSprite.screenHeight) { sprite.vy = -1; //test top boundary } else if (sprite.vy < 0 && sprite.y < 0) { sprite.vy = 1; } } /* Gives the appearance that spriteA is looking at spriteB. * If spriteB is moving past spriteA's x or y position, play a single * frame on spriteA's spritesheet to switch the animation sequence. */ updateSprite.lookAt = function (spriteA, spriteB) { if (spriteB.vy > 0 && spriteB.y >= spriteA.y) { spriteA.play(0); //down animation spriteA.stop(); } else if (spriteB.vy < 0 && spriteB.y < spriteA.y) { spriteA.play(3); //up animation spriteA.stop(); } if (spriteB.vx > 0 && spriteB.x >= spriteA.x) { spriteA.play(2); //right animation spriteA.stop(); } else if (spriteB.vx < 0 && spriteB.x < spriteA.x) { spriteA.play(1); //left animation spriteA.stop(); } } /* This function is called every frame and simply moves the * sprite horizontally across the entire length of the canvas. * When the sprite reaches the end, wrap it around to the * beginning, starting at a random y position. 
*/ updateSprite.moveBird = function (sprite) { //check for screen wrap if (sprite.x > updateSprite.screenWidth) { sprite.x = -sprite.width; //start at random height on canvas within two edge tiles var tileHeight = 32; var min = (tileHeight * 2); var max = updateSprite.screenHeight - (tileHeight * 2); sprite.y = Math.floor(Math.random() * (max - min + 1)) + min;
} sprite.x += 2; }
req.py
# -*- coding: utf-8 -*- """ Request Management """ module = request.controller resourcename = request.function if not settings.has_module(module): raise HTTP(404, body="Module disabled: %s" % module) # ----------------------------------------------------------------------------- def index(): """ Module's Home Page """ return s3db.cms_index(module, alt_function="index_alt") # ----------------------------------------------------------------------------- def index_alt(): """ Module homepage for non-Admin users when no CMS content found """ # Just redirect to the list of Requests redirect(URL(f="req", args=["search"])) # ----------------------------------------------------------------------------- def is_affiliated(): """ Check if User is affiliated to an Organisation @ToDo: Move this elsewhere """ if not auth.is_logged_in(): return False elif s3_has_role(ADMIN): return True else: table = auth.settings.table_user auth_user = db(table.id == auth.user.id).select(table.organisation_id, limitby=(0, 1) ).first() if auth_user and auth_user.organisation_id: return True else: return False # ============================================================================= def create(): """ Redirect to req/create """ redirect(URL(f="req", args="create")) # ----------------------------------------------------------------------------- def marker_fn(record): """ Function to decide which Marker to use for Requests Map @ToDo: Use Symbology """ # Base Icon based on Type type = record.type if type in (1, 8): # Items marker = "asset" elif type == 3: # People marker = "staff" #elif type == 6: # # Food # marker = "food" else: marker = "request" # Colour code by priority priority = record.priority if priority == 3: # High marker = "%s_red" % marker elif priority == 2: # Medium marker = "%s_yellow" % marker #elif priority == 1: # # Low # marker = "%s_yellow" % marker mtable = db.gis_marker marker = db(mtable.name == marker).select(mtable.image, mtable.height, mtable.width, cache=s3db.cache, 
limitby=(0, 1)).first() return marker # ----------------------------------------------------------------------------- def req(): """ REST Controller for Request Instances """ s3.filter = (s3db.req_req.is_template == False) output = req_controller() return output # ----------------------------------------------------------------------------- def req_template(): """ REST Controller for Request Templates """ # Hide fields which aren't relevant to templates # @ToDo: Need to get this done later after being opened by Types? table = s3db.req_req field = table.is_template field.default = True field.readable = field.writable = False s3.filter = (field == True) if "req_item" in request.args: # List fields for req_item table = s3db.req_req_item list_fields = ["id", "item_id", "item_pack_id", "quantity", "comments", ] s3db.configure("req_req_item", list_fields=list_fields) elif "req_skill" in request.args: # List fields for req_skill table = s3db.req_req_skill list_fields = ["id", "skill_id", "quantity", "comments", ] s3db.configure("req_req_skill", list_fields=list_fields) else: # Main Req fields = ["req_ref", "date", "date_required", "date_required_until", "date_recv", "recv_by_id", "cancel", "commit_status", "transit_status", "fulfil_status", ] for fieldname in fields: field = table[fieldname] field.readable = field.writable = False table.purpose.label = T("Details") list_fields = ["id", "site_id" ] if len(settings.get_req_req_type()) > 1: list_fields.append("type") list_fields.append("priority") list_fields.append("purpose") list_fields.append("comments") s3db.configure("req_req", list_fields=list_fields) # CRUD strings ADD_REQUEST = T("Add Request Template") s3.crud_strings["req_req"] = Storage( title_create = ADD_REQUEST, title_display = T("Request Template Details"), title_list = T("Request Templates"), title_update = T("Edit Request Template"), subtitle_create = ADD_REQUEST, label_list_button = T("List Request Templates"), label_create_button = ADD_REQUEST, 
label_delete_button = T("Delete Request Template"), msg_record_created = T("Request Template Added"), msg_record_modified = T("Request Template Updated"), msg_record_deleted = T("Request Template Deleted"), msg_list_empty = T("No Request Templates")) output = req_controller() return output # ----------------------------------------------------------------------------- def req_controller(): """ REST Controller """ def prep(r): table = r.table s3.req_prep(r) #if len(settings.get_req_req_type()) == 1: # # Remove type from list_fields # list_fields = s3db.get_config("req_req", "list_fields") # try: # list_fields.remove("type") # except: # # It has already been removed. # # This can happen if the req controller is called # # for a second time, such as when printing reports # pass # s3db.configure("req_req", list_fields=list_fields) type = (r.record and r.record.type) or \ (request.vars.type and int(request.vars.type)) if r.interactive: # Set the req_item site_id (Requested From), called from action buttons on req/req_item_inv_item/x page if "req_item_id" in request.vars and "inv_item_id" in request.vars: iitable = s3db.inv_inv_item inv_item = db(iitable.id == request.vars.inv_item_id).select(iitable.site_id, iitable.item_id, limitby=(0, 1) ).first() site_id = inv_item.site_id item_id = inv_item.item_id # @ToDo: Check Permissions & Avoid DB updates in GETs db(s3db.req_req_item.id == request.vars.req_item_id).update(site_id = site_id) response.confirmation = T("%(item)s requested from %(site)s") % \ {"item": s3db.supply_ItemRepresent()(item_id), "site": s3db.org_SiteRepresent()(site_id) } elif "req.site_id" in r.get_vars: # Called from 'Make new request' button on [siteinstance]/req page table.site_id.default = request.get_vars.get("req.site_id") table.site_id.writable = False if r.http == "POST": del r.get_vars["req.site_id"] table.requester_id.represent = requester_represent # Set Fields and Labels depending on type if type: table.type.default = type # This prevents the 
type from being edited AFTER it is set table.type.readable = table.type.writable = False crud_strings = settings.get_req_req_crud_strings(type) if crud_strings: s3.crud_strings["req_req"] = crud_strings elif type == 1: s3.crud_strings["req_req"].title_create = T("Make Supplies Request") elif type == 3: s3.crud_strings["req_req"].title_create = T("Make People Request") # Filter the query based on type if s3.filter: s3.filter = s3.filter & \ (table.type == type) else: s3.filter = (table.type == type) # These changes are applied via JS in create forms where type is editable if type == 1: # Item table.date_recv.readable = table.date_recv.writable = True if settings.get_req_items_ask_purpose(): table.purpose.label = T("What the Items will be used for") table.site_id.label = T("Deliver To") table.request_for_id.label = T("Deliver To") table.requester_id.label = T("Site Contact") table.recv_by_id.label = T("Delivered To") elif type == 3: # Person table.date_required_until.readable = table.date_required_until.writable = True table.purpose.label = T("Task Details") table.purpose.comment = DIV(_class="tooltip", _title="%s|%s" % (T("Task Details"), T("Include any special requirements such as equipment which they need to bring."))) table.site_id.label = T("Report To") table.requester_id.label = T("Volunteer Contact") table.request_for_id.label = T("Report To") table.recv_by_id.label = T("Reported To") if r.component: if r.component.name == "document": s3.crud.submit_button = T("Add") #table = r.component.table # @ToDo: Fix for Link Table #table.date.default = r.record.date #if r.record.site_id: # stable = db.org_site # query = (stable.id == r.record.site_id) # site = db(query).select(stable.location_id, # stable.organisation_id, # limitby=(0, 1)).first() # if site: # table.location_id.default = site.location_id # table.organisation_id.default = site.organisation_id elif r.component.name == "req_item": ctable = r.component.table ctable.site_id.writable = ctable.site_id.readable 
= False s3.req_hide_quantities(ctable) elif r.component.name == "req_skill": s3.req_hide_quantities(r.component.table) elif r.component.alias == "job": s3task.configure_tasktable_crud( function="req_add_from_template", args = [r.id], vars = dict(user_id = auth.user is not None and auth.user.id or 0), period = 86400, # seconds, so 1 day ) db.scheduler_task.timeout.writable = False else: if r.id: table.is_template.readable = table.is_template.writable = False method = r.method if method not in ("map", "read", "search", "update"): # Hide fields which don't make sense in a Create form # - includes one embedded in list_create # - list_fields over-rides, so still visible within list itself s3.req_create_form_mods() if type and settings.get_req_inline_forms(): # Inline Forms s3.req_inline_form(type, method) # Get the default Facility for this user #if settings.has_module("hrm"): # hrtable = s3db.hrm_human_resource # query = (hrtable.person_id == s3_logged_in_person()) # site = db(query).select(hrtable.site_id, # limitby=(0, 1)).first() # if site: # r.table.site_id.default = site.site_id # Use site_id in User Profile if auth.is_logged_in(): if not table.site_id.default: table.site_id.default = auth.user.site_id elif method == "map": # Tell the client to request per-feature markers s3db.configure("req_req", marker_fn=marker_fn) elif method == "update": if settings.get_req_inline_forms(): # Inline Forms s3.req_inline_form(type, method) s3.scripts.append("/%s/static/scripts/S3/s3.req_update.js" % appname) # Prevent Items from being added to closed or cancelled requests if r.record and (r.record.closed or r.record.cancel): s3db.configure("req_req_item", insertable = False) elif r.representation == "plain": # Map Popups pass elif r.representation == "geojson": # Load these models now as they'll be needed when we encode mtable = s3db.gis_marker s3db.configure("req_req", marker_fn=marker_fn) if r.component and r.component.name == "commit": table = r.component.table record = 
r.record stable = s3db.org_site commit_status = record.commit_status # Commits belonging to this request rsites = [] query = (table.deleted == False)&(table.req_id == record.id) req_sites = db(query).select(table.site_id) for req_site in req_sites: rsites += [req_site.site_id] # All the sites commit_sites = db((stable.deleted == False)).select(stable.id, stable.code) # Sites which have not committed to this request yet site_opts = {} for site in commit_sites: if (site.id not in site_opts) and (site.id not in rsites): site_opts[site.id] = site.code table.site_id.requires = IS_IN_SET(site_opts) if (commit_status == 2) and settings.get_req_restrict_on_complete(): # Restrict from committing to completed requests s3db.configure(table, listadd=False) else: # Allow commitments to be added when doing so as a component s3db.configure(table, listadd = True) if type == 1: # Items # Limit site_id to facilities the user has permissions for auth.permitted_facilities(table=r.table, error_msg=T("You do not have permission for any facility to make a commitment.")) if r.interactive: # Dropdown not Autocomplete itable = s3db.req_commit_item itable.req_item_id.widget = None req_id = r.id s3db.req_commit_item.req_item_id.requires = \ IS_ONE_OF(db, "req_req_item.id", s3db.req_item_represent, orderby = "req_req_item.id", filterby = "req_id", filter_opts = [req_id], sort=True ) s3.jquery_ready.append(''' S3OptionsFilter({ 'triggerName':'req_item_id', 'targetName':'item_pack_id', 'lookupPrefix':'req', 'lookupResource':'req_item_packs', 'lookupKey':'req_item_id', 'lookupField':'id', 'msgNoRecords':i18n.no_packs, 'fncPrep':S3.supply.fncPrepItem, 'fncRepresent':S3.supply.fncRepresentItem })''') # Custom Form s3forms = s3base.s3forms crud_form = s3forms.S3SQLCustomForm( "site_id", "date", "date_available", "committer_id", s3forms.S3SQLInlineComponent( "commit_item", label = T("Items"), fields = ["req_item_id", "item_pack_id", "quantity", "comments" ] ), "comments", ) 
s3db.configure("req_commit", crud_form=crud_form) # Redirect to the Items tab after creation #s3db.configure(table, # create_next = URL(c="req", f="commit", # args=["[id]", "commit_item"]), # update_next = URL(c="req", f="commit", # args=["[id]", "commit_item"])) elif type == 3: # People # Limit site_id to orgs the user has permissions for # @ToDo: Make this customisable between Site/Org # @ToDo: is_affiliated() auth.permitted_facilities(table=r.table, error_msg=T("You do not have permission for any facility to make a commitment.")) # Limit organisation_id to organisations the user has permissions for #auth.permitted_organisations(table=r.table, redirect_on_error=False) if r.interactive: #table.organisation_id.readable = True #table.organisation_id.writable = True # Custom Form s3forms = s3base.s3forms crud_form = s3forms.S3SQLCustomForm( "site_id", "date", "date_available", "committer_id", s3forms.S3SQLInlineComponent( "commit_skill", label = T("Skills"), fields = ["quantity", "skill_id", "comments" ] ), "comments", ) s3db.configure("req_commit", crud_form=crud_form) # Redirect to the Skills tab after creation #s3db.configure(table, # create_next = URL(c="req", f="commit", # args=["[id]", "commit_skill"]), # update_next = URL(c="req", f="commit", # args=["[id]", "commit_skill"])) else: # Non-Item commits can have an Organisation # Check if user is affiliated to an Organisation if is_affiliated(): # Limit organisation_id to organisations the user has permissions for auth.permitted_organisations(table=r.table, redirect_on_error=False) table.organisation_id.readable = table.organisation_id.writable = True else: # Unaffiliated people can't commit on behalf of others field = r.component.table.committer_id field.writable = False field.comment = None # Non-Item commits shouldn't have a From Inventory # @ToDo: Assets do? 
(Well, a 'From Site') table.site_id.readable = table.site_id.writable = False #if r.interactive and r.record.type == 3: # People # # Redirect to the Persons tab after creation # s3db.configure(table, # create_next = URL(c="req", f="commit", # args=["[id]", "commit_person"]), # update_next = URL(c="req", f="commit", # args=["[id]", "commit_person"]) # ) else: # Limit site_id to facilities the user has permissions for # @ToDo: Non-Item requests shouldn't be bound to a Facility? auth.permitted_facilities(table=r.table, error_msg=T("You do not have permission for any facility to make a request.")) return True s3.prep = prep # Post-process def postp(r, output): if r.interactive and r.method != "import": if not r.component: s3_action_buttons(r) #s3_action_buttons(r, copyable=True) # if "buttons" in output: # buttons = output["buttons"] # if "delete_btn" in buttons: # delete_btn = buttons["delete_btn"] # delete_btn = DIV(delete_btn, # A(T("Copy Request"), # _href=URL(args=[r.id, "copy"], ##vars={"type":r.record.type} # ), # _class="action-btn")) # output["buttons"]["delete_btn"] = delete_btn if settings.get_req_use_commit(): # This is appropriate to all s3.actions.append( dict(url = URL(c="req", f="req", args=["[id]", "commit_all"]), _class = "action-btn commit-btn", label = str(T("Commit")) ) ) s3.jquery_ready.append( '''S3ConfirmClick('.commit-btn','%s')''' % T("Do you want to commit to this request?")) # This is only appropriate for item requests #query = (r.table.type == 1) #rows = db(query).select(r.table.id) #restrict = [str(row.id) for row in rows] #s3.actions.append( # dict(url = URL(c="req", f="req", # args=["[id]", "req_item"]), # _class = "action-btn", # label = str(T("View Items")), # restrict = restrict # ) # ) # This is only appropriate for people requests #query = (r.table.type == 3) #rows = db(query).select(r.table.id) #restrict = [str(row.id) for row in rows] #s3.actions.append( # dict(url = URL(c="req", f="req", # args=["[id]", "req_skill"]), # _class = 
"action-btn", # label = str(T("View Skills")), # restrict = restrict # ) # ) s3.actions.append( dict(url = URL(c="req", f="req", args=["[id]", "commit_all", "send"]), _class = "action-btn send-btn", label = str(T("Send")) ) ) s3.jquery_ready.append( '''S3ConfirmClick('.send-btn','%s')''' % T("Are you sure you want to commit to this request and send a shipment?")) else: s3_action_buttons(r) if r.component.name == "req_item" and settings.get_req_prompt_match(): req_item_inv_item_btn = dict(url = URL(c = "req", f = "req_item_inv_item", args = ["[id]"] ), _class = "action-btn", label = str(T("Request from Facility")), ) s3.actions.append(req_item_inv_item_btn) if r.component.name == "commit": if "form" in output: id = r.record.id ctable = s3db.req_commit query = (ctable.deleted == False) & \ (ctable.req_id == id) exists = current.db(query).select(ctable.id, limitby=(0, 1)) if not exists: output["form"] = A(T("Commit All"), _href=URL(args=[id, "commit_all"]), _class="action-btn", _id="commit-btn") s3.jquery_ready.append(''' S3ConfirmClick('#commit-btn','%s')''' % T("Do you want to commit to this request?")) else: s3.actions.append( dict(url = URL(c="req", f="send_commit", args = ["[id]"]), _class = "action-btn send-btn", label = str(T("Prepare Shipment")) ) ) s3.jquery_ready.append( '''S3ConfirmClick('.send-btn','%s')''' % T("Are you sure you want to send this shipment?")) if r.component.alias == "job": s3.actions = [ dict(label=str(T("Open")), _class="action-btn", url=URL(c="req", f="req_template", args=[str(r.id), "job", "[id]"])), dict(label=str(T("Reset")), _class="action-btn", url=URL(c="req", f="req_template", args=[str(r.id), "job", "[id]", "reset"])), dict(label=str(T("Run Now")), _class="action-btn", url=URL(c="req", f="req_template", args=[str(r.id), "job", "[id]", "run"])), ] return output s3.postp = postp output = s3_rest_controller("req", "req", rheader=s3db.req_rheader) return output # 
============================================================================= def requester_represent(id, show_link=True): """ Represent a Requester as Name + Tel# """ if not id: return current.messages["NONE"] htable = s3db.hrm_human_resource ptable = s3db.pr_person ctable = s3db.pr_contact query = (htable.id == id) & \ (htable.person_id == ptable.id) left = ctable.on((ctable.pe_id == ptable.pe_id) & \ (ctable.contact_method == "SMS")) row = db(query).select(htable.type, ptable.first_name, ptable.middle_name, ptable.last_name, ctable.value, left=left, limitby=(0, 1)).first() try: hr = row["hrm_human_resource"] except: return current.messages.UNKNOWN_OPT repr = s3_fullname(row.pr_person) if row.pr_contact.value: repr = "%s %s" % (repr, row.pr_contact.value) if show_link: if hr.type == 1: controller = "hrm" group = "staff" else: controller = "vol" group = "volunteer" request.extension = "html" return A(repr, _href = URL(c = controller, f = "person", args = ["contacts"], vars = {"group": group, "human_resource.id": id} ) ) return repr # ============================================================================= def req_item(): """ REST Controller @ToDo: Filter out fulfilled Items? 
""" if not s3.filter: # Filter out Template Items ritable = s3db.req_req_item rtable = db.req_req s3.filter = (rtable.is_template == False) & \ (rtable.id == ritable.req_id) # Search method search_method = s3db.get_config("req_req_item", "search_method") if not search_method: S3SearchOptionsWidget = s3base.S3SearchOptionsWidget req_item_search = ( S3SearchOptionsWidget( name="req_search_fulfil_status", label=T("Status"), field="req_id$fulfil_status", options = s3.req_status_opts, cols = 3, ), S3SearchOptionsWidget( name="req_search_priority", label=T("Priority"), field="req_id$priority", options = s3.req_priority_opts, cols = 3, ), #S3SearchOptionsWidget( # name="req_search_L1", # field="req_id$site_id$location_id$L1", # location_level="L1", # cols = 3, #), #S3SearchOptionsWidget( # name="req_search_L2", # field="req_id$site_id$location_id$L2", # location_level="L2", # cols = 3, #), S3SearchOptionsWidget( name="req_search_L3", field="req_id$site_id$location_id$L3", location_level="L3", cols = 3, ), S3SearchOptionsWidget( name="req_search_L4", field="req_id$site_id$location_id$L4", location_level="L4", cols = 3, ), ) s3db.configure("req_req_item", search_method = s3base.S3Search(advanced=req_item_search), ) def prep(r): if r.interactive: list_fields = s3db.get_config("req_req_item", "list_fields") list_fields.insert(1, "req_id$site_id") list_fields.insert(1, "req_id$site_id$location_id$L4") list_fields.insert(1, "req_id$site_id$location_id$L3") s3db.configure("req_req_item", insertable = False, list_fields = list_fields, ) s3.crud_strings["req_req_item"].title_list = T("Requested Items") if r.method != None and r.method != "update" and r.method != "read": # Hide fields which don't make sense in a Create form # - includes one embedded in list_create # - list_fields over-rides, so still visible within list itself s3db.req_hide_quantities(r.table) return True s3.prep = prep output = s3_rest_controller("req", "req_item") if settings.get_req_prompt_match(): 
req_item_inv_item_btn = dict(url = URL(c="req", f="req_item_inv_item", args=["[id]"]), _class = "action-btn", label = str(T("Request from Facility")), ) if s3.actions: s3.actions += [req_item_inv_item_btn] else: s3.actions = [req_item_inv_item_btn] return output # ----------------------------------------------------------------------------- def req_item_packs(): """ Called by S3OptionsFilter to provide the pack options for an Item """ req_item_id = None args = request.args if len(args) == 1 and args[0].isdigit(): req_item_id = args[0] else: for v in request.vars: if "." in v and v.split(".", 1)[1] == "req_item_id": req_item_id = request.vars[v] break table = s3db.supply_item_pack ritable = s3db.req_req_item query = (ritable.id == req_item_id) & \ (ritable.item_id == table.item_id) response.headers["Content-Type"] = "application/json" return db(query).select(table.id, table.name, table.quantity).json() # ----------------------------------------------------------------------------- def req_item_inv_item(): """ Shows the inventory items which match a requested item @ToDo: Make this page a component of req_item """ req_item_id = request.args[0] request.args = [] # ritable = s3db.req_req_item req_item = ritable[req_item_id] rtable = s3db.req_req req = rtable[req_item.req_id] output = {} output["title"] = T("Request Stock from Available Warehouse") output["req_btn"] = A(T("Return to Request"), _href = URL(c="req", f="req", args=[req_item.req_id, "req_item"]), _class = "action-btn" ) output["req_item"] = TABLE( TR( TH( "%s: " % T("Requested By") ), rtable.site_id.represent(req.site_id), TH( "%s: " % T("Item")), ritable.item_id.represent(req_item.item_id), ), TR( TH( "%s: " % T("Requester") ), rtable.requester_id.represent(req.requester_id), TH( "%s: " % T("Quantity")), req_item.quantity, ), TR( TH( "%s: " % T("Date Requested") ), rtable.date.represent(req.date), TH( T("Quantity Committed")), req_item.quantity_commit, ), TR( TH( "%s: " % T("Date Required") ), 
rtable.date_required.represent(req.date_required), TH( "%s: " % T("Quantity in Transit")), req_item.quantity_transit, ), TR( TH( "%s: " % T("Priority") ), rtable.priority.represent(req.priority), TH( "%s: " % T("Quantity Fulfilled")), req_item.quantity_fulfil, ) ) s3.no_sspag = True # pagination won't work with 2 datatables on one page @todo: test itable = s3db.inv_inv_item # Get list of matching inventory items s3.filter = (itable.item_id == req_item.item_id) # Tweak CRUD String for this context s3.crud_strings["inv_inv_item"].msg_list_empty = T("No Inventories currently have this item in stock") inv_items = s3_rest_controller("inv", "inv_item") output["items"] = inv_items["items"] if current.deployment_settings.get_supply_use_alt_name(): # Get list of alternative inventory items atable = s3db.supply_item_alt query = (atable.item_id == req_item.item_id ) & \ (atable.deleted == False ) alt_item_rows = db(query).select(atable.alt_item_id) alt_item_ids = [alt_item_row.alt_item_id for alt_item_row in alt_item_rows] if alt_item_ids: s3.filter = (itable.item_id.belongs(alt_item_ids)) inv_items_alt = s3_rest_controller("inv", "inv_item") output["items_alt"] = inv_items_alt["items"] else: output["items_alt"] = T("No Inventories currently have suitable alternative items in stock") response.view = "req/req_item_inv_item.html" s3.actions = [dict(url = URL(c = request.controller, f = "req", args = [req_item.req_id, "req_item"], vars = dict(req_item_id = req_item_id, inv_item_id = "[id]") ), _class = "action-btn", label = str(T("Request From")), )] return output # ============================================================================= def req_skill(): """ REST Controller @ToDo: Filter out fulfilled Skills? 
""" # Filter out Template Items table = s3db.req_req_skill rtable = s3db.req_req s3.filter = (rtable.is_template == False) & \ (rtable.id == table.req_id) # Search method S3SearchOptionsWidget = s3base.S3SearchOptionsWidget req_skill_search = ( S3SearchOptionsWidget( name="req_search_fulfil_status", label=T("Status"), field="req_id$fulfil_status", options = s3.req_status_opts, cols = 3, ), S3SearchOptionsWidget( name="req_search_priority", label=T("Priority"), field="req_id$priority", options = s3.req_priority_opts, cols = 3, ), #S3SearchOptionsWidget( # name="req_search_L1", # field="req_id$site_id$location_id$L1", # location_level="L1", # cols = 3, #), #S3SearchOptionsWidget( # name="req_search_L2", # field="req_id$site_id$location_id$L2", # location_level="L2", # cols = 3, #), S3SearchOptionsWidget( name="req_search_L3", field="req_id$site_id$location_id$L3", location_level="L3", cols = 3, ), S3SearchOptionsWidget( name="req_search_L4", field="req_id$site_id$location_id$L4", location_level="L4", cols = 3, ), ) s3db.configure("req_req_skill", search_method = s3base.S3Search(advanced=req_skill_search), ) def prep(r): if r.interactive: list_fields = s3db.get_config("req_req_skill", "list_fields") list_fields.insert(1, "req_id$site_id") list_fields.insert(1, "req_id$site_id$location_id$L4") list_fields.insert(1, "req_id$site_id$location_id$L3") s3db.configure("req_req_skill", insertable=False, list_fields = list_fields, ) if r.method != "update" and r.method != "read": # Hide fields which don't make sense in a Create form # - includes one embedded in list_create # - list_fields over-rides, so still visible within list itself s3db.req_hide_quantities(r.table) return True s3.prep = prep # Post-process def postp(r, output): if r.interactive: s3.actions = [ dict(url = URL(c="req", f="req", args=["req_skill", "[id]"]), _class = "action-btn", label = str(READ) ) ] return output s3.postp = postp output = s3_rest_controller("req", "req_skill") return output # 
============================================================================= def summary_option(): """ REST Controller """ return s3_rest_controller() # ============================================================================= def commit(): """ REST Controller """ # Check if user is affiliated to an Organisation if not is_affiliated(): tablename = "req_commit_person" table = s3db[tablename] # Unaffiliated people can't commit on behalf of others table.person_id.writable = False # & can only make single-person commitments # (This should have happened in the main commitment) s3db.configure(tablename, insertable=False) def prep(r):
s3.prep = prep def postp(r, output): if r.interactive and r.method != "import": if not r.component: table = r.table record = r.record s3_action_buttons(r) s3.actions.append( dict(url = URL(f = "send_commit", args=["[id]"]), _class = "action-btn send-btn", label = str(T("Prepare Shipment")) ) ) s3.jquery_ready.append( '''S3ConfirmClick('.send-btn','%s')''' % T("Are you sure you want to send this shipment?")) return output s3.postp = postp output = s3_rest_controller(rheader=commit_rheader) return output # ----------------------------------------------------------------------------- def commit_rheader(r): """ Resource Header for Commitments """ if r.representation == "html": record = r.record if record and r.name == "commit": s3_date_represent = s3base.S3DateTime.date_represent tabs = [(T("Edit Details"), None)] type = record.type and int(record.type) table = r.table if type == 1: tabs.append((T("Items"), "commit_item")) #req_record = db.req_req[record.req_id] #req_date = req_record.date rheader = DIV(TABLE(TR(TH("%s: " % table.req_id.label), table.req_id.represent(record.req_id), ), TR(TH("%s: " % T("Committing Warehouse")), s3db.org_site_represent(record.site_id), TH("%s: " % T("Commit Date")), s3_date_represent(record.date), ), TR(TH("%s: " % table.comments.label), TD(record.comments or "", _colspan=3) ), ), ) prepare_btn = A(T("Prepare Shipment"), _href = URL(f = "send_commit", args = [record.id] ), _id = "send_commit", _class = "action-btn" ) s3.rfooter = TAG[""](prepare_btn) # send_btn = A( T("Send Commitment as Shipment"), # _href = URL(f = "send_commit", # args = [record.id] # ), # _id = "send_commit", # _class = "action-btn" # ) # # send_btn_confirm = SCRIPT("S3ConfirmClick('#send_commit', '%s')" % # T("Do you want to send these Committed items?") ) # s3.rfooter = TAG[""](send_btn,send_btn_confirm) #rheader.append(send_btn) #rheader.append(send_btn_confirm) elif type == 3: #tabs.append((T("People"), "commit_person")) tabs.append((T("People"), 
"commit_skill")) #req_record = db.req_req[record.req_id] #req_date = req_record.date organisation_represent = s3db.org_organisation_represent rheader = DIV(TABLE(TR(TH("%s: " % table.req_id.label), table.req_id.represent(record.req_id), ), TR(TH("%s: " % T("Committing Organization")), organisation_represent(record.organisation_id), TH("%s: " % T("Commit Date")), s3_date_represent(record.date), ), TR(TH("%s: " % table.comments.label), TD(record.comments, _colspan=3) ), ), ) else: # Other (& Assets/Shelter) rheader = DIV(TABLE(TR(TH("%s: " % table.req_id.label), table.req_id.represent(record.req_id), ), TR(TH("%s: " % T("Committing Person")), table.committer_id.represent(record.committer_id), TH("%s: " % T("Commit Date")), s3_date_represent(record.date), ), TR(TH("%s: " % table.comments.label), TD(record.comments or "", _colspan=3) ), ), ) rheader_tabs = s3_rheader_tabs(r, tabs) rheader.append(rheader_tabs) return rheader return None # ============================================================================= def send(): """ RESTful CRUD controller """ s3db.configure("inv_send", listadd=False) return s3db.inv_send_controller() # ============================================================================== def send_commit(): """ Send a Shipment containing all items in a Commitment """ return s3db.req_send_commit() # ----------------------------------------------------------------------------- def send_process(): """ Process a Shipment """ return s3db.inv_send_process() # ============================================================================= def commit_item(): """ REST Controller """ return s3_rest_controller() # ============================================================================= def commit_req(): """ Function to commit items for a Request - i.e. 
copy data from a req into a commitment arg: req_id vars: site_id """ req_id = request.args[0] site_id = request.vars.get("site_id") table = s3db.req_req r_req = db(table.id == req_id).select(table.type, limitby=(0, 1)).first() # User must have permissions over facility which is sending (prefix, resourcename, id) = s3db.get_instance(s3db.org_site, site_id) if not site_id or not auth.s3_has_permission("update", "%s_%s" % (prefix, resourcename), record_id=id): session.error = T("You do not have permission to make this commitment.") redirect(URL(c="req", f="req", args=[req_id])) # Create a new commit record commit_id = s3db.req_commit.insert(date = request.utcnow, req_id = req_id, site_id = site_id, type = r_req.type ) # Only select items which are in the warehouse ritable = s3db.req_req_item iitable = s3db.inv_inv_item query = (ritable.req_id == req_id) & \ (ritable.quantity_fulfil < ritable.quantity) & \ (iitable.site_id == site_id) & \ (ritable.item_id == iitable.item_id) & \ (ritable.deleted == False) & \ (iitable.deleted == False) req_items = db(query).select(ritable.id, ritable.quantity, ritable.item_pack_id, iitable.item_id, iitable.quantity, iitable.item_pack_id) citable = s3db.req_commit_item for req_item in req_items: req_item_quantity = req_item.req_req_item.quantity * \ req_item.req_req_item.pack_quantity inv_item_quantity = req_item.inv_inv_item.quantity * \ req_item.inv_inv_item.pack_quantity if inv_item_quantity > req_item_quantity: commit_item_quantity = req_item_quantity else: commit_item_quantity = inv_item_quantity commit_item_quantity = commit_item_quantity / req_item.req_req_item.pack_quantity if commit_item_quantity: req_item_id = req_item.req_req_item.id commit_item_id = citable.insert(commit_id = commit_id, req_item_id = req_item_id, item_pack_id = req_item.req_req_item.item_pack_id, quantity = commit_item_quantity ) # Update the req_item.commit_quantity & req.commit_status s3mgr.store_session("req", "commit_item", commit_item_id) form = 
Storage() form.vars = Storage( req_item_id = req_item_id ) s3db.req_commit_item_onaccept(form) # Redirect to commit redirect(URL(c="req", f="commit", args=[commit_id, "commit_item"])) # ============================================================================= def send_req(): """ Function to send items for a Request. - i.e. copy data from a req into a send arg: req_id vars: site_id """ req_id = request.args[0] site_id = request.vars.get("site_id", None) site_name = s3db.org_site_represent(site_id, show_link=False) ritable = s3db.req_req_item iitable = s3db.inv_inv_item sendtable = s3db.inv_send tracktable = s3db.inv_track_item siptable = s3db.supply_item_pack table = s3db.req_req r_req = db(table.id == req_id).select(table.req_ref, table.requester_id, table.site_id, limitby=(0, 1)).first() # User must have permissions over facility which is sending (prefix, resourcename, id) = s3db.get_instance(db.org_site, site_id) if not site_id or not auth.s3_has_permission("update", "%s_%s" % (prefix, resourcename), record_id=id): session.error = T("You do not have permission to send this shipment.") redirect(URL(c="req", f="req", args = [req_id])) # Create a new send record code = s3db.inv_get_shipping_code("WB", site_id, s3db.inv_send.send_ref ) send_id = sendtable.insert(send_ref = code, req_ref = r_req.req_ref, sender_id = auth.s3_logged_in_person(), site_id = site_id, date = request.utcnow, recipient_id = r_req.requester_id, to_site_id = r_req.site_id, status = s3db.inv_ship_status["IN_PROCESS"], ) # Get the items for this request that have not been fulfilled (in transit) sip_id_field = siptable.id sip_quantity_field = siptable.quantity query = (ritable.req_id == req_id) & \ (ritable.quantity_transit < ritable.quantity) & \ (ritable.deleted == False) & \ (ritable.item_pack_id == sip_id_field) req_items = db(query).select(ritable.id, ritable.quantity, ritable.quantity_transit, ritable.quantity_fulfil, ritable.item_id, sip_quantity_field ) # Loop through each request item 
and find matched in the site inventory IN_PROCESS = s3db.inv_tracking_status["IN_PROCESS"] insert = tracktable.insert inv_remove = s3db.inv_remove ii_item_id_field = iitable.item_id ii_quantity_field = iitable.quantity ii_expiry_field = iitable.expiry_date ii_purchase_field = iitable.purchase_date iifields = [iitable.id, ii_item_id_field, ii_quantity_field, iitable.item_pack_id, iitable.pack_value, iitable.currency, ii_expiry_field, ii_purchase_field, iitable.bin, iitable.owner_org_id, iitable.supply_org_id, sip_quantity_field, ] bquery = (ii_quantity_field > 0) & \ (iitable.site_id == site_id) & \ (iitable.deleted == False) & \ (iitable.item_pack_id == sip_id_field) orderby = ii_expiry_field | ii_purchase_field no_match = True for ritem in req_items: rim = ritem.req_req_item rim_id = rim.id query = bquery & \ (ii_item_id_field == rim.item_id) inv_items = db(query).select(*iifields, orderby=orderby) if len(inv_items) == 0: break; no_match = False one_match = len(inv_items) == 1 # Get the Quantity Needed quantity_shipped = max(rim.quantity_transit, rim.quantity_fulfil) quantity_needed = (rim.quantity - quantity_shipped) * ritem.supply_item_pack.quantity # Insert the track item records # If there is more than one item match then we select the stock with the oldest expiry date first # then the oldest purchase date first # then a complete batch, if-possible iids = [] append = iids.append for item in inv_items: if not quantity_needed: break iitem = item.inv_inv_item if one_match: # Remove this total from the warehouse stock send_item_quantity = inv_remove(iitem, quantity_needed) quantity_needed -= send_item_quantity append(iitem.id) else: quantity_available = iitem.quantity * item.supply_item_pack.quantity if iitem.expiry_date: # We take first from the oldest expiry date send_item_quantity = min(quantity_needed, quantity_available) # Remove this total from the warehouse stock send_item_quantity = inv_remove(iitem, send_item_quantity) quantity_needed -= 
send_item_quantity append(iitem.id) elif iitem.purchase_date: # We take first from the oldest purchase date for non-expiring stock send_item_quantity = min(quantity_needed, quantity_available) # Remove this total from the warehouse stock send_item_quantity = inv_remove(iitem, send_item_quantity) quantity_needed -= send_item_quantity append(iitem.id) elif quantity_needed <= quantity_available: # Assign a complete batch together if possible # Remove this total from the warehouse stock send_item_quantity = inv_remove(iitem, quantity_needed) quantity_needed = 0 append(iitem.id) else: # Try again on the second loop, if-necessary continue insert(send_id = send_id, send_inv_item_id = iitem.id, item_id = iitem.item_id, req_item_id = rim_id, item_pack_id = iitem.item_pack_id, quantity = send_item_quantity, status = IN_PROCESS, pack_value = iitem.pack_value, currency = iitem.currency, bin = iitem.bin, expiry_date = iitem.expiry_date, owner_org_id = iitem.owner_org_id, supply_org_id = iitem.supply_org_id, #comments = comment, ) # 2nd pass for item in inv_items: if not quantity_needed: break iitem = item.inv_inv_item if iitem.id in iids: continue # We have no way to know which stock we should take 1st so show all with quantity 0 & let the user decide send_item_quantity = 0 insert(send_id = send_id, send_inv_item_id = iitem.id, item_id = iitem.item_id, req_item_id = rim_id, item_pack_id = iitem.item_pack_id, quantity = send_item_quantity, status = IN_PROCESS, pack_value = iitem.pack_value, currency = iitem.currency, bin = iitem.bin, expiry_date = iitem.expiry_date, owner_org_id = iitem.owner_org_id, supply_org_id = iitem.supply_org_id, #comments = comment, ) if no_match: session.warning = \ T("%(site)s has no items exactly matching this request. 
There may still be other items in stock which can fulfill this request!") % \ dict(site=site_name) # Redirect to view the list of items in the Send redirect(URL(c = "inv", f = "send", args = [send_id, "track_item"]) ) # ============================================================================= def commit_item_json(): """ """ ctable = s3db.req_commit itable = s3db.req_commit_item stable = s3db.org_site #ctable.date.represent = lambda dt: dt[:10] query = (itable.req_item_id == request.args[0]) & \ (ctable.id == itable.commit_id) & \ (ctable.site_id == stable.id) & \ (itable.deleted == False) records = db(query).select(ctable.id, ctable.date, stable.name, itable.quantity, orderby = db.req_commit.date) json_str = '''[%s,%s''' % (json.dumps(dict(id = str(T("Committed")), quantity = "#")), records.json()[1:]) response.headers["Content-Type"] = "application/json" return json_str # ============================================================================= def fema(): """ Custom Report to list all open requests for items that FEMA can supply @ToDo: Filter to just Sites that FEMA support """ ritable = s3db.req_req_item rtable = db.req_req itable = db.supply_item ictable = db.supply_item_category citable = db.supply_catalog_item query = (ictable.name == "FEMA") & \ (citable.item_category_id == ictable.id) & \ (citable.item_id == itable.id) & \ (itable.deleted != True) fema_items = db(query).select(itable.id) fema_item_ids = [item.id for item in fema_items] REQ_STATUS_COMPLETE = 2 s3.filter = (rtable.deleted != True) & \ (rtable.is_template == False) & \ (rtable.commit_status != REQ_STATUS_COMPLETE) & \ (rtable.transit_status != REQ_STATUS_COMPLETE) & \ (rtable.fulfil_status != REQ_STATUS_COMPLETE) & \ (ritable.req_id == rtable.id) & \ (ritable.quantity > ritable.quantity_commit) & \ (ritable.quantity > ritable.quantity_transit) & \ (ritable.quantity > ritable.quantity_fulfil) & \ (ritable.deleted != True) & \ (ritable.item_id.belongs(fema_item_ids)) # Search method 
req_item_search = [ s3base.S3SearchOptionsWidget( name="req_search_site", field="req_id$site_id", label = T("Facility"), cols = 3, ), ] s3db.configure("req_req_item", search_method = s3base.S3Search(advanced=req_item_search), ) output = req_item() return output # END =========================================================================
if r.interactive: # Commitments created through UI should be done via components table = r.table if r.record: s3.crud.submit_button = T("Save Changes") if r.record.type == 1: # Items # Limit site_id to facilities the user has permissions for auth.permitted_facilities(table=table, error_msg=T("You do not have permission for any facility to make a commitment.") ) table.site_id.comment = A(T("Set as default Site"), _id="req_commit_site_id_link", _target="_blank", _href=URL(c="default", f="user", args=["profile"])) jappend = s3.jquery_ready.append jappend(''' $('#req_commit_site_id_link').click(function(){ var site_id=$('#req_commit_site_id').val() if(site_id){ var url = $('#req_commit_site_id_link').attr('href') var exists=url.indexOf('?') if(exists=='-1'){ $('#req_commit_site_id_link').attr('href',url+'?site_id='+site_id) } } return true })''') # Dropdown not Autocomplete itable = s3db.req_commit_item itable.req_item_id.widget = None jappend(''' S3OptionsFilter({ 'triggerName':'req_item_id', 'targetName':'item_pack_id', 'lookupPrefix':'req', 'lookupResource':'req_item_packs', 'lookupKey':'req_item_id', 'lookupField':'id', 'msgNoRecords':i18n.no_packs, 'fncPrep':S3.supply.fncPrepItem, 'fncRepresent':S3.supply.fncRepresentItem })''') # Custom Form s3forms = s3base.s3forms crud_form = s3forms.S3SQLCustomForm( "site_id", "date", "date_available", "committer_id", s3forms.S3SQLInlineComponent( "commit_item", label = T("Items"), fields = ["req_item_id", "item_pack_id", "quantity", "comments" ] ), "comments", ) s3db.configure("req_commit", crud_form=crud_form) elif r.record.type == 3: # People # Limit site_id to sites the user has permissions for auth.permitted_facilities(table=r.table, error_msg=T("You do not have permission for any facility to make a commitment.")) table.site_id.comment = A(T("Set as default Site"), _id="req_commit_site_id_link", _target="_blank", _href=URL(c="default", f="user", args=["profile"])) # Limit organisation_id to organisations the user has 
permissions for #auth.permitted_organisations(table=r.table, redirect_on_error=False) #table.organisation_id.readable = True #table.organisation_id.writable = True # Custom Form s3forms = s3base.s3forms crud_form = s3forms.S3SQLCustomForm( #"organisation_id", "site_id", "date", "date_available", "committer_id", s3forms.S3SQLInlineComponent( "commit_skill", label = T("People"), fields = ["quantity", "skill_id", "comments" ] ), "comments", ) s3db.configure("req_commit", crud_form=crud_form) else: # Commits to Other requests can have an Organisation # Limit organisation_id to organisations the user has permissions for auth.permitted_organisations(table=r.table, redirect_on_error=False) table.organisation_id.readable = True table.organisation_id.writable = True # Non-Item commits shouldn't have a From Inventory # @ToDo: Assets do? table.site_id.readable = False table.site_id.writable = False if r.component: req_id = r.record.req_id if r.component.name == "commit_item": # Limit commit items to items from the request s3db.req_commit_item.req_item_id.requires = \ IS_ONE_OF(db, "req_req_item.id", s3db.req_item_represent, orderby = "req_req_item.id", filterby = "req_id", filter_opts = [req_id], sort=True ) elif r.component.name == "person": pass # Limit commit skills to skills from the request #db.req_commit_skill.req_skill_id.requires = \ # IS_ONE_OF(db, # "req_req_skill.id", # s3db.req_skill_represent, # orderby = "req_req_skill.id", # filterby = "req_id", # filter_opts = [req_id], # sort=True # ) return True
primitive.rs
use std::sync::Arc; use parquet_format_async_temp::Statistics as ParquetStatistics; use super::Statistics; use crate::metadata::ColumnDescriptor; use crate::types; use crate::{ error::{ParquetError, Result}, schema::types::PhysicalType, }; #[derive(Debug, Clone, PartialEq)] pub struct PrimitiveStatistics<T: types::NativeType> { pub descriptor: ColumnDescriptor, pub null_count: Option<i64>, pub distinct_count: Option<i64>, pub max_value: Option<T>, pub min_value: Option<T>, } impl<T: types::NativeType> Statistics for PrimitiveStatistics<T> { fn as_any(&self) -> &dyn std::any::Any { self } fn physical_type(&self) -> &PhysicalType { &T::TYPE } fn null_count(&self) -> Option<i64> { self.null_count } } pub fn read<T: types::NativeType>( v: &ParquetStatistics, descriptor: ColumnDescriptor, ) -> Result<Arc<dyn Statistics>> { if let Some(ref v) = v.max_value { if v.len() != std::mem::size_of::<T>() { return Err(ParquetError::OutOfSpec( "The max_value of statistics MUST be plain encoded".to_string(), )); } }; if let Some(ref v) = v.min_value { if v.len() != std::mem::size_of::<T>() { return Err(ParquetError::OutOfSpec( "The min_value of statistics MUST be plain encoded".to_string(),
}; Ok(Arc::new(PrimitiveStatistics::<T> { descriptor, null_count: v.null_count, distinct_count: v.distinct_count, max_value: v.max_value.as_ref().map(|x| types::decode(x)), min_value: v.min_value.as_ref().map(|x| types::decode(x)), })) } pub fn write<T: types::NativeType>(v: &PrimitiveStatistics<T>) -> ParquetStatistics { ParquetStatistics { null_count: v.null_count, distinct_count: v.distinct_count, max_value: v.max_value.map(|x| x.to_le_bytes().as_ref().to_vec()), min_value: v.min_value.map(|x| x.to_le_bytes().as_ref().to_vec()), min: None, max: None, } }
)); }
config.go
/* Copyright 2018 The Rook Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Package config provides methods for creating and formatting Ceph configuration files for daemons. package config import ( "fmt" "os" "path" "strings" "github.com/coreos/pkg/capnslog" "github.com/go-ini/ini" "github.com/rook/rook/pkg/clusterd" "github.com/rook/rook/pkg/daemon/ceph/client" ) var logger = capnslog.NewPackageLogger("github.com/rook/rook", "cephconfig") const ( // DefaultConfigDir is the default dir where Ceph stores its configs DefaultConfigDir = "/etc/ceph" // DefaultConfigFile is the default name of the file where Ceph stores its configs DefaultConfigFile = "ceph.conf" // DefaultKeyringFile is the default name of the file where Ceph stores its keyring info DefaultKeyringFile = "keyring" ) // GlobalConfig represents the [global] sections of Ceph's config file. 
type GlobalConfig struct { EnableExperimental string `ini:"enable experimental unrecoverable data corrupting features,omitempty"` FSID string `ini:"fsid,omitempty"` RunDir string `ini:"run dir,omitempty"` MonMembers string `ini:"mon initial members,omitempty"` MonHost string `ini:"mon host"` LogFile string `ini:"log file,omitempty"` MonClusterLogFile string `ini:"mon cluster log file,omitempty"` PublicAddr string `ini:"public addr,omitempty"` PublicNetwork string `ini:"public network,omitempty"` ClusterAddr string `ini:"cluster addr,omitempty"` ClusterNetwork string `ini:"cluster network,omitempty"` MonKeyValueDb string `ini:"mon keyvaluedb"` MonAllowPoolDelete bool `ini:"mon_allow_pool_delete"` MaxPgsPerOsd int `ini:"mon_max_pg_per_osd"` DebugLogDefaultLevel int `ini:"debug default"` DebugLogRadosLevel int `ini:"debug rados"` DebugLogMonLevel int `ini:"debug mon"` DebugLogOSDLevel int `ini:"debug osd"` DebugLogBluestoreLevel int `ini:"debug bluestore"` DebugLogFilestoreLevel int `ini:"debug filestore"` DebugLogJournalLevel int `ini:"debug journal"` DebugLogLevelDBLevel int `ini:"debug leveldb"` FileStoreOmapBackend string `ini:"filestore_omap_backend"` OsdPgBits int `ini:"osd pg bits,omitempty"` OsdPgpBits int `ini:"osd pgp bits,omitempty"` OsdPoolDefaultSize int `ini:"osd pool default size,omitempty"` OsdPoolDefaultMinSize int `ini:"osd pool default min size,omitempty"` OsdPoolDefaultPgNum int `ini:"osd pool default pg num,omitempty"` OsdPoolDefaultPgpNum int `ini:"osd pool default pgp num,omitempty"` OsdMaxObjectNameLen int `ini:"osd max object name len,omitempty"` OsdMaxObjectNamespaceLen int `ini:"osd max object namespace len,omitempty"` OsdObjectStore string `ini:"osd objectstore,omitempty"` CrushLocation string `ini:"crush location,omitempty"` RbdDefaultFeatures int `ini:"rbd_default_features,omitempty"` FatalSignalHandlers string `ini:"fatal signal handlers"` } // CephConfig represents an entire Ceph config including all sections. 
type CephConfig struct { *GlobalConfig `ini:"global,omitempty"` } // DefaultConfigFilePath returns the full path to Ceph's default config file func DefaultConfigFilePath() string { return path.Join(DefaultConfigDir, DefaultConfigFile) } // DefaultKeyringFilePath returns the full path to Ceph's default keyring file func DefaultKeyringFilePath() string { return path.Join(DefaultConfigDir, DefaultKeyringFile) } // GetConfFilePath gets the path of a given cluster's config file func
(root, clusterName string) string { return fmt.Sprintf("%s/%s.config", root, clusterName) } // GenerateAdminConnectionConfig calls GenerateAdminConnectionConfigWithSettings with no settings // overridden. func GenerateAdminConnectionConfig(context *clusterd.Context, cluster *ClusterInfo) error { return GenerateAdminConnectionConfigWithSettings(context, cluster, nil) } // GenerateAdminConnectionConfigWithSettings generates a Ceph config and keyring which will allow // the daemon to connect as an admin. Default config file settings can be overridden by specifying // some subset of settings. func GenerateAdminConnectionConfigWithSettings(context *clusterd.Context, cluster *ClusterInfo, settings *CephConfig) error { root := path.Join(context.ConfigDir, cluster.Name) keyringPath := path.Join(root, fmt.Sprintf("%s.keyring", client.AdminUsername)) err := writeKeyring(AdminKeyring(cluster), keyringPath) if err != nil { return fmt.Errorf("failed to write keyring to %s. %+v", root, err) } if _, err = GenerateConfigFile(context, cluster, root, client.AdminUsername, keyringPath, settings, nil); err != nil { return fmt.Errorf("failed to write config to %s. %+v", root, err) } logger.Infof("generated admin config in %s", root) return nil } // GenerateConfigFile generates and writes a config file to disk. 
func GenerateConfigFile(context *clusterd.Context, cluster *ClusterInfo, pathRoot, user, keyringPath string, globalConfig *CephConfig, clientSettings map[string]string) (string, error) { // create the config directory if err := os.MkdirAll(pathRoot, 0744); err != nil { logger.Warningf("failed to create config directory at %s: %+v", pathRoot, err) } configFile, err := createGlobalConfigFileSection(context, cluster, pathRoot, globalConfig) if err != nil { return "", fmt.Errorf("failed to create global config section, %+v", err) } qualifiedUser := getQualifiedUser(user) if err := addClientConfigFileSection(configFile, qualifiedUser, keyringPath, clientSettings); err != nil { return "", fmt.Errorf("failed to add admin client config section, %+v", err) } // if there's a config file override path given, process the given config file if context.ConfigFileOverride != "" { err := configFile.Append(context.ConfigFileOverride) if err != nil { // log the config file override failure as a warning, but proceed without it logger.Warningf("failed to add config file override from '%s': %+v", context.ConfigFileOverride, err) } } // write the entire config to disk filePath := GetConfFilePath(pathRoot, cluster.Name) logger.Infof("writing config file %s", filePath) if err := configFile.SaveTo(filePath); err != nil { return "", fmt.Errorf("failed to save config file %s. %+v", filePath, err) } // copy the config to /etc/ceph/ceph.conf defaultPath := DefaultConfigFilePath() logger.Infof("copying config to %s", defaultPath) if err := configFile.SaveTo(defaultPath); err != nil { logger.Warningf("failed to save config file %s. %+v", defaultPath, err) } return filePath, nil } // prepends "client." if a user namespace is not already specified func getQualifiedUser(user string) string { if strings.Index(user, ".") == -1 { return fmt.Sprintf("client.%s", user) } return user } // CreateDefaultCephConfig creates a default ceph config file. 
func CreateDefaultCephConfig(context *clusterd.Context, cluster *ClusterInfo, runDir string) *CephConfig { // extract a list of just the monitor names, which will populate the "mon initial members" // global config field monMembers := make([]string, len(cluster.Monitors)) monHosts := make([]string, len(cluster.Monitors)) i := 0 for _, monitor := range cluster.Monitors { monMembers[i] = monitor.Name monHosts[i] = monitor.Endpoint i++ } cephLogLevel := logLevelToCephLogLevel(context.LogLevel) return &CephConfig{ GlobalConfig: &GlobalConfig{ FSID: cluster.FSID, RunDir: runDir, MonMembers: strings.Join(monMembers, " "), MonHost: strings.Join(monHosts, ","), LogFile: "/dev/stdout", MonClusterLogFile: "/dev/stdout", PublicAddr: context.NetworkInfo.PublicAddr, PublicNetwork: context.NetworkInfo.PublicNetwork, ClusterAddr: context.NetworkInfo.ClusterAddr, ClusterNetwork: context.NetworkInfo.ClusterNetwork, MonKeyValueDb: "rocksdb", MonAllowPoolDelete: true, MaxPgsPerOsd: 1000, DebugLogDefaultLevel: cephLogLevel, DebugLogRadosLevel: cephLogLevel, DebugLogMonLevel: cephLogLevel, DebugLogOSDLevel: cephLogLevel, DebugLogBluestoreLevel: cephLogLevel, DebugLogFilestoreLevel: cephLogLevel, DebugLogJournalLevel: cephLogLevel, DebugLogLevelDBLevel: cephLogLevel, FileStoreOmapBackend: "rocksdb", OsdPgBits: 11, OsdPgpBits: 11, OsdPoolDefaultSize: 1, OsdPoolDefaultMinSize: 1, OsdPoolDefaultPgNum: 100, OsdPoolDefaultPgpNum: 100, RbdDefaultFeatures: 3, FatalSignalHandlers: "false", }, } } // create a config file with global settings configured, and return an ini file func createGlobalConfigFileSection(context *clusterd.Context, cluster *ClusterInfo, runDir string, userConfig *CephConfig) (*ini.File, error) { var ceph *CephConfig if userConfig != nil { // use the user config since it was provided ceph = userConfig } else { ceph = CreateDefaultCephConfig(context, cluster, runDir) } configFile := ini.Empty() err := ini.ReflectFrom(configFile, ceph) return configFile, err } // add client 
config to the ini file func addClientConfigFileSection(configFile *ini.File, clientName, keyringPath string, settings map[string]string) error { s, err := configFile.NewSection(clientName) if err != nil { return err } if _, err := s.NewKey("keyring", keyringPath); err != nil { return err } for key, val := range settings { if _, err := s.NewKey(key, val); err != nil { return fmt.Errorf("failed to add key %s. %v", key, err) } } return nil } // convert a Rook log level to a corresponding Ceph log level func logLevelToCephLogLevel(logLevel capnslog.LogLevel) int { switch logLevel { case capnslog.CRITICAL: case capnslog.ERROR: case capnslog.WARNING: return -1 case capnslog.NOTICE: case capnslog.INFO: return 0 case capnslog.DEBUG: return 10 case capnslog.TRACE: return 100 } return 0 }
GetConfFilePath
test_binary_sensor.py
"""Test the Yeelight binary sensor.""" from unittest.mock import patch from homeassistant.components.yeelight import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_component
ENTITY_BINARY_SENSOR = f"binary_sensor.{NAME}_nightlight" async def test_nightlight(hass: HomeAssistant): """Test nightlight sensor.""" mocked_bulb = _mocked_bulb() with patch(f"{MODULE}.Bulb", return_value=mocked_bulb), patch( f"{MODULE}.config_flow.yeelight.Bulb", return_value=mocked_bulb ): await async_setup_component(hass, DOMAIN, YAML_CONFIGURATION) await hass.async_block_till_done() # active_mode assert hass.states.get(ENTITY_BINARY_SENSOR).state == "off" # nl_br properties = {**PROPERTIES} properties.pop("active_mode") mocked_bulb.last_properties = properties await entity_component.async_update_entity(hass, ENTITY_BINARY_SENSOR) assert hass.states.get(ENTITY_BINARY_SENSOR).state == "on" # default properties.pop("nl_br") await entity_component.async_update_entity(hass, ENTITY_BINARY_SENSOR) assert hass.states.get(ENTITY_BINARY_SENSOR).state == "off"
from homeassistant.setup import async_setup_component from . import MODULE, NAME, PROPERTIES, YAML_CONFIGURATION, _mocked_bulb
test_common.go
package stream import ( "fmt" "testing" "github.com/cosmos/cosmos-sdk/codec" appCfg "github.com/cosmos/cosmos-sdk/server/config" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/x/auth" "github.com/cosmos/cosmos-sdk/x/bank" "github.com/cosmos/cosmos-sdk/x/mock" "github.com/cosmos/cosmos-sdk/x/supply" "github.com/cosmos/cosmos-sdk/x/supply/exported" "github.com/okex/okexchain/x/common" "github.com/okex/okexchain/x/common/monitor" "github.com/okex/okexchain/x/dex" "github.com/okex/okexchain/x/order" "github.com/okex/okexchain/x/order/keeper" ordertypes "github.com/okex/okexchain/x/order/types" stakingtypes "github.com/okex/okexchain/x/staking/types" "github.com/okex/okexchain/x/token" "github.com/stretchr/testify/require" abci "github.com/tendermint/tendermint/abci/types" "github.com/tendermint/tendermint/crypto/secp256k1" ) type MockApp struct { *mock.App keyOrder *sdk.KVStoreKey keyToken *sdk.KVStoreKey keyFreeze *sdk.KVStoreKey keyLock *sdk.KVStoreKey keyDex *sdk.KVStoreKey keyTokenPair *sdk.KVStoreKey keySupply *sdk.KVStoreKey BankKeeper bank.Keeper OrderKeeper keeper.Keeper DexKeeper dex.Keeper TokenKeeper token.Keeper supplyKeeper supply.Keeper streamKeeper Keeper } func registerCodec(cdc *codec.Codec) { supply.RegisterCodec(cdc) } func GetMockApp(t *testing.T, numGenAccs int, cfg *appCfg.Config) (mockApp *MockApp, addrKeysSlice mock.AddrKeysSlice) { return getMockAppWithBalance(t, numGenAccs, 100, cfg) } // initialize the mock application for this module func getMockAppWithBalance(t *testing.T, numGenAccs int, balance int64, cfg *appCfg.Config) (mockApp *MockApp, addrKeysSlice mock.AddrKeysSlice) { mapp := mock.NewApp() registerCodec(mapp.Cdc) mockApp = &MockApp{ App: mapp, keyOrder: sdk.NewKVStoreKey(ordertypes.OrderStoreKey), keyToken: sdk.NewKVStoreKey("token"), keyFreeze: sdk.NewKVStoreKey("freeze"), keyLock: sdk.NewKVStoreKey("lock"), keyDex: sdk.NewKVStoreKey(dex.StoreKey), keyTokenPair: sdk.NewKVStoreKey(dex.TokenPairStoreKey), 
keySupply: sdk.NewKVStoreKey(supply.StoreKey), } feeCollector := supply.NewEmptyModuleAccount(auth.FeeCollectorName) blacklistedAddrs := make(map[string]bool) blacklistedAddrs[feeCollector.String()] = true mockApp.BankKeeper = bank.NewBaseKeeper(mockApp.AccountKeeper, mockApp.ParamsKeeper.Subspace(bank.DefaultParamspace), blacklistedAddrs) maccPerms := map[string][]string{ auth.FeeCollectorName: nil, } mockApp.supplyKeeper = supply.NewKeeper(mockApp.Cdc, mockApp.keySupply, mockApp.AccountKeeper, mockApp.BankKeeper, maccPerms) mockApp.TokenKeeper = token.NewKeeper( mockApp.BankKeeper, mockApp.ParamsKeeper.Subspace(token.DefaultParamspace), auth.FeeCollectorName, mockApp.supplyKeeper, mockApp.keyToken, mockApp.keyLock, mockApp.Cdc, true) mockApp.DexKeeper = dex.NewKeeper( auth.FeeCollectorName, mockApp.supplyKeeper, mockApp.ParamsKeeper.Subspace(dex.DefaultParamspace), mockApp.TokenKeeper, nil, mockApp.BankKeeper, mockApp.keyDex, mockApp.keyTokenPair, mockApp.Cdc) mockApp.OrderKeeper = keeper.NewKeeper( mockApp.TokenKeeper, mockApp.supplyKeeper, mockApp.DexKeeper, mockApp.ParamsKeeper.Subspace(ordertypes.DefaultParamspace), auth.FeeCollectorName, mockApp.keyOrder, mockApp.Cdc, true, monitor.NopOrderMetrics()) mockApp.streamKeeper = NewKeeper(mockApp.OrderKeeper, mockApp.TokenKeeper, &mockApp.DexKeeper, &mockApp.AccountKeeper, nil, nil, mockApp.Cdc, mockApp.Logger(), cfg, monitor.NopStreamMetrics()) mockApp.Router().AddRoute(ordertypes.RouterKey, order.NewOrderHandler(mockApp.OrderKeeper)) mockApp.QueryRouter().AddRoute(order.QuerierRoute, keeper.NewQuerier(mockApp.OrderKeeper)) mockApp.SetBeginBlocker(getBeginBlocker(mockApp)) mockApp.SetEndBlocker(getEndBlocker(mockApp)) mockApp.SetInitChainer(getInitChainer(mockApp.App, mockApp.supplyKeeper, []exported.ModuleAccountI{feeCollector})) coins, err := sdk.ParseDecCoins(fmt.Sprintf("%d%s,%d%s", balance, common.NativeToken, balance, common.TestToken)) if err != nil { panic(err) } keysSlice, genAccs := 
CreateGenAccounts(numGenAccs, coins) addrKeysSlice = keysSlice mockApp.SetAnteHandler(nil) app := mockApp mockApp.MountStores( app.keyOrder, app.keyToken, app.keyDex, app.keyTokenPair, app.keyFreeze, app.keyLock, app.keySupply, ) require.NoError(t, mockApp.CompleteSetup(mockApp.keyOrder)) mock.SetGenesis(mockApp.App, genAccs) return mockApp, addrKeysSlice } func getBeginBlocker(mapp *MockApp) sdk.BeginBlocker
func getEndBlocker(mapp *MockApp) sdk.EndBlocker { return func(ctx sdk.Context, req abci.RequestEndBlock) abci.ResponseEndBlock { order.EndBlocker(ctx, mapp.OrderKeeper) EndBlocker(ctx, mapp.streamKeeper) return abci.ResponseEndBlock{} } } func getInitChainer(mapp *mock.App, supplyKeeper stakingtypes.SupplyKeeper, blacklistedAddrs []exported.ModuleAccountI) sdk.InitChainer { return func(ctx sdk.Context, req abci.RequestInitChain) abci.ResponseInitChain { mapp.InitChainer(ctx, req) // set module accounts for _, macc := range blacklistedAddrs { supplyKeeper.SetModuleAccount(ctx, macc) } return abci.ResponseInitChain{} } } func ProduceOrderTxs(app *MockApp, ctx sdk.Context, numToGenerate int, addrKeys mock.AddrKeys, orderMsg *ordertypes.MsgNewOrders) []auth.StdTx { txs := make([]auth.StdTx, numToGenerate) orderMsg.Sender = addrKeys.Address for i := 0; i < numToGenerate; i++ { txs[i] = buildTx(app, ctx, addrKeys, *orderMsg) } return txs } func buildTx(app *MockApp, ctx sdk.Context, addrKeys mock.AddrKeys, msg sdk.Msg) auth.StdTx { accs := app.AccountKeeper.GetAccount(ctx, addrKeys.Address) accNum := accs.GetAccountNumber() seqNum := accs.GetSequence() tx := mock.GenTx([]sdk.Msg{msg}, []uint64{accNum}, []uint64{seqNum}, addrKeys.PrivKey) _, _, err := app.Check(tx) if err != nil { panic(fmt.Sprintf("something wrong in checking transaction: %v", err)) } return tx } func MockApplyBlock(app *MockApp, blockHeight int64, txs []auth.StdTx) { app.BeginBlock(abci.RequestBeginBlock{Header: abci.Header{Height: blockHeight}}) newCtx := app.NewContext(false, abci.Header{}) param := ordertypes.DefaultParams() app.OrderKeeper.SetParams(newCtx, &param) for _, tx := range txs { app.Deliver(tx) } app.EndBlock(abci.RequestEndBlock{Height: blockHeight}) app.Commit() } func CreateGenAccounts(numAccs int, genCoins sdk.Coins) (addrKeysSlice mock.AddrKeysSlice, genAccs []auth.Account) { for i := 0; i < numAccs; i++ { privKey := secp256k1.GenPrivKey() pubKey := privKey.PubKey() addr := 
sdk.AccAddress(pubKey.Address()) addrKeys := mock.NewAddrKeys(addr, pubKey, privKey) account := &auth.BaseAccount{ Address: addr, Coins: genCoins, } genAccs = append(genAccs, account) addrKeysSlice = append(addrKeysSlice, addrKeys) } return }
{ return func(ctx sdk.Context, req abci.RequestBeginBlock) abci.ResponseBeginBlock { order.BeginBlocker(ctx, mapp.OrderKeeper) return abci.ResponseBeginBlock{} } }
coco_eval.py
import json import tempfile import numpy as np import copy import time import torch import torch._six from pycocotools.cocoeval import COCOeval from pycocotools.coco import COCO import pycocotools.mask as mask_util from collections import defaultdict import helpers.utils as utils class CocoEvaluator(object): def __init__(self, coco_gt, iou_types): assert isinstance(iou_types, (list, tuple)) coco_gt = copy.deepcopy(coco_gt) self.coco_gt = coco_gt self.iou_types = iou_types self.coco_eval = {} for iou_type in iou_types: self.coco_eval[iou_type] = COCOeval(coco_gt, iouType=iou_type) self.img_ids = [] self.eval_imgs = {k: [] for k in iou_types} def update(self, predictions): img_ids = list(np.unique(list(predictions.keys()))) self.img_ids.extend(img_ids) for iou_type in self.iou_types: results = self.prepare(predictions, iou_type) coco_dt = loadRes(self.coco_gt, results) if results else COCO() coco_eval = self.coco_eval[iou_type] coco_eval.cocoDt = coco_dt coco_eval.params.imgIds = list(img_ids) img_ids, eval_imgs = evaluate(coco_eval) self.eval_imgs[iou_type].append(eval_imgs) def synchronize_between_processes(self): for iou_type in self.iou_types: self.eval_imgs[iou_type] = np.concatenate(self.eval_imgs[iou_type], 2) create_common_coco_eval(self.coco_eval[iou_type], self.img_ids, self.eval_imgs[iou_type]) def accumulate(self): for coco_eval in self.coco_eval.values(): coco_eval.accumulate() def summarize(self): for iou_type, coco_eval in self.coco_eval.items(): print("IoU metric: {}".format(iou_type)) coco_eval.summarize() def prepare(self, predictions, iou_type): if iou_type == "bbox": return self.prepare_for_coco_detection(predictions) elif iou_type == "segm": return self.prepare_for_coco_segmentation(predictions) elif iou_type == "keypoints": return self.prepare_for_coco_keypoint(predictions) else: raise ValueError("Unknown iou type {}".format(iou_type)) def prepare_for_coco_detection(self, predictions):
boxes = prediction["boxes"] boxes = convert_to_xywh(boxes).tolist() scores = prediction["scores"].tolist() labels = prediction["labels"].tolist() coco_results.extend( [ { "image_id": original_id, "category_id": labels[k], "bbox": box, "score": scores[k], } for k, box in enumerate(boxes) ] ) return coco_results def prepare_for_coco_segmentation(self, predictions): coco_results = [] for original_id, prediction in predictions.items(): if len(prediction) == 0: continue scores = prediction["scores"] labels = prediction["labels"] masks = prediction["masks"] masks = masks > 0.5 scores = prediction["scores"].tolist() labels = prediction["labels"].tolist() rles = [ mask_util.encode(np.array(mask[0, :, :, np.newaxis], dtype=np.uint8, order="F"))[0] for mask in masks ] for rle in rles: rle["counts"] = rle["counts"].decode("utf-8") coco_results.extend( [ { "image_id": original_id, "category_id": labels[k], "segmentation": rle, "score": scores[k], } for k, rle in enumerate(rles) ] ) return coco_results def prepare_for_coco_keypoint(self, predictions): coco_results = [] for original_id, prediction in predictions.items(): if len(prediction) == 0: continue boxes = prediction["boxes"] boxes = convert_to_xywh(boxes).tolist() scores = prediction["scores"].tolist() labels = prediction["labels"].tolist() keypoints = prediction["keypoints"] keypoints = keypoints.flatten(start_dim=1).tolist() coco_results.extend( [ { "image_id": original_id, "category_id": labels[k], 'keypoints': keypoint, "score": scores[k], } for k, keypoint in enumerate(keypoints) ] ) return coco_results def convert_to_xywh(boxes): xmin, ymin, xmax, ymax = boxes.unbind(1) return torch.stack((xmin, ymin, xmax - xmin, ymax - ymin), dim=1) def merge(img_ids, eval_imgs): all_img_ids = utils.all_gather(img_ids) all_eval_imgs = utils.all_gather(eval_imgs) merged_img_ids = [] for p in all_img_ids: merged_img_ids.extend(p) merged_eval_imgs = [] for p in all_eval_imgs: merged_eval_imgs.append(p) merged_img_ids = 
np.array(merged_img_ids) merged_eval_imgs = np.concatenate(merged_eval_imgs, 2) # keep only unique (and in sorted order) images merged_img_ids, idx = np.unique(merged_img_ids, return_index=True) merged_eval_imgs = merged_eval_imgs[..., idx] return merged_img_ids, merged_eval_imgs def create_common_coco_eval(coco_eval, img_ids, eval_imgs): img_ids, eval_imgs = merge(img_ids, eval_imgs) img_ids = list(img_ids) eval_imgs = list(eval_imgs.flatten()) coco_eval.evalImgs = eval_imgs coco_eval.params.imgIds = img_ids coco_eval._paramsEval = copy.deepcopy(coco_eval.params) ################################################################# # From pycocotools, just removed the prints and fixed # a Python3 bug about unicode not defined ################################################################# # Ideally, pycocotools wouldn't have hard-coded prints # so that we could avoid copy-pasting those two functions def createIndex(self): # create index # print('creating index...') anns, cats, imgs = {}, {}, {} imgToAnns, catToImgs = defaultdict(list), defaultdict(list) if 'annotations' in self.dataset: for ann in self.dataset['annotations']: imgToAnns[ann['image_id']].append(ann) anns[ann['id']] = ann if 'images' in self.dataset: for img in self.dataset['images']: imgs[img['id']] = img if 'categories' in self.dataset: for cat in self.dataset['categories']: cats[cat['id']] = cat if 'annotations' in self.dataset and 'categories' in self.dataset: for ann in self.dataset['annotations']: catToImgs[ann['category_id']].append(ann['image_id']) # print('index created!') # create class members self.anns = anns self.imgToAnns = imgToAnns self.catToImgs = catToImgs self.imgs = imgs self.cats = cats maskUtils = mask_util def loadRes(self, resFile): """ Load result file and return a result api object. 
Args: self (obj): coco object with ground truth annotations resFile (str): file name of result file Returns: res (obj): result api object """ res = COCO() res.dataset['images'] = [img for img in self.dataset['images']] # print('Loading and preparing results...') # tic = time.time() if isinstance(resFile, torch._six.string_classes): anns = json.load(open(resFile)) elif type(resFile) == np.ndarray: anns = self.loadNumpyAnnotations(resFile) else: anns = resFile assert type(anns) == list, 'results in not an array of objects' annsImgIds = [ann['image_id'] for ann in anns] assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \ 'Results do not correspond to current coco set' if 'caption' in anns[0]: imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns]) res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds] for id, ann in enumerate(anns): ann['id'] = id + 1 elif 'bbox' in anns[0] and not anns[0]['bbox'] == []: res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) for id, ann in enumerate(anns): bb = ann['bbox'] x1, x2, y1, y2 = [bb[0], bb[0] + bb[2], bb[1], bb[1] + bb[3]] if 'segmentation' not in ann: ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]] ann['area'] = bb[2] * bb[3] ann['id'] = id + 1 ann['iscrowd'] = 0 elif 'segmentation' in anns[0]: res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) for id, ann in enumerate(anns): # now only support compressed RLE format as segmentation results ann['area'] = maskUtils.area(ann['segmentation']) if 'bbox' not in ann: ann['bbox'] = maskUtils.toBbox(ann['segmentation']) ann['id'] = id + 1 ann['iscrowd'] = 0 elif 'keypoints' in anns[0]: res.dataset['categories'] = copy.deepcopy(self.dataset['categories']) for id, ann in enumerate(anns): s = ann['keypoints'] x = s[0::3] y = s[1::3] x1, x2, y1, y2 = np.min(x), np.max(x), np.min(y), np.max(y) ann['area'] = (x2 - x1) * (y2 - y1) ann['id'] = id + 1 
ann['bbox'] = [x1, y1, x2 - x1, y2 - y1] # print('DONE (t={:0.2f}s)'.format(time.time()- tic)) res.dataset['annotations'] = anns createIndex(res) return res def evaluate(self): ''' Run per image evaluation on given images and store results (a list of dict) in self.evalImgs :return: None ''' # tic = time.time() # print('Running per image evaluation...') p = self.params # add backward compatibility if useSegm is specified in params if p.useSegm is not None: p.iouType = 'segm' if p.useSegm == 1 else 'bbox' print('useSegm (deprecated) is not None. Running {} evaluation'.format(p.iouType)) # print('Evaluate annotation type *{}*'.format(p.iouType)) p.imgIds = list(np.unique(p.imgIds)) if p.useCats: p.catIds = list(np.unique(p.catIds)) p.maxDets = sorted(p.maxDets) self.params = p self._prepare() # loop through images, area range, max detection number catIds = p.catIds if p.useCats else [-1] if p.iouType == 'segm' or p.iouType == 'bbox': computeIoU = self.computeIoU elif p.iouType == 'keypoints': computeIoU = self.computeOks self.ious = { (imgId, catId): computeIoU(imgId, catId) for imgId in p.imgIds for catId in catIds} evaluateImg = self.evaluateImg maxDet = p.maxDets[-1] evalImgs = [ evaluateImg(imgId, catId, areaRng, maxDet) for catId in catIds for areaRng in p.areaRng for imgId in p.imgIds ] # this is NOT in the pycocotools code, but could be done outside evalImgs = np.asarray(evalImgs).reshape(len(catIds), len(p.areaRng), len(p.imgIds)) self._paramsEval = copy.deepcopy(self.params) # toc = time.time() # print('DONE (t={:0.2f}s).'.format(toc-tic)) return p.imgIds, evalImgs
coco_results = [] for original_id, prediction in predictions.items(): if len(prediction) == 0: continue
discover.go
package tmdb import "fmt" // DiscoverMovie type is a struct for movie JSON response. type DiscoverMovie struct { Page int64 `json:"page"` TotalResults int64 `json:"total_results"` TotalPages int64 `json:"total_pages"` Results []struct { VoteCount int64 `json:"vote_count"` ID int64 `json:"id"` Video bool `json:"video"` VoteAverage float32 `json:"vote_average"` Title string `json:"title"` Popularity float32 `json:"popularity"` PosterPath string `json:"poster_path"` OriginalLanguage string `json:"original_language"` OriginalTitle string `json:"original_title"` GenreIDs []int64 `json:"genre_ids"` BackdropPath string `json:"backdrop_path"` Adult bool `json:"adult"` Overview string `json:"overview"` ReleaseDate string `json:"release_date"` } `json:"results"` } // DiscoverTV type is a struct for tv JSON response. type DiscoverTV struct { Page int64 `json:"page"` TotalResults int64 `json:"total_results"` TotalPages int64 `json:"total_pages"` Results []struct { OriginalName string `json:"original_name"` GenreIDs []int64 `json:"genre_ids"` Name string `json:"name"` Popularity float32 `json:"popularity"` OriginCountry []string `json:"origin_country"` VoteCount int64 `json:"vote_count"` FirstAirDate string `json:"first_air_date"` BackdropPath string `json:"backdrop_path"` OriginalLanguage string `json:"original_language"` ID int64 `json:"id"` VoteAverage float32 `json:"vote_average"` Overview string `json:"overview"` PosterPath string `json:"poster_path"` } `json:"results"` } // GetDiscoverMovie discover movies by different types of data like // average rating, number of votes, genres and certifications. You can // get a valid list of certifications from the method. // // Discover also supports a nice list of sort options. // See below for all of the available options. // // Please note, when using certification \ certification.lte you must // also specify certification_country. These two parameters work together // in order to filter the results. 
You can only filter results with the // countries we have added to our certifications list. // // If you specify the region parameter, the regional release date will be // used instead of the primary release date. The date returned will be the // first date based on your query (ie. if a with_release_type is specified). // It's important to note the order of the release types that are used. // Specifying "2|3" would return the limited theatrical release date as // opposed to "3|2" which would return the theatrical date. // // Also note that a number of filters support being comma (,) or pipe (|) // separated. Comma's are treated like an AND and query while pipe's // are an OR. // // https://developers.themoviedb.org/3/discover/movie-discover func (c *Client) GetDiscoverMovie( o map[string]string, ) (*DiscoverMovie, error) { options := c.fmtOptions(o) tmdbURL := fmt.Sprintf( "%s%smovie?api_key=%s%s", baseURL, discoverURL, c.apiKey, options, ) t := DiscoverMovie{} err := c.get(tmdbURL, &t) if err != nil
return &t, nil } // GetDiscoverTV Discover TV shows by different types of data like average // rating, number of votes, genres, the network they aired on and air dates. // // Discover also supports a nice list of sort options. See below for all of // the available options. // // Also note that a number of filters support being comma (,) or pipe (|) // separated. Comma's are treated like an AND and query while pipe's are an OR. // // https://developers.themoviedb.org/3/discover/tv-discover func (c *Client) GetDiscoverTV( o map[string]string, ) (*DiscoverTV, error) { options := c.fmtOptions(o) tmdbURL := fmt.Sprintf( "%s%stv?api_key=%s%s", baseURL, discoverURL, c.apiKey, options, ) t := DiscoverTV{} err := c.get(tmdbURL, &t) if err != nil { return nil, err } return &t, nil }
{ return nil, err }
index.js
import React from 'react'; import ReactDOM from 'react-dom'; import './index.css'; function Square(props){ // Doesn't have state. So we can re-write this class as a function. return ( <button className="square" onClick={props.onClick} > {props.value} </button> ); } class Board extends React.Component { // handleClick(i) { // const squares = this.state.squares.slice(); // if (calculateWinner(squares) || squares[i]) { // return; // } // squares[i] = this.state.xIsNext ? 'X' : 'O'; // this.setState({ // squares: squares, // xIsNext: !this.state.xIsNext // }); // } renderSquare(i) { return ( <Square value={this.props.squares[i]} onClick={() => this.props.onClick(i)} /> ); } render() { return ( <div> <div className="board-row"> {this.renderSquare(0)} {this.renderSquare(1)} {this.renderSquare(2)} </div> <div className="board-row"> {this.renderSquare(3)} {this.renderSquare(4)} {this.renderSquare(5)} </div> <div className="board-row"> {this.renderSquare(6)} {this.renderSquare(7)} {this.renderSquare(8)} </div> </div> ); } } class Game extends React.Component { constructor(props) { super(props); this.state = { history: [{ squares: Array(9).fill(null), }], stepNumber: 0, xIsNext: true, }; } handleClick(i) { const history = this.state.history.slice(0, this.state.stepNumber + 1); const current = history[history.length-1]; const squares = current.squares.slice(); if (calculateWinner(squares) || squares[i]) { return; } squares[i] = this.state.xIsNext ? 'X' : 'O'; this.setState({ history:history.concat([{ squares:squares, }]), stepNumber: history.length, xIsNext: !this.state.xIsNext }); } jumpTo(step) { this.setState({ stepNumber: step, xIsNext: (step%2) === 0 }); } render() { const history = this.state.history; const current = history[this.state.stepNumber]; const winner = calculateWinner(current.squares); const moves = history.map((step, move) => { const desc = move ? 
'Go to move #' + move : 'Go to game start'; return ( <li key={move}> <button onClick={() => this.jumpTo(move)}>{desc}</button> </li> ); }); let status; if (winner) { status = 'Winner: ' + winner; } else { status = 'Next player: ' + (this.state.xIsNext ? 'X' : 'O'); } return ( <div className="game"> <div className="game-board"> <Board squares={current.squares} onClick={(i) => this.handleClick(i)} /> </div> <div className="game-info"> <div>{status}</div> <ol>{ moves }</ol> </div> </div> ); } } // ======================================== ReactDOM.render( <Game />, document.getElementById('root') ); function calculateWinner(squares) { const lines = [ [0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 3, 6], [1, 4, 7], [2, 5, 8], [0, 4, 8], [2, 4, 6], ]; for (let i = 0; i < lines.length; i++) {
const [a, b, c] = lines[i]; if (squares[a] && squares[a] === squares[b] && squares[a] === squares[c]) { return squares[a]; } } return null; }
test_dnspod.py
# Test for one implementation of the interface from lexicon.providers.dnspod import Provider from integration_tests import IntegrationTests from unittest import TestCase import pytest # Hook into testing framework by inheriting unittest.TestCase and reuse # the tests which *each and every* implementation of the interface must # pass, by inheritance from define_tests.TheTests class DnsParkProviderTests(TestCase, IntegrationTests): Provider = Provider provider_name = 'dnspod' domain = 'capsulecd.com' def _filter_post_data_parameters(self): return ['login_token'] # TODO: @pytest.mark.skip(reason="domain no longer exists") def test_Provider_when_calling_list_records_after_setting_ttl(self): return # TODO: this should be enabled @pytest.mark.skip(reason="regenerating auth keys required") def test_Provider_when_calling_update_record_should_modify_record_name_specified(self):
return
_lru_cache.py
from collections import OrderedDict from typing import Dict, Generic, Mapping, TypeVar CacheKey = TypeVar("CacheKey") CacheValue = TypeVar("CacheValue") class
(Generic[CacheKey, CacheValue], OrderedDict): """ A dictionary-like container that stores a given maximum items. If an additional item is added when the LRUCache is full, the least recently used key is discarded to make room for the new item. """ def __init__(self, cache_size: int) -> None: self.cache_size = cache_size super(LRUCache, self).__init__() def __setitem__(self, key: CacheKey, value: CacheValue) -> None: """Store a new views, potentially discarding an old value.""" if key not in self: if len(self) >= self.cache_size: self.popitem(last=False) OrderedDict.__setitem__(self, key, value) def __getitem__(self: Dict[CacheKey, CacheValue], key: CacheKey) -> CacheValue: """Gets the item, but also makes it most recent.""" value: CacheValue = OrderedDict.__getitem__(self, key) OrderedDict.__delitem__(self, key) OrderedDict.__setitem__(self, key, value) return value
LRUCache
user_pipeline_tracker_test.py
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership.
# (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import unittest import apache_beam as beam from apache_beam.runners.interactive.user_pipeline_tracker import UserPipelineTracker class UserPipelineTrackerTest(unittest.TestCase): def test_getting_unknown_pid_returns_none(self): ut = UserPipelineTracker() p = beam.Pipeline() self.assertIsNone(ut.get_pipeline(str(id(p)))) def test_getting_unknown_pipeline_returns_none(self): ut = UserPipelineTracker() p = beam.Pipeline() self.assertIsNone(ut.get_user_pipeline(p)) def test_no_parent_returns_none(self): ut = UserPipelineTracker() user = beam.Pipeline() derived = beam.Pipeline() orphan = beam.Pipeline() ut.add_derived_pipeline(user, derived) self.assertIsNone(ut.get_user_pipeline(orphan)) def test_get_user_pipeline_is_same(self): ut = UserPipelineTracker() p = beam.Pipeline() ut.add_user_pipeline(p) self.assertIs(ut.get_user_pipeline(p), p) def test_can_add_derived(self): ut = UserPipelineTracker() user = beam.Pipeline() derived = beam.Pipeline() ut.add_derived_pipeline(user, derived) self.assertIs(ut.get_user_pipeline(derived), user) def test_can_add_multiple_derived(self): """Tests that there can be many user pipelines with many derived pipelines. """ ut = UserPipelineTracker() # Add the first set of user and derived pipelines. user1 = beam.Pipeline() derived11 = beam.Pipeline() derived12 = beam.Pipeline() ut.add_derived_pipeline(user1, derived11) ut.add_derived_pipeline(user1, derived12) # Add the second set of user and derived pipelines. 
user2 = beam.Pipeline() derived21 = beam.Pipeline() derived22 = beam.Pipeline() ut.add_derived_pipeline(user2, derived21) ut.add_derived_pipeline(user2, derived22) # Assert that the user pipelines are correct. self.assertIs(ut.get_user_pipeline(derived11), user1) self.assertIs(ut.get_user_pipeline(derived12), user1) self.assertIs(ut.get_user_pipeline(derived21), user2) self.assertIs(ut.get_user_pipeline(derived22), user2) def test_cannot_have_multiple_parents(self): ut = UserPipelineTracker() user1 = beam.Pipeline() user2 = beam.Pipeline() derived = beam.Pipeline() ut.add_derived_pipeline(user1, derived) with self.assertRaises(AssertionError): ut.add_derived_pipeline(user2, derived) self.assertIs(ut.get_user_pipeline(derived), user1) def test_adding_derived_with_derived_gets_user_pipeline(self): """Tests that one can correctly add a derived pipeline from a derived pipeline and still get the correct user pipeline. """ ut = UserPipelineTracker() user = beam.Pipeline() derived1 = beam.Pipeline() derived2 = beam.Pipeline() # Add the first derived pipeline to the user pipelne. ut.add_derived_pipeline(user, derived1) # Add the second derived pipeline to the first derived pipeline. This should # get the user pipeline of the first and add the second to it. ut.add_derived_pipeline(derived1, derived2) # Asserts that both derived pipelines are under the same user pipeline. 
self.assertIs(ut.get_user_pipeline(derived1), user) self.assertIs(ut.get_user_pipeline(derived2), user) def test_can_get_pipeline_from_id(self): """Tests the pid -> pipeline memoization.""" ut = UserPipelineTracker() user = beam.Pipeline() derived = beam.Pipeline() ut.add_user_pipeline(user) ut.add_derived_pipeline(user, derived) self.assertIs(ut.get_pipeline(str(id(user))), user) self.assertIs(ut.get_pipeline(str(id(derived))), derived) def test_clear(self): ut = UserPipelineTracker() user = beam.Pipeline() derived = beam.Pipeline() ut.add_derived_pipeline(user, derived) self.assertIs(ut.get_user_pipeline(derived), user) ut.clear() self.assertIsNone(ut.get_user_pipeline(user)) self.assertIsNone(ut.get_user_pipeline(derived)) def test_can_iterate(self): ut = UserPipelineTracker() user1 = beam.Pipeline() derived11 = beam.Pipeline() derived12 = beam.Pipeline() ut.add_derived_pipeline(user1, derived11) ut.add_derived_pipeline(user1, derived12) user2 = beam.Pipeline() derived21 = beam.Pipeline() derived22 = beam.Pipeline() ut.add_derived_pipeline(user2, derived21) ut.add_derived_pipeline(user2, derived22) user_pipelines = set(p for p in ut) self.assertSetEqual(set([user1, user2]), user_pipelines) def test_can_evict_user_pipeline(self): ut = UserPipelineTracker() user1 = beam.Pipeline() derived11 = beam.Pipeline() derived12 = beam.Pipeline() ut.add_derived_pipeline(user1, derived11) ut.add_derived_pipeline(user1, derived12) user2 = beam.Pipeline() derived21 = beam.Pipeline() derived22 = beam.Pipeline() ut.add_derived_pipeline(user2, derived21) ut.add_derived_pipeline(user2, derived22) ut.evict(user1) self.assertIsNone(ut.get_user_pipeline(user1)) self.assertIsNone(ut.get_user_pipeline(derived11)) self.assertIsNone(ut.get_user_pipeline(derived12)) self.assertIs(user2, ut.get_user_pipeline(derived21)) self.assertIs(user2, ut.get_user_pipeline(derived22)) if __name__ == '__main__': unittest.main()
# The ASF licenses this file to You under the Apache License, Version 2.0
Spinner.js
// All material copyright ESRI, All Rights Reserved, unless otherwise specified.
// See https://js.arcgis.com/4.3/esri/copyright.txt for details. //>>built define({searching:"Recherche"});
auth.js
import Cookies from 'js-cookie' const TokenKey = 'Authorization' export function getToken() { return null // Cookies.get(TokenKey) }
export function removeToken() { return Cookies.remove(TokenKey) }
export function setToken(token) { return Cookies.set(TokenKey, token) }
_1_infinite-scroll.js
// File#: _1_infinite-scroll // Usage: codyhouse.co/license (function() { var InfiniteScroll = function(opts) { this.options = Util.extend(InfiniteScroll.defaults, opts); this.element = this.options.element; this.loader = document.getElementsByClassName('js-infinite-scroll__loader'); this.loadBtn = document.getElementsByClassName('js-infinite-scroll__btn'); this.loading = false; this.currentPageIndex = this.element.getAttribute('data-current-page') ? parseInt(this.element.getAttribute('data-current-page')) : 0; this.index = this.currentPageIndex; initLoad(this); }; function initLoad(infiniteScroll) { setPathValues(infiniteScroll); // get dynamic content paths getTresholdPixel(infiniteScroll); if(infiniteScroll.options.container) { // get container of dynamic content infiniteScroll.container = infiniteScroll.element.querySelector(infiniteScroll.options.container); } if((!infiniteScroll.options.loadBtn || infiniteScroll.options.loadBtnDelay) && infiniteScroll.loadBtn.length > 0) { // hide load more btn Util.addClass(infiniteScroll.loadBtn[0], 'sr-only'); } if(!infiniteScroll.options.loadBtn || infiniteScroll.options.loadBtnDelay) { if(intersectionObserverSupported) { // check element scrolling initObserver(infiniteScroll); } else { infiniteScroll.scrollEvent = handleEvent.bind(infiniteScroll); window.addEventListener('scroll', infiniteScroll.scrollEvent); } } initBtnEvents(infiniteScroll); // listen for click on load Btn if(!infiniteScroll.options.path) { // content has been loaded using a custom function infiniteScroll.element.addEventListener('loaded-new', function(event){ contentWasLoaded(infiniteScroll, event.detail.path, event.detail.checkNext); // reset element // emit 'content-loaded' event -> this could be useful when new content needs to be initialized infiniteScroll.element.dispatchEvent(new CustomEvent('content-loaded', {detail: event.detail.path})); }); } }; function setPathValues(infiniteScroll) { // path can be strin or comma-separated list 
if(!infiniteScroll.options.path) return; var split = infiniteScroll.options.path.split(','); if(split.length > 1) { infiniteScroll.options.path = []; for(var i = 0; i < split.length; i++) infiniteScroll.options.path.push(split[i].trim()); } }; function getTresholdPixel(infiniteScroll) { // get the threshold value in pixels - will be used only if intersection observer is not supported infiniteScroll.thresholdPixel = infiniteScroll.options.threshold.indexOf('px') > -1 ? parseInt(infiniteScroll.options.threshold.replace('px', '')) : parseInt(window.innerHeight*parseInt(infiniteScroll.options.threshold.replace('%', ''))/100); }; function initObserver(infiniteScroll) { // intersection observer supported // append an element to the bottom of the container that will be observed var observed = document.createElement("div"); Util.setAttributes(observed, {'aria-hidden': 'true', 'class': 'js-infinite-scroll__observed', 'style': 'width: 100%; height: 1px; margin-top: -1px; visibility: hidden;'}); infiniteScroll.element.appendChild(observed); infiniteScroll.observed = infiniteScroll.element.getElementsByClassName('js-infinite-scroll__observed')[0]; var config = {rootMargin: '0px 0px '+infiniteScroll.options.threshold+' 0px'}; infiniteScroll.observer = new IntersectionObserver(observerLoadContent.bind(infiniteScroll), config); infiniteScroll.observer.observe(infiniteScroll.observed); }; function observerLoadContent(entry) { if(this.loading) return; if(entry[0].intersectionRatio > 0) loadMore(this); }; function handleEvent(event) { // handle click/scroll events switch(event.type) { case 'click': { initClick(this, event); // click on load more button break; } case 'scroll': { // triggered only if intersection onserver is not supported initScroll(this); break; } } }; function initScroll(infiniteScroll) { // listen to scroll event (only if intersectionObserver is not supported) (!window.requestAnimationFrame) ? 
setTimeout(checkLoad.bind(infiniteScroll)) : window.requestAnimationFrame(checkLoad.bind(infiniteScroll)); }; function initBtnEvents(infiniteScroll) { // load more button events - click + focus (for keyboard accessibility) if(infiniteScroll.loadBtn.length == 0) return; infiniteScroll.clickEvent = handleEvent.bind(infiniteScroll); infiniteScroll.loadBtn[0].addEventListener('click', infiniteScroll.clickEvent); if(infiniteScroll.options.loadBtn && !infiniteScroll.options.loadBtnDelay) { Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); if(infiniteScroll.loader.length > 0 ) Util.addClass(infiniteScroll.loader[0], 'is-hidden'); } // toggle class sr-only if link is in focus/loses focus infiniteScroll.loadBtn[0].addEventListener('focusin', function(){ if(Util.hasClass(infiniteScroll.loadBtn[0], 'sr-only')) { Util.addClass(infiniteScroll.loadBtn[0], 'js-infinite-scroll__btn-focus'); Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); } }); infiniteScroll.loadBtn[0].addEventListener('focusout', function(){ if(Util.hasClass(infiniteScroll.loadBtn[0], 'js-infinite-scroll__btn-focus')) { Util.removeClass(infiniteScroll.loadBtn[0], 'js-infinite-scroll__btn-focus'); Util.addClass(infiniteScroll.loadBtn[0], 'sr-only'); } }); }; function initClick(infiniteScroll, event) { // click on 'Load More' button event.preventDefault(); Util.addClass(infiniteScroll.loadBtn[0], 'sr-only'); loadMore(infiniteScroll); }; function checkLoad() { // while scrolling -> check if we need to load new content (only if intersectionObserver is not supported) if(this.loading) return; if(!needLoad(this)) return; loadMore(this); }; function loadMore(infiniteScroll) { // new conten needs to be loaded infiniteScroll.loading = true; if(infiniteScroll.loader.length > 0) Util.removeClass(infiniteScroll.loader[0], 'is-hidden'); var moveFocus = false; if(infiniteScroll.loadBtn.length > 0 ) moveFocus = Util.hasClass(infiniteScroll.loadBtn[0], 'js-infinite-scroll__btn-focus'); // check if need to add 
content or user has custom load function if(infiniteScroll.options.path) { contentBasicLoad(infiniteScroll, moveFocus); // load content } else { emitCustomEvents(infiniteScroll, 'load-new', moveFocus); // user takes care of loading content } }; function contentBasicLoad(infiniteScroll, moveFocus) { var filePath = getFilePath(infiniteScroll); // load file content getNewContent(filePath, function(content){ var checkNext = insertNewContent(infiniteScroll, content, moveFocus); contentWasLoaded(infiniteScroll, filePath, checkNextPageAvailable(infiniteScroll, checkNext)); // emit 'content-loaded' event -> this could be useful when new content needs to be initialized infiniteScroll.element.dispatchEvent(new CustomEvent('content-loaded', {detail: filePath})); }); }; function getFilePath(infiniteScroll) { // get path of the file to load return (Array.isArray(infiniteScroll.options.path)) ? infiniteScroll.options.path[infiniteScroll.index] : infiniteScroll.options.path.replace('{n}', infiniteScroll.index+1); }; function getNewContent(path, cb) { var xhttp = new XMLHttpRequest(); xhttp.onreadystatechange = function() { if (this.readyState == 4 && this.status == 200) cb(this.responseText); }; xhttp.open("GET", path, true); xhttp.send(); }; function insertNewContent(infiniteScroll, content, moveFocus) { var next = false; if(infiniteScroll.options.container) { var div = document.createElement("div"); div.innerHTML = content; var wrapper = div.querySelector(infiniteScroll.options.container); if(wrapper) { content = wrapper.innerHTML; next = wrapper.getAttribute('data-path'); } } var lastItem = false; if(moveFocus) lastItem = getLastChild(infiniteScroll); if(infiniteScroll.container) { infiniteScroll.container.insertAdjacentHTML('beforeend', content); } else { (infiniteScroll.loader.length > 0) ? 
infiniteScroll.loader[0].insertAdjacentHTML('beforebegin', content) : infiniteScroll.element.insertAdjacentHTML('beforeend', content); } if(moveFocus && lastItem) Util.moveFocus(lastItem); return next; }; function getLastChild(infiniteScroll) { if(infiniteScroll.container) return infiniteScroll.container.lastElementChild; if(infiniteScroll.loader.length > 0) return infiniteScroll.loader[0].previousElementSibling; return infiniteScroll.element.lastElementChild; }; function checkNextPageAvailable(infiniteScroll, checkNext) { // check if there's still content to be loaded if(Array.isArray(infiniteScroll.options.path)) { return infiniteScroll.options.path.length > infiniteScroll.index + 1; } return checkNext; }; function contentWasLoaded(infiniteScroll, url, checkNext) { // new content has been loaded - reset status if(infiniteScroll.loader.length > 0) Util.addClass(infiniteScroll.loader[0], 'is-hidden'); // hide loader if(infiniteScroll.options.updateHistory && url) { // update browser history var pageArray = location.pathname.split('/'), actualPage = pageArray[pageArray.length - 1] ; if( actualPage != url && history.pushState ) window.history.replaceState({path: url},'',url); } if(!checkNext) { // no need to load additional pages - remove scroll listening and/or click listening removeScrollEvents(infiniteScroll); if(infiniteScroll.clickEvent) { infiniteScroll.loadBtn[0].removeEventListener('click', infiniteScroll.clickEvent); Util.addClass(infiniteScroll.loadBtn[0], 'is-hidden'); Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); } } if(checkNext && infiniteScroll.options.loadBtn) { // check if we need to switch from scrolling to click -> add/remove proper listener if(!infiniteScroll.options.loadBtnDelay) { Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); } else if(infiniteScroll.index - infiniteScroll.currentPageIndex + 1 >= infiniteScroll.options.loadBtnDelay && infiniteScroll.loadBtn.length > 0) { removeScrollEvents(infiniteScroll); 
Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); } } if(checkNext && infiniteScroll.loadBtn.length > 0 && Util.hasClass(infiniteScroll.loadBtn[0], 'js-infinite-scroll__btn-focus')) { // keyboard accessibility Util.removeClass(infiniteScroll.loadBtn[0], 'sr-only'); } infiniteScroll.index = infiniteScroll.index + 1; infiniteScroll.loading = false; }; function removeScrollEvents(infiniteScroll) { if(infiniteScroll.scrollEvent) window.removeEventListener('scroll', infiniteScroll.scrollEvent); if(infiniteScroll.observer) infiniteScroll.observer.unobserve(infiniteScroll.observed); }; function needLoad(infiniteScroll) { // intersectionObserverSupported not supported -> check if threshold has been reached return infiniteScroll.element.getBoundingClientRect().bottom - window.innerHeight <= infiniteScroll.thresholdPixel; }; function emitCustomEvents(infiniteScroll, eventName, moveFocus) { // applicable when user takes care of loading new content var event = new CustomEvent(eventName, {detail: {index: infiniteScroll.index+1, moveFocus: moveFocus}}); infiniteScroll.element.dispatchEvent(event); }; InfiniteScroll.defaults = { element : '', path : false, // path of files to be loaded: set to comma-separated list or string (should include {n} to be replaced by integer index). If not set, user will take care of loading new content container: false, // Append new content to this element. 
Additionally, when loaded a new page, only content of the element will be appended threshold: '200px', // distance between viewport and scroll area for loading new content updateHistory: false, // push new url to browser history loadBtn: false, // use a button to load more content loadBtnDelay: false // set to an integer if you want the load more button to be visible only after a number of loads on scroll - loadBtn needs to be 'on' }; window.InfiniteScroll = InfiniteScroll; //initialize the InfiniteScroll objects var infiniteScroll = document.getElementsByClassName('js-infinite-scroll'), intersectionObserverSupported = ('IntersectionObserver' in window && 'IntersectionObserverEntry' in window && 'intersectionRatio' in window.IntersectionObserverEntry.prototype); if( infiniteScroll.length > 0) { for( var i = 0; i < infiniteScroll.length; i++) { (function(i){
updateHistory = ( infiniteScroll[i].getAttribute('data-history') && infiniteScroll[i].getAttribute('data-history') == 'on') ? true : false, loadBtn = ( infiniteScroll[i].getAttribute('data-load-btn') && infiniteScroll[i].getAttribute('data-load-btn') == 'on') ? true : false, loadBtnDelay = infiniteScroll[i].getAttribute('data-load-btn-delay') ? infiniteScroll[i].getAttribute('data-load-btn-delay') : false, threshold = infiniteScroll[i].getAttribute('data-threshold') ? infiniteScroll[i].getAttribute('data-threshold') : '200px'; new InfiniteScroll({element: infiniteScroll[i], path : path, container : container, updateHistory: updateHistory, loadBtn: loadBtn, loadBtnDelay: loadBtnDelay, threshold: threshold}); })(i); } }; }());
var path = infiniteScroll[i].getAttribute('data-path') ? infiniteScroll[i].getAttribute('data-path') : false, container = infiniteScroll[i].getAttribute('data-container') ? infiniteScroll[i].getAttribute('data-container') : false,
propIs.js
'use strict'; var _curry3 = require('./internal/_curry3'); var is = require('./is'); /** * Returns `true` if the specified object property is of the given type; * `false` otherwise. * * @func * @memberOf R
* @param {Function} type * @param {String} name * @param {*} obj * @return {Boolean} * @see R.is, R.propSatisfies * @example * * R.propIs(Number, 'x', {x: 1, y: 2}); //=> true * R.propIs(Number, 'x', {x: 'foo'}); //=> false * R.propIs(Number, 'x', {}); //=> false */ var propIs = _curry3(function propIs(type, name, obj) { return is(type, obj[name]); }); module.exports = propIs;
* @since v0.16.0 * @category Type * @sig Type -> String -> Object -> Boolean
test_retriever.py
import os import shutil import pytest from pyuplift.utils import retrieve_from_gz data_home = os.path.join(os.sep.join(__file__.split(os.sep)[:-1]), 'data') def test_retrieve_from_gz():
output_path = os.path.join(data_home, 'test.test') archive_path = output_path + '.gz' retrieve_from_gz(archive_path, output_path) with open(output_path, 'r') as f: text = f.read() os.remove(output_path) assert text == 'good'
ctor_evaller.py
#!/usr/bin/env python # Copyright 2016 The Emscripten Authors. All rights reserved. # Emscripten is available under two separate licenses, the MIT license and the # University of Illinois/NCSA Open Source License. Both these licenses can be # found in the LICENSE file. """Tries to evaluate global constructors, applying their effects ahead of time. This is an LTO-like operation, and to avoid parsing the entire tree (we might fail to parse a massive project, we operate on the text in python. """ import json import logging import os import subprocess import sys import time sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) from tools import shared, js_optimizer, jsrun, building from tools.tempfiles import try_delete js_file = sys.argv[1] binary_file = sys.argv[2] # mem init for js, wasm binary for wasm total_memory = int(sys.argv[3]) total_stack = int(sys.argv[4]) global_base = int(sys.argv[5]) binaryen_bin = sys.argv[6] debug_info = int(sys.argv[7]) extra_args = sys.argv[8:] wasm = bool(binaryen_bin) assert global_base > 0 logger = logging.getLogger('ctor_evaller') # helpers def get_asm(js): return js[js.find(js_optimizer.start_asm_marker):js.find(js_optimizer.end_asm_marker)] def find_ctors(js): ctors_start = js.find('__ATINIT__.push(') if ctors_start < 0: return (-1, -1) ctors_end = js.find(');', ctors_start) assert ctors_end > 0 ctors_end += 3 return (ctors_start, ctors_end) def find_ctors_data(js, num): ctors_start, ctors_end = find_ctors(js) assert ctors_start > 0 ctors_text = js[ctors_start:ctors_end] all_ctors = [ctor for ctor in ctors_text.split(' ') if ctor.endswith('()') and not ctor == 'function()' and '.' not in ctor] all_ctors = [ctor.replace('()', '') for ctor in all_ctors] if shared.Settings.WASM_BACKEND: assert all(ctor.startswith('_') for ctor in all_ctors) all_ctors = [ctor[1:] for ctor in all_ctors] assert len(all_ctors) ctors = all_ctors[:num] return ctors_start, ctors_end, all_ctors, ctors def
(js, mem_init, num): def kill_func(asm, name): asm = asm.replace('function ' + name + '(', 'function KILLED_' + name + '(', 1) return asm def add_func(asm, func): before = len(asm) asm = asm.replace('function ', ' ' + func + '\nfunction ', 1) assert len(asm) > before name = func[func.find(' ') + 1:func.find('(')] asm = asm.replace('return {', 'return { ' + name + ': ' + name + ',') return asm # Find the global ctors ctors_start, ctors_end, all_ctors, ctors = find_ctors_data(js, num) logging.debug('trying to eval ctors: ' + ', '.join(ctors)) # Find the asm module, and receive the mem init. asm = get_asm(js) assert len(asm) asm = asm.replace('use asm', 'not asm') # don't try to validate this # Substitute sbrk with a failing stub: the dynamic heap memory area shouldn't get increased during static ctor initialization. asm = asm.replace('function _sbrk(', 'function _sbrk(increment) { throw "no sbrk when evalling ctors!"; } function KILLED_sbrk(', 1) # find all global vars, and provide only safe ones. Also add dumping for those. 
pre_funcs_start = asm.find(';') + 1 pre_funcs_end = asm.find('function ', pre_funcs_start) pre_funcs_end = asm.rfind(';', pre_funcs_start, pre_funcs_end) + 1 pre_funcs = asm[pre_funcs_start:pre_funcs_end] parts = [x for x in [x.strip() for x in pre_funcs.split(';')] if x.startswith('var ')] global_vars = [] new_globals = '\n' for part in parts: part = part[4:] # skip 'var ' bits = [x.strip() for x in part.split(',')] for bit in bits: name, value = [x.strip() for x in bit.split('=', 1)] if value in ['0', '+0', '0.0'] or name in [ 'STACKTOP', 'STACK_MAX', 'DYNAMICTOP_PTR', 'HEAP8', 'HEAP16', 'HEAP32', 'HEAPU8', 'HEAPU16', 'HEAPU32', 'HEAPF32', 'HEAPF64', 'Int8View', 'Int16View', 'Int32View', 'Uint8View', 'Uint16View', 'Uint32View', 'Float32View', 'Float64View', 'nan', 'inf', '_emscripten_memcpy_big', '___dso_handle', '_atexit', '___cxa_atexit', ] or name.startswith('Math_'): if 'new ' not in value: global_vars.append(name) new_globals += ' var ' + name + ' = ' + value + ';\n' asm = asm[:pre_funcs_start] + new_globals + asm[pre_funcs_end:] asm = add_func(asm, 'function dumpGlobals() { return [ ' + ', '.join(global_vars) + '] }') # find static bump. this is the maximum area we'll write to during startup. static_bump_op = 'STATICTOP = STATIC_BASE + ' static_bump_start = js.find(static_bump_op) static_bump_end = js.find(';', static_bump_start) static_bump = int(js[static_bump_start + len(static_bump_op):static_bump_end]) # Generate a safe sandboxed environment. We replace all ffis with errors. Otherwise, # asm.js can't call outside, so we are ok. 
# if shared.DEBUG: # temp_file = os.path.join(shared.CANONICAL_TEMP_DIR, 'ctorEval.js') # shared.safe_ensure_dirs(shared.CANONICAL_TEMP_DIR) # else: # temp_file = config.get_temp_files().get('.ctorEval.js').name with shared.configuration.get_temp_files().get_file('.ctorEval.js') as temp_file: open(temp_file, 'w').write(''' var totalMemory = %d; var totalStack = %d; var buffer = new ArrayBuffer(totalMemory); var heap = new Uint8Array(buffer); var heapi32 = new Int32Array(buffer); var memInit = %s; var globalBase = %d; var staticBump = %d; heap.set(memInit, globalBase); var staticTop = globalBase + staticBump; var staticBase = staticTop; var stackTop = staticTop; while (stackTop %% 16 !== 0) stackTop--; var stackBase = stackTop; var stackMax = stackTop + totalStack; if (stackMax >= totalMemory) throw 'not enough room for stack'; var dynamicTopPtr = stackMax; heapi32[dynamicTopPtr >> 2] = stackMax; if (!Math.imul) { Math.imul = Math.imul || function(a, b) { var ah = (a >>> 16) & 0xffff; var al = a & 0xffff; var bh = (b >>> 16) & 0xffff; var bl = b & 0xffff; // the shift by 0 fixes the sign on the high part // the final |0 converts the unsigned value into a signed value return ((al * bl) + (((ah * bl + al * bh) << 16) >>> 0)|0); }; } if (!Math.fround) { var froundBuffer = new Float32Array(1); Math.fround = function(x) { froundBuffer[0] = x; return froundBuffer[0] }; } var atexits = []; // we record and replay atexits var globalArg = { Int8Array: Int8Array, Int16Array: Int16Array, Int32Array: Int32Array, Uint8Array: Uint8Array, Uint16Array: Uint16Array, Uint32Array: Uint32Array, Float32Array: Float32Array, Float64Array: Float64Array, NaN: NaN, Infinity: Infinity, Math: Math, }; var libraryArg = { STACKTOP: stackTop, STACK_MAX: stackMax, DYNAMICTOP_PTR: dynamicTopPtr, ___dso_handle: 0, // used by atexit, value doesn't matter _emscripten_memcpy_big: function(dest, src, num) { heap.set(heap.subarray(src, src+num), dest); return dest; }, _atexit: function(x) { 
atexits.push([x, 0]); return 0; }, ___cxa_atexit: function(x, y) { atexits.push([x, y]); return 0; }, }; // Instantiate asm %s (globalArg, libraryArg, buffer); // Try to run the constructors var allCtors = %s; var numSuccessful = 0; for (var i = 0; i < allCtors.length; i++) { try { var globalsBefore = asm['dumpGlobals'](); asm[allCtors[i]](); var globalsAfter = asm['dumpGlobals'](); if (JSON.stringify(globalsBefore) !== JSON.stringify(globalsAfter)) { console.warn('globals modified'); break; } if (heapi32[dynamicTopPtr >> 2] !== stackMax) { console.warn('dynamic allocation was performend'); break; } // this one was ok. numSuccessful = i + 1; } catch (e) { console.warn(e.stack); break; } } // Write out new mem init. It might be bigger if we added to the zero section, look for zeros var newSize = globalBase + staticBump; while (newSize > globalBase && heap[newSize-1] == 0) newSize--; console.log(JSON.stringify([numSuccessful, Array.prototype.slice.call(heap.subarray(globalBase, newSize)), atexits])); ''' % (total_memory, total_stack, mem_init, global_base, static_bump, asm, json.dumps(ctors))) def read_and_delete(filename): result = '' try: result = open(filename, 'r').read() finally: try_delete(filename) return result # Execute the sandboxed code. If an error happened due to calling an ffi, that's fine, # us exiting with an error tells the caller that we failed. If it times out, give up. 
out_file = shared.configuration.get_temp_files().get('.out').name err_file = shared.configuration.get_temp_files().get('.err').name out_file_handle = open(out_file, 'w') err_file_handle = open(err_file, 'w') proc = subprocess.Popen(shared.NODE_JS + [temp_file], stdout=out_file_handle, stderr=err_file_handle, universal_newlines=True) try: jsrun.timeout_run(proc, timeout=10, full_output=True, throw_on_failure=False) except Exception as e: if 'Timed out' not in str(e): raise logger.debug('ctors timed out\n') return (0, 0, 0, 0) if shared.WINDOWS: time.sleep(0.5) # On Windows, there is some kind of race condition with Popen output stream related functions, where file handles are still in use a short period after the process has finished. out_file_handle.close() err_file_handle.close() out_result = read_and_delete(out_file) err_result = read_and_delete(err_file) if proc.returncode != 0: # TODO(sbc): This should never happen under normal circumstances. # switch to exit_with_error once we fix https://github.com/emscripten-core/emscripten/issues/7463 logger.debug('unexpected error while trying to eval ctors:\n' + out_result + '\n' + err_result) return (0, 0, 0, 0) # out contains the new mem init and other info num_successful, mem_init_raw, atexits = json.loads(out_result) mem_init = bytes(bytearray(mem_init_raw)) total_ctors = len(all_ctors) if num_successful < total_ctors: logger.debug('not all ctors could be evalled, something was used that was not safe (and therefore was not defined, and caused an error):\n========\n' + err_result + '========') # Remove the evalled ctors, add a new one for atexits if needed, and write that out if len(ctors) == total_ctors and len(atexits) == 0: new_ctors = '' else: elements = [] if len(atexits): elements.append('{ func: function() { %s } }' % '; '.join(['_atexit(' + str(x[0]) + ',' + str(x[1]) + ')' for x in atexits])) for ctor in all_ctors[num:]: elements.append('{ func: function() { %s() } }' % ctor) new_ctors = '__ATINIT__.push(' + 
', '.join(elements) + ');' js = js[:ctors_start] + new_ctors + js[ctors_end:] return (num_successful, js, mem_init, ctors) def eval_ctors_wasm(js, wasm_file, num): ctors_start, ctors_end, all_ctors, ctors = find_ctors_data(js, num) cmd = [os.path.join(binaryen_bin, 'wasm-ctor-eval'), wasm_file, '-o', wasm_file, '--ctors=' + ','.join(ctors)] cmd += extra_args if debug_info: cmd += ['-g'] logger.debug('wasm ctor cmd: ' + str(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) try: err = jsrun.timeout_run(proc, timeout=10, full_output=True, throw_on_failure=False) except Exception as e: if 'Timed out' not in str(e): raise logger.debug('ctors timed out\n') return 0, js if proc.returncode != 0: shared.exit_with_error('unexpected error while trying to eval ctors:\n' + err) num_successful = err.count('success on') logger.debug(err) if len(ctors) == num_successful: new_ctors = '' else: elements = [] for ctor in all_ctors[num_successful:]: elements.append('{ func: function() { %s() } }' % ctor) new_ctors = '__ATINIT__.push(' + ', '.join(elements) + ');' js = js[:ctors_start] + new_ctors + js[ctors_end:] return num_successful, js # main def main(): js = open(js_file).read() ctors_start, ctors_end = find_ctors(js) if ctors_start < 0: logger.debug('ctor_evaller: no ctors') sys.exit(0) ctors_text = js[ctors_start:ctors_end] if ctors_text.count('(') == 1: logger.debug('ctor_evaller: push, but no ctors') sys.exit(0) num_ctors = ctors_text.count('function()') logger.debug('ctor_evaller: %d ctors, from |%s|' % (num_ctors, ctors_text)) if not wasm: # js path mem_init_file = binary_file if os.path.exists(mem_init_file): mem_init = json.dumps(list(bytearray(open(mem_init_file, 'rb').read()))) else: mem_init = [] # find how many ctors we can remove, by bisection (if there are hundreds, running them sequentially is silly slow) logger.debug('ctor_evaller: trying to eval %d global constructors' % num_ctors) num_successful, new_js, 
new_mem_init, removed = eval_ctors_js(js, mem_init, num_ctors) if num_successful == 0: logger.debug('ctor_evaller: not successful') sys.exit(0) logger.debug('ctor_evaller: we managed to remove %d ctors' % num_successful) if num_successful == num_ctors: js = new_js mem_init = new_mem_init else: logger.debug('ctor_evaller: final execution') check, js, mem_init, removed = eval_ctors_js(js, mem_init, num_successful) assert check == num_successful open(js_file, 'w').write(js) open(mem_init_file, 'wb').write(mem_init) # Dead function elimination can help us logger.debug('ctor_evaller: eliminate no longer needed functions after ctor elimination') # find exports asm = get_asm(open(js_file).read()) exports_start = asm.find('return {') exports_end = asm.find('};', exports_start) exports_text = asm[asm.find('{', exports_start) + 1:exports_end] exports = [x.split(':')[1].strip() for x in exports_text.replace(' ', '').split(',')] for r in removed: assert r in exports, 'global ctors were exported' exports = [e for e in exports if e not in removed] # fix up the exports js = open(js_file).read() absolute_exports_start = js.find(exports_text) js = js[:absolute_exports_start] + ', '.join([e + ': ' + e for e in exports]) + js[absolute_exports_start + len(exports_text):] open(js_file, 'w').write(js) # find unreachable methods and remove them reachable = building.calculate_reachable_functions(js_file, exports, can_reach=False)['reachable'] for r in removed: assert r not in reachable, 'removed ctors must NOT be reachable' building.js_optimizer(js_file, ['removeFuncs'], extra_info={'keep': reachable}, output_filename=js_file) else: # wasm path wasm_file = binary_file logger.debug('ctor_evaller (wasm): trying to eval %d global constructors' % num_ctors) num_successful, new_js = eval_ctors_wasm(js, wasm_file, num_ctors) if num_successful == 0: logger.debug('ctor_evaller: not successful') sys.exit(0) logger.debug('ctor_evaller: we managed to remove %d ctors' % num_successful) open(js_file, 
'w').write(new_js) if __name__ == '__main__': sys.exit(main())
eval_ctors_js
hls-segmenter.py
#!/usr/bin/env python # upload to AWS S3 and clean up # author Roman Sereda # [email protected] # # install dependenses #sudo pip install boto import json import os.path import logging import subprocess from boto.s3.connection import S3Connection from boto.s3.key import Key config_file = 'config.json' json_data=open(config_file) config = json.load(json_data) json_data.close() logging.basicConfig(filename=config['log'],level=logging.DEBUG) ########################################################################### def s3_file_upload (config,filename,keyname): conn = S3Connection(config['aws']["access_key"],config['aws']["secret_key"]) mybucket = conn.get_bucket(config['aws']["s3bucket"]) # select bucket k = Key(mybucket) # select key k.key = keyname #named new key k.set_contents_from_filename(filename) #upload new file name k.set_acl('public-read') # set publis read access keylist = mybucket.list() # get list of files result = False ss= [] for key in keylist: ss.append( key.name) if any(keyname in s for s in ss): logging.debug('s3_file_upload ' + 'Upload ' + keyname + "Completed") result = result | True rs = conn.close() return result def isVideo (ffprobe,filename): if os.path.isfile(filename): command = [ ffprobe, "-v","quiet","-print_format","json","-show_format","-show_streams", filename] process = subprocess.Popen(command, stdout=subprocess.PIPE) out, err = process.communicate() video_stat = json.loads(out) stat = [] print video_stat if not len (video_stat) is 0 : if 'streams' in video_stat: logging.debug('isVideo ' + 'tested ' + filename ) if len (video_stat['streams']) >= 2: logging.debug('isVideo ' + 'tested the is Vidoe' + filename ) return video_stat return False def s3_get_key_list (config):
def video_segmenter (ffmpeg, filepath , folder , stream_name): if os.path.isfile(filepath): command = [ ffmpeg, "-re" ,"-i",filepath,"-map","0","-codec:v","libx264","-codec:a","libfdk_aac","-codec:s", "copy", "-flags","-global_header","-f","segment","-segment_list",folder+"playlist.m3u8","-segment_time","10","-segment_format","mpegts",folder + "out%05d.ts"] process = subprocess.Popen(command, stdout=subprocess.PIPE) out, err = process.communicate() print out def main (config): filelist =sorted( os.listdir(config["source"])) print filelist for filename in filelist: if isVideo (config["ffprobe"],config["source"] + filename): if filename.find('.') is -1: stream_name = filename.split('.',1) else : stream_name = filename video_segmenter (config["ffmpeg"], config["source"] + filename , config["tmp"] , stream_name) upload_list = sorted( os.listdir(config["tmp"])) if "playlist.m3u8" in upload_list and len (upload_list) > 2: for ufile in upload_list: logging.debug('main ' + 'procesed ' + " " + ufile ) if s3_file_upload (config,config["tmp"] + ufile, stream_name + "/" + ufile): logging.debug('main ' + "Upload " + stream_name + "/" + ufile + "################" ) os.remove(config["tmp"] + ufile) print s3_get_key_list (config) if __name__ == "__main__": main (config)
conn = S3Connection(config["aws"] ["access_key"], config["aws"] ["secret_key"]) mybucket = conn.get_bucket(config["aws"] ["s3bucket"]) key_list = [] for key in mybucket.list(): key_list.append(key.name) rs = conn.close() return key_list
tsx.tsx
import * as React from "react"; type cool = { name: string; number: number; }; const coolArr: cool[] = [ { name: "Bert", number: 133, }, { name: "Ernie", number: 101, }, ]; interface Shoe { size: number; name: string; } interface Food { type: "fruit" | "vegetable";
return { size: 10, name: "wes", }; } const unusedVariable; type CoolProps = { title: string; children: React.ReactNode; }; export default async function unreachableCode(): Promise<number> { return 10; } export function greatComponent({ title, children }: CoolProps) { return ( <div> <h1>{title}</h1> {children} </div> ); } const myShoe = generateSome<Shoe>(10); const banana: Food = { type: "fruit", name: "Banana", };
name: string; } function generateSome<T>(howMany: number): T {
Cert_9_2_18_RollBackActiveTimestamp.py
#!/usr/bin/env python3 # # Copyright (c) 2016, The OpenThread Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
# import unittest import config import thread_cert KEY1 = '00112233445566778899aabbccddeeff' KEY2 = 'ffeeddccbbaa99887766554433221100' CHANNEL_INIT = 19 PANID_INIT = 0xface COMMISSIONER = 1 LEADER = 2 ROUTER1 = 3 ROUTER2 = 4 ED1 = 5 SED1 = 6 MTDS = [ED1, SED1] class Cert_9_2_18_RollBackActiveTimestamp(thread_cert.TestCase): SUPPORT_NCP = False TOPOLOGY = { COMMISSIONER: { 'active_dataset': { 'timestamp': 1, 'panid': PANID_INIT, 'channel': CHANNEL_INIT, 'master_key': '00112233445566778899aabbccddeeff' }, 'mode': 'rsdn', 'router_selection_jitter': 1, 'whitelist': [LEADER] }, LEADER: { 'active_dataset': { 'timestamp': 1, 'panid': PANID_INIT, 'channel': CHANNEL_INIT, 'master_key': '00112233445566778899aabbccddeeff' }, 'mode': 'rsdn', 'partition_id': 0xffffffff, 'router_selection_jitter': 1, 'whitelist': [COMMISSIONER, ROUTER1] }, ROUTER1: { 'active_dataset': { 'timestamp': 1, 'panid': PANID_INIT, 'channel': CHANNEL_INIT, 'master_key': '00112233445566778899aabbccddeeff' }, 'mode': 'rsdn', 'router_selection_jitter': 1, 'whitelist': [LEADER, ROUTER2, ED1, SED1] }, ROUTER2: { 'active_dataset': { 'timestamp': 1, 'panid': PANID_INIT, 'channel': CHANNEL_INIT, 'master_key': '00112233445566778899aabbccddeeff' }, 'mode': 'rsdn', 'router_selection_jitter': 1, 'whitelist': [ROUTER1] }, ED1: { 'channel': CHANNEL_INIT, 'is_mtd': True, 'masterkey': '00112233445566778899aabbccddeeff', 'mode': 'rsn', 'panid': PANID_INIT, 'whitelist': [ROUTER1] }, SED1: { 'channel': CHANNEL_INIT, 'is_mtd': True, 'masterkey': '00112233445566778899aabbccddeeff', 'mode': 's', 'panid': PANID_INIT, 'timeout': config.DEFAULT_CHILD_TIMEOUT, 'whitelist': [ROUTER1] }, } def test(self):
if __name__ == '__main__': unittest.main()
self.nodes[LEADER].start() self.simulator.go(5) self.assertEqual(self.nodes[LEADER].get_state(), 'leader') self.nodes[COMMISSIONER].start() self.simulator.go(5) self.assertEqual(self.nodes[COMMISSIONER].get_state(), 'router') self.nodes[COMMISSIONER].commissioner_start() self.simulator.go(3) self.nodes[ROUTER1].start() self.simulator.go(5) self.assertEqual(self.nodes[ROUTER1].get_state(), 'router') self.nodes[ED1].start() self.simulator.go(5) self.assertEqual(self.nodes[ED1].get_state(), 'child') self.nodes[SED1].start() self.simulator.go(5) self.assertEqual(self.nodes[SED1].get_state(), 'child') self.nodes[COMMISSIONER].send_mgmt_active_set(active_timestamp=20000, network_name='GRL') self.simulator.go(5) self.nodes[COMMISSIONER].send_mgmt_pending_set( pending_timestamp=20, active_timestamp=20, delay_timer=20000, network_name='Shouldnotbe', ) self.simulator.go(5) self.nodes[COMMISSIONER].send_mgmt_pending_set( pending_timestamp=20, active_timestamp=20, delay_timer=20000, network_name='MyHouse', master_key=KEY2, ) self.simulator.go(310) self.assertEqual(self.nodes[COMMISSIONER].get_masterkey(), KEY2) self.assertEqual(self.nodes[LEADER].get_masterkey(), KEY2) self.assertEqual(self.nodes[ROUTER1].get_masterkey(), KEY2) self.assertEqual(self.nodes[ED1].get_masterkey(), KEY2) self.assertEqual(self.nodes[SED1].get_masterkey(), KEY2) self.assertEqual(self.nodes[ROUTER2].get_masterkey(), KEY1) self.nodes[ROUTER2].start() self.simulator.go(5) self.assertEqual(self.nodes[ROUTER2].get_state(), 'leader')
aws_iot_policy.go
// Code is generated. DO NOT EDIT. package aws import ( "context" "github.com/aws/aws-sdk-go-v2/service/iot" "github.com/jckuester/awstools-lib/aws" "github.com/jckuester/awstools-lib/terraform" ) func ListIotPolicy(client *aws.Client) ([]terraform.Resource, error) { req := client.Iotconn.ListPoliciesRequest(&iot.ListPoliciesInput{}) var result []terraform.Resource resp, err := req.Send(context.Background()) if err != nil
if len(resp.Policies) > 0 { for _, r := range resp.Policies { result = append(result, terraform.Resource{ Type: "aws_iot_policy", ID: *r.PolicyName, Profile: client.Profile, Region: client.Region, AccountID: client.AccountID, }) } } return result, nil }
{ return nil, err }
app.component.ts
import { Component, ViewChild, ElementRef } from '@angular/core'; import { jqxListBoxComponent } from '../../../../../jqwidgets-ts/angular_jqxlistbox'; @Component({ selector: 'app-root', templateUrl: './app.component.html' }) export class AppComponent { @ViewChild('selectionlog') selectionlog: ElementRef; url: string = "../sampledata/customers.txt"; // prepare the data source: any = { datatype: "json", datafields: [ { name: 'CompanyName' }, { name: 'ContactName' } ], id: 'id', url: this.url }; dataAdapter: any = new jqx.dataAdapter(this.source); select(event: any): void { if (event.args) { let item = event.args.item; if (item) { let valueElement = document.createElement('div');
labelElement.innerHTML = 'Label: ' + item.label; let selectionLog = this.selectionlog.nativeElement; selectionLog.innerHTML = ''; selectionLog.appendChild(labelElement); selectionLog.appendChild(valueElement); } } }; }
let labelElement = document.createElement('div'); valueElement.innerHTML = 'Value: ' + item.value;
mode-html_ruby.js
define("ace/mode/css_highlight_rules",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("../lib/lang"),s=e("./text_highlight_rules").TextHighlightRules,o=t.supportType="align-content|align-items|align-self|all|animation|animation-delay|animation-direction|animation-duration|animation-fill-mode|animation-iteration-count|animation-name|animation-play-state|animation-timing-function|backface-visibility|background|background-attachment|background-blend-mode|background-clip|background-color|background-image|background-origin|background-position|background-repeat|background-size|border|border-bottom|border-bottom-color|border-bottom-left-radius|border-bottom-right-radius|border-bottom-style|border-bottom-width|border-collapse|border-color|border-image|border-image-outset|border-image-repeat|border-image-slice|border-image-source|border-image-width|border-left|border-left-color|border-left-style|border-left-width|border-radius|border-right|border-right-color|border-right-style|border-right-width|border-spacing|border-style|border-top|border-top-color|border-top-left-radius|border-top-right-radius|border-top-style|border-top-width|border-width|bottom|box-shadow|box-sizing|caption-side|clear|clip|color|column-count|column-fill|column-gap|column-rule|column-rule-color|column-rule-style|column-rule-width|column-span|column-width|columns|content|counter-increment|counter-reset|cursor|direction|display|empty-cells|filter|flex|flex-basis|flex-direction|flex-flow|flex-grow|flex-shrink|flex-wrap|float|font|font-family|font-size|font-size-adjust|font-stretch|font-style|font-variant|font-weight|hanging-punctuation|height|justify-content|left|letter-spacing|line-height|list-style|list-style-image|list-style-position|list-style-type|margin|margin-bottom|margin-left|margin-right|margin-top|max-height|max-width|min-height|min-width|nav-down|nav-index|nav-left|nav-right|nav-up|opacity|order|ou
tline|outline-color|outline-offset|outline-style|outline-width|overflow|overflow-x|overflow-y|padding|padding-bottom|padding-left|padding-right|padding-top|page-break-after|page-break-before|page-break-inside|perspective|perspective-origin|position|quotes|resize|right|tab-size|table-layout|text-align|text-align-last|text-decoration|text-decoration-color|text-decoration-line|text-decoration-style|text-indent|text-justify|text-overflow|text-shadow|text-transform|top|transform|transform-origin|transform-style|transition|transition-delay|transition-duration|transition-property|transition-timing-function|unicode-bidi|vertical-align|visibility|white-space|width|word-break|word-spacing|word-wrap|z-index",u=t.supportFunction="rgb|rgba|url|attr|counter|counters",a=t.supportConstant="absolute|after-edge|after|all-scroll|all|alphabetic|always|antialiased|armenian|auto|avoid-column|avoid-page|avoid|balance|baseline|before-edge|before|below|bidi-override|block-line-height|block|bold|bolder|border-box|both|bottom|box|break-all|break-word|capitalize|caps-height|caption|center|central|char|circle|cjk-ideographic|clone|close-quote|col-resize|collapse|column|consider-shifts|contain|content-box|cover|crosshair|cubic-bezier|dashed|decimal-leading-zero|decimal|default|disabled|disc|disregard-shifts|distribute-all-lines|distribute-letter|distribute-space|distribute|dotted|double|e-resize|ease-in|ease-in-out|ease-out|ease|ellipsis|end|exclude-ruby|fill|fixed|georgian|glyphs|grid-height|groove|hand|hanging|hebrew|help|hidden|hiragana-iroha|hiragana|horizontal|icon|ideograph-alpha|ideograph-numeric|ideograph-parenthesis|ideograph-space|ideographic|inactive|include-ruby|inherit|initial|inline-block|inline-box|inline-line-height|inline-table|inline|inset|inside|inter-ideograph|inter-word|invert|italic|justify|katakana-iroha|katakana|keep-all|last|left|lighter|line-edge|line-through|line|linear|list-item|local|loose|lower-alpha|lower-greek|lower-latin|lower-roman|lowercase|lr-tb|ltr|mathematic
al|max-height|max-size|medium|menu|message-box|middle|move|n-resize|ne-resize|newspaper|no-change|no-close-quote|no-drop|no-open-quote|no-repeat|none|normal|not-allowed|nowrap|nw-resize|oblique|open-quote|outset|outside|overline|padding-box|page|pointer|pre-line|pre-wrap|pre|preserve-3d|progress|relative|repeat-x|repeat-y|repeat|replaced|reset-size|ridge|right|round|row-resize|rtl|s-resize|scroll|se-resize|separate|slice|small-caps|small-caption|solid|space|square|start|static|status-bar|step-end|step-start|steps|stretch|strict|sub|super|sw-resize|table-caption|table-cell|table-column-group|table-column|table-footer-group|table-header-group|table-row-group|table-row|table|tb-rl|text-after-edge|text-before-edge|text-bottom|text-size|text-top|text|thick|thin|transparent|underline|upper-alpha|upper-latin|upper-roman|uppercase|use-script|vertical-ideographic|vertical-text|visible|w-resize|wait|whitespace|z-index|zero",f=t.supportConstantColor="aqua|black|blue|fuchsia|gray|green|lime|maroon|navy|olive|orange|purple|red|silver|teal|white|yellow",l=t.supportConstantFonts="arial|century|comic|courier|cursive|fantasy|garamond|georgia|helvetica|impact|lucida|symbol|system|tahoma|times|trebuchet|utopia|verdana|webdings|sans-serif|serif|monospace",c=t.numRe="\\-?(?:(?:[0-9]+)|(?:[0-9]*\\.[0-9]+))",h=t.pseudoElements="(\\:+)\\b(after|before|first-letter|first-line|moz-selection|selection)\\b",p=t.pseudoClasses="(:)\\b(active|checked|disabled|empty|enabled|first-child|first-of-type|focus|hover|indeterminate|invalid|last-child|last-of-type|link|not|nth-child|nth-last-child|nth-last-of-type|nth-of-type|only-child|only-of-type|required|root|target|valid|visited)\\b",d=function(){var 
e=this.createKeywordMapper({"support.function":u,"support.constant":a,"support.type":o,"support.constant.color":f,"support.constant.fonts":l},"text",!0);this.$rules={start:[{token:"comment",regex:"\\/\\*",push:"comment"},{token:"paren.lparen",regex:"\\{",push:"ruleset"},{token:"string",regex:"@.*?{",push:"media"},{token:"keyword",regex:"#[a-z0-9-_]+"},{token:"variable",regex:"\\.[a-z0-9-_]+"},{token:"string",regex:":[a-z0-9-_]+"},{token:"constant",regex:"[a-z0-9-_]+"},{caseInsensitive:!0}],media:[{token:"comment",regex:"\\/\\*",push:"comment"},{token:"paren.lparen",regex:"\\{",push:"ruleset"},{token:"string",regex:"\\}",next:"pop"},{token:"keyword",regex:"#[a-z0-9-_]+"},{token:"variable",regex:"\\.[a-z0-9-_]+"},{token:"string",regex:":[a-z0-9-_]+"},{token:"constant",regex:"[a-z0-9-_]+"},{caseInsensitive:!0}],comment:[{token:"comment",regex:"\\*\\/",next:"pop"},{defaultToken:"comment"}],ruleset:[{token:"paren.rparen",regex:"\\}",next:"pop"},{token:"comment",regex:"\\/\\*",push:"comment"},{token:"string",regex:'["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'},{token:"string",regex:"['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"},{token:["constant.numeric","keyword"],regex:"("+c+")(ch|cm|deg|em|ex|fr|gd|grad|Hz|in|kHz|mm|ms|pc|pt|px|rad|rem|s|turn|vh|vm|vw|%)"},{token:"constant.numeric",regex:c},{token:"constant.numeric",regex:"#[a-f0-9]{6}"},{token:"constant.numeric",regex:"#[a-f0-9]{3}"},{token:["punctuation","entity.other.attribute-name.pseudo-element.css"],regex:h},{token:["punctuation","entity.other.attribute-name.pseudo-class.css"],regex:p},{token:["support.function","string","support.function"],regex:"(url\\()(.*)(\\))"},{token:e,regex:"\\-?[a-zA-Z_][a-zA-Z0-9_\\-]*"},{caseInsensitive:!0}]},this.normalizeRules()};r.inherits(d,s),t.CssHighlightRules=d}),define("ace/mode/doc_comment_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var 
r=e("../lib/oop"),i=e("./text_highlight_rules").TextHighlightRules,s=function(){this.$rules={start:[{token:"comment.doc.tag",regex:"@[\\w\\d_]+"},s.getTagRule(),{defaultToken:"comment.doc",caseInsensitive:!0}]}};r.inherits(s,i),s.getTagRule=function(e){return{token:"comment.doc.tag.storage.type",regex:"\\b(?:TODO|FIXME|XXX|HACK)\\b"}},s.getStartRule=function(e){return{token:"comment.doc",regex:"\\/\\*(?=\\*)",next:e}},s.getEndRule=function(e){return{token:"comment.doc",regex:"\\*\\/",next:e}},t.DocCommentHighlightRules=s}),define("ace/mode/javascript_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/doc_comment_highlight_rules","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";function a(){var e=o.replace("\\d","\\d\\-"),t={onMatch:function(e,t,n){var r=e.charAt(1)=="/"?2:1;if(r==1)t!=this.nextState?n.unshift(this.next,this.nextState,0):n.unshift(this.next),n[2]++;else if(r==2&&t==this.nextState){n[1]--;if(!n[1]||n[1]<0)n.shift(),n.shift()}return[{type:"meta.tag.punctuation."+(r==1?"":"end-")+"tag-open.xml",value:e.slice(0,r)},{type:"meta.tag.tag-name.xml",value:e.substr(r)}]},regex:"</?"+e+"",next:"jsxAttributes",nextState:"jsx"};this.$rules.start.unshift(t);var n={regex:"{",token:"paren.quasi.start",push:"start"};this.$rules.jsx=[n,t,{include:"reference"},{defaultToken:"string"}],this.$rules.jsxAttributes=[{token:"meta.tag.punctuation.tag-close.xml",regex:"/?>",onMatch:function(e,t,n){return 
t==n[0]&&n.shift(),e.length==2&&(n[0]==this.nextState&&n[1]--,(!n[1]||n[1]<0)&&n.splice(0,2)),this.next=n[0]||"start",[{type:this.token,value:e}]},nextState:"jsx"},n,f("jsxAttributes"),{token:"entity.other.attribute-name.xml",regex:e},{token:"keyword.operator.attribute-equals.xml",regex:"="},{token:"text.tag-whitespace.xml",regex:"\\s+"},{token:"string.attribute-value.xml",regex:"'",stateName:"jsx_attr_q",push:[{token:"string.attribute-value.xml",regex:"'",next:"pop"},{include:"reference"},{defaultToken:"string.attribute-value.xml"}]},{token:"string.attribute-value.xml",regex:'"',stateName:"jsx_attr_qq",push:[{token:"string.attribute-value.xml",regex:'"',next:"pop"},{include:"reference"},{defaultToken:"string.attribute-value.xml"}]},t],this.$rules.reference=[{token:"constant.language.escape.reference.xml",regex:"(?:&#[0-9]+;)|(?:&#x[0-9a-fA-F]+;)|(?:&[a-zA-Z0-9_:\\.-]+;)"}]}function f(e){return[{token:"comment",regex:/\/\*/,next:[i.getTagRule(),{token:"comment",regex:"\\*\\/",next:e||"pop"},{defaultToken:"comment",caseInsensitive:!0}]},{token:"comment",regex:"\\/\\/",next:[i.getTagRule(),{token:"comment",regex:"$|^",next:e||"pop"},{defaultToken:"comment",caseInsensitive:!0}]}]}var r=e("../lib/oop"),i=e("./doc_comment_highlight_rules").DocCommentHighlightRules,s=e("./text_highlight_rules").TextHighlightRules,o="[a-zA-Z\\$_\u00a1-\uffff][a-zA-Z\\d\\$_\u00a1-\uffff]*",u=function(e){var 
t=this.createKeywordMapper({"variable.language":"Array|Boolean|Date|Function|Iterator|Number|Object|RegExp|String|Proxy|Namespace|QName|XML|XMLList|ArrayBuffer|Float32Array|Float64Array|Int16Array|Int32Array|Int8Array|Uint16Array|Uint32Array|Uint8Array|Uint8ClampedArray|Error|EvalError|InternalError|RangeError|ReferenceError|StopIteration|SyntaxError|TypeError|URIError|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|JSON|Math|this|arguments|prototype|window|document",keyword:"const|yield|import|get|set|async|await|break|case|catch|continue|default|delete|do|else|finally|for|function|if|in|instanceof|new|return|switch|throw|try|typeof|let|var|while|with|debugger|__parent__|__count__|escape|unescape|with|__proto__|class|enum|extends|super|export|implements|private|public|interface|package|protected|static","storage.type":"const|let|var|function","constant.language":"null|Infinity|NaN|undefined","support.function":"alert","constant.language.boolean":"true|false"},"identifier"),n="case|do|else|finally|in|instanceof|return|throw|try|typeof|yield|void",r="\\\\(?:x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|u{[0-9a-fA-F]{1,6}}|[0-2][0-7]{0,2}|3[0-7][0-7]?|[4-7][0-7]?|.)";this.$rules={no_regex:[i.getStartRule("doc-start"),f("no_regex"),{token:"string",regex:"'(?=.)",next:"qstring"},{token:"string",regex:'"(?=.)',next:"qqstring"},{token:"constant.numeric",regex:/0(?:[xX][0-9a-fA-F]+|[bB][01]+)\b/},{token:"constant.numeric",regex:/[+-]?\d[\d_]*(?:(?:\.\d*)?(?:[eE][+-]?\d+)?)?\b/},{token:["storage.type","punctuation.operator","support.function","punctuation.operator","entity.name.function","text","keyword.operator"],regex:"("+o+")(\\.)(prototype)(\\.)("+o+")(\\s*)(=)",next:"function_arguments"},{token:["storage.type","punctuation.operator","entity.name.function","text","keyword.operator","text","storage.type","text","paren.lparen"],regex:"("+o+")(\\.)("+o+")(\\s*)(=)(\\s*)(function)(\\s*)(\\()",next:"function_arguments"},{token:["entity.nam
e.function","text","keyword.operator","text","storage.type","text","paren.lparen"],regex:"("+o+")(\\s*)(=)(\\s*)(function)(\\s*)(\\()",next:"function_arguments"},{token:["storage.type","punctuation.operator","entity.name.function","text","keyword.operator","text","storage.type","text","entity.name.function","text","paren.lparen"],regex:"("+o+")(\\.)("+o+")(\\s*)(=)(\\s*)(function)(\\s+)(\\w+)(\\s*)(\\()",next:"function_arguments"},{token:["storage.type","text","entity.name.function","text","paren.lparen"],regex:"(function)(\\s+)("+o+")(\\s*)(\\()",next:"function_arguments"},{token:["entity.name.function","text","punctuation.operator","text","storage.type","text","paren.lparen"],regex:"("+o+")(\\s*)(:)(\\s*)(function)(\\s*)(\\()",next:"function_arguments"},{token:["text","text","storage.type","text","paren.lparen"],regex:"(:)(\\s*)(function)(\\s*)(\\()",next:"function_arguments"},{token:"keyword",regex:"(?:"+n+")\\b",next:"start"},{token:["support.constant"],regex:/that\b/},{token:["storage.type","punctuation.operator","support.function.firebug"],regex:/(console)(\.)(warn|info|log|error|time|trace|timeEnd|assert)\b/},{token:t,regex:o},{token:"punctuation.operator",regex:/[.](?![.])/,next:"property"},{token:"keyword.operator",regex:/--|\+\+|\.{3}|===|==|=|!=|!==|<+=?|>+=?|!|&&|\|\||\?:|[!$%&*+\-~\/^]=?/,next:"start"},{token:"punctuation.operator",regex:/[?:,;.]/,next:"start"},{token:"paren.lparen",regex:/[\[({]/,next:"start"},{token:"paren.rparen",regex:/[\])}]/},{token:"comment",regex:/^#!.*$/}],property:[{token:"text",regex:"\\s+"},{token:["storage.type","punctuation.operator","entity.name.function","text","keyword.operator","text","storage.type","text","entity.name.function","text","paren.lparen"],regex:"("+o+")(\\.)("+o+")(\\s*)(=)(\\s*)(function)(?:(\\s+)(\\w+))?(\\s*)(\\()",next:"function_arguments"},{token:"punctuation.operator",regex:/[.](?![.])/},{token:"support.function",regex:/(s(?:h(?:ift|ow(?:Mod(?:elessDialog|alDialog)|Help))|croll(?:X|By(?:Pages|Lines)?
|Y|To)?|t(?:op|rike)|i(?:n|zeToContent|debar|gnText)|ort|u(?:p|b(?:str(?:ing)?)?)|pli(?:ce|t)|e(?:nd|t(?:Re(?:sizable|questHeader)|M(?:i(?:nutes|lliseconds)|onth)|Seconds|Ho(?:tKeys|urs)|Year|Cursor|Time(?:out)?|Interval|ZOptions|Date|UTC(?:M(?:i(?:nutes|lliseconds)|onth)|Seconds|Hours|Date|FullYear)|FullYear|Active)|arch)|qrt|lice|avePreferences|mall)|h(?:ome|andleEvent)|navigate|c(?:har(?:CodeAt|At)|o(?:s|n(?:cat|textual|firm)|mpile)|eil|lear(?:Timeout|Interval)?|a(?:ptureEvents|ll)|reate(?:StyleSheet|Popup|EventObject))|t(?:o(?:GMTString|S(?:tring|ource)|U(?:TCString|pperCase)|Lo(?:caleString|werCase))|est|a(?:n|int(?:Enabled)?))|i(?:s(?:NaN|Finite)|ndexOf|talics)|d(?:isableExternalCapture|ump|etachEvent)|u(?:n(?:shift|taint|escape|watch)|pdateCommands)|j(?:oin|avaEnabled)|p(?:o(?:p|w)|ush|lugins.refresh|a(?:ddings|rse(?:Int|Float)?)|r(?:int|ompt|eference))|e(?:scape|nableExternalCapture|val|lementFromPoint|x(?:p|ec(?:Script|Command)?))|valueOf|UTC|queryCommand(?:State|Indeterm|Enabled|Value)|f(?:i(?:nd|le(?:ModifiedDate|Size|CreatedDate|UpdatedDate)|xed)|o(?:nt(?:size|color)|rward)|loor|romCharCode)|watch|l(?:ink|o(?:ad|g)|astIndexOf)|a(?:sin|nchor|cos|t(?:tachEvent|ob|an(?:2)?)|pply|lert|b(?:s|ort))|r(?:ou(?:nd|teEvents)|e(?:size(?:By|To)|calc|turnValue|place|verse|l(?:oad|ease(?:Capture|Events)))|andom)|g(?:o|et(?:ResponseHeader|M(?:i(?:nutes|lliseconds)|onth)|Se(?:conds|lection)|Hours|Year|Time(?:zoneOffset)?|Da(?:y|te)|UTC(?:M(?:i(?:nutes|lliseconds)|onth)|Seconds|Hours|Da(?:y|te)|FullYear)|FullYear|A(?:ttention|llResponseHeaders)))|m(?:in|ove(?:B(?:y|elow)|To(?:Absolute)?|Above)|ergeAttributes|a(?:tch|rgins|x))|b(?:toa|ig|o(?:ld|rderWidths)|link|ack))\b(?=\()/},{token:"support.function.dom",regex:/(s(?:ub(?:stringData|mit)|plitText|e(?:t(?:NamedItem|Attribute(?:Node)?)|lect))|has(?:ChildNodes|Feature)|namedItem|c(?:l(?:ick|o(?:se|neNode))|reate(?:C(?:omment|DATASection|aption)|T(?:Head|extNode|Foot)|DocumentFragment|ProcessingInstruction|E(?:ntityReference|
lement)|Attribute))|tabIndex|i(?:nsert(?:Row|Before|Cell|Data)|tem)|open|delete(?:Row|C(?:ell|aption)|T(?:Head|Foot)|Data)|focus|write(?:ln)?|a(?:dd|ppend(?:Child|Data))|re(?:set|place(?:Child|Data)|move(?:NamedItem|Child|Attribute(?:Node)?)?)|get(?:NamedItem|Element(?:sBy(?:Name|TagName|ClassName)|ById)|Attribute(?:Node)?)|blur)\b(?=\()/},{token:"support.constant",regex:/(s(?:ystemLanguage|cr(?:ipts|ollbars|een(?:X|Y|Top|Left))|t(?:yle(?:Sheets)?|atus(?:Text|bar)?)|ibling(?:Below|Above)|ource|uffixes|e(?:curity(?:Policy)?|l(?:ection|f)))|h(?:istory|ost(?:name)?|as(?:h|Focus))|y|X(?:MLDocument|SLDocument)|n(?:ext|ame(?:space(?:s|URI)|Prop))|M(?:IN_VALUE|AX_VALUE)|c(?:haracterSet|o(?:n(?:structor|trollers)|okieEnabled|lorDepth|mp(?:onents|lete))|urrent|puClass|l(?:i(?:p(?:boardData)?|entInformation)|osed|asses)|alle(?:e|r)|rypto)|t(?:o(?:olbar|p)|ext(?:Transform|Indent|Decoration|Align)|ags)|SQRT(?:1_2|2)|i(?:n(?:ner(?:Height|Width)|put)|ds|gnoreCase)|zIndex|o(?:scpu|n(?:readystatechange|Line)|uter(?:Height|Width)|p(?:sProfile|ener)|ffscreenBuffering)|NEGATIVE_INFINITY|d(?:i(?:splay|alog(?:Height|Top|Width|Left|Arguments)|rectories)|e(?:scription|fault(?:Status|Ch(?:ecked|arset)|View)))|u(?:ser(?:Profile|Language|Agent)|n(?:iqueID|defined)|pdateInterval)|_content|p(?:ixelDepth|ort|ersonalbar|kcs11|l(?:ugins|atform)|a(?:thname|dding(?:Right|Bottom|Top|Left)|rent(?:Window|Layer)?|ge(?:X(?:Offset)?|Y(?:Offset)?))|r(?:o(?:to(?:col|type)|duct(?:Sub)?|mpter)|e(?:vious|fix)))|e(?:n(?:coding|abledPlugin)|x(?:ternal|pando)|mbeds)|v(?:isibility|endor(?:Sub)?|Linkcolor)|URLUnencoded|P(?:I|OSITIVE_INFINITY)|f(?:ilename|o(?:nt(?:Size|Family|Weight)|rmName)|rame(?:s|Element)|gColor)|E|whiteSpace|l(?:i(?:stStyleType|n(?:eHeight|kColor))|o(?:ca(?:tion(?:bar)?|lName)|wsrc)|e(?:ngth|ft(?:Context)?)|a(?:st(?:M(?:odified|atch)|Index|Paren)|yer(?:s|X)|nguage))|a(?:pp(?:MinorVersion|Name|Co(?:deName|re)|Version)|vail(?:Height|Top|Width|Left)|ll|r(?:ity|guments)|Linkcolor|bove)|r(?:ight(?:
Context)?|e(?:sponse(?:XML|Text)|adyState))|global|x|m(?:imeTypes|ultiline|enubar|argin(?:Right|Bottom|Top|Left))|L(?:N(?:10|2)|OG(?:10E|2E))|b(?:o(?:ttom|rder(?:Width|RightWidth|BottomWidth|Style|Color|TopWidth|LeftWidth))|ufferDepth|elow|ackground(?:Color|Image)))\b/},{token:"identifier",regex:o},{regex:"",token:"empty",next:"no_regex"}],start:[i.getStartRule("doc-start"),f("start"),{token:"string.regexp",regex:"\\/",next:"regex"},{token:"text",regex:"\\s+|^$",next:"start"},{token:"empty",regex:"",next:"no_regex"}],regex:[{token:"regexp.keyword.operator",regex:"\\\\(?:u[\\da-fA-F]{4}|x[\\da-fA-F]{2}|.)"},{token:"string.regexp",regex:"/[sxngimy]*",next:"no_regex"},{token:"invalid",regex:/\{\d+\b,?\d*\}[+*]|[+*$^?][+*]|[$^][?]|\?{3,}/},{token:"constant.language.escape",regex:/\(\?[:=!]|\)|\{\d+\b,?\d*\}|[+*]\?|[()$^+*?.]/},{token:"constant.language.delimiter",regex:/\|/},{token:"constant.language.escape",regex:/\[\^?/,next:"regex_character_class"},{token:"empty",regex:"$",next:"no_regex"},{defaultToken:"string.regexp"}],regex_character_class:[{token:"regexp.charclass.keyword.operator",regex:"\\\\(?:u[\\da-fA-F]{4}|x[\\da-fA-F]{2}|.)"},{token:"constant.language.escape",regex:"]",next:"regex"},{token:"constant.language.escape",regex:"-"},{token:"empty",regex:"$",next:"no_regex"},{defaultToken:"string.regexp.charachterclass"}],function_arguments:[{token:"variable.parameter",regex:o},{token:"punctuation.operator",regex:"[, 
]+"},{token:"punctuation.operator",regex:"$"},{token:"empty",regex:"",next:"no_regex"}],qqstring:[{token:"constant.language.escape",regex:r},{token:"string",regex:"\\\\$",next:"qqstring"},{token:"string",regex:'"|$',next:"no_regex"},{defaultToken:"string"}],qstring:[{token:"constant.language.escape",regex:r},{token:"string",regex:"\\\\$",next:"qstring"},{token:"string",regex:"'|$",next:"no_regex"},{defaultToken:"string"}]};if(!e||!e.noES6)this.$rules.no_regex.unshift({regex:"[{}]",onMatch:function(e,t,n){this.next=e=="{"?this.nextState:"";if(e=="{"&&n.length)n.unshift("start",t);else if(e=="}"&&n.length){n.shift(),this.next=n.shift();if(this.next.indexOf("string")!=-1||this.next.indexOf("jsx")!=-1)return"paren.quasi.end"}return e=="{"?"paren.lparen":"paren.rparen"},nextState:"start"},{token:"string.quasi.start",regex:/`/,push:[{token:"constant.language.escape",regex:r},{token:"paren.quasi.start",regex:/\${/,push:"start"},{token:"string.quasi.end",regex:/`/,next:"pop"},{defaultToken:"string.quasi"}]}),(!e||e.jsx!=0)&&a.call(this);this.embedRules(i,"doc-",[i.getEndRule("no_regex")]),this.normalizeRules()};r.inherits(u,s),t.JavaScriptHighlightRules=u}),define("ace/mode/xml_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text_highlight_rules").TextHighlightRules,s=function(e){var 
t="[_:a-zA-Z\u00c0-\uffff][-_:.a-zA-Z0-9\u00c0-\uffff]*";this.$rules={start:[{token:"string.cdata.xml",regex:"<\\!\\[CDATA\\[",next:"cdata"},{token:["punctuation.xml-decl.xml","keyword.xml-decl.xml"],regex:"(<\\?)(xml)(?=[\\s])",next:"xml_decl",caseInsensitive:!0},{token:["punctuation.instruction.xml","keyword.instruction.xml"],regex:"(<\\?)("+t+")",next:"processing_instruction"},{token:"comment.xml",regex:"<\\!--",next:"comment"},{token:["xml-pe.doctype.xml","xml-pe.doctype.xml"],regex:"(<\\!)(DOCTYPE)(?=[\\s])",next:"doctype",caseInsensitive:!0},{include:"tag"},{token:"text.end-tag-open.xml",regex:"</"},{token:"text.tag-open.xml",regex:"<"},{include:"reference"},{defaultToken:"text.xml"}],xml_decl:[{token:"entity.other.attribute-name.decl-attribute-name.xml",regex:"(?:"+t+":)?"+t+""},{token:"keyword.operator.decl-attribute-equals.xml",regex:"="},{include:"whitespace"},{include:"string"},{token:"punctuation.xml-decl.xml",regex:"\\?>",next:"start"}],processing_instruction:[{token:"punctuation.instruction.xml",regex:"\\?>",next:"start"},{defaultToken:"instruction.xml"}],doctype:[{include:"whitespace"},{include:"string"},{token:"xml-pe.doctype.xml",regex:">",next:"start"},{token:"xml-pe.xml",regex:"[-_a-zA-Z0-9:]+"},{token:"punctuation.int-subset",regex:"\\[",push:"int_subset"}],int_subset:[{token:"text.xml",regex:"\\s+"},{token:"punctuation.int-subset.xml",regex:"]",next:"pop"},{token:["punctuation.markup-decl.xml","keyword.markup-decl.xml"],regex:"(<\\!)("+t+")",push:[{token:"text",regex:"\\s+"},{token:"punctuation.markup-decl.xml",regex:">",next:"pop"},{include:"string"}]}],cdata:[{token:"string.cdata.xml",regex:"\\]\\]>",next:"start"},{token:"text.xml",regex:"\\s+"},{token:"text.xml",regex:"(?:[^\\]]|\\](?!\\]>))+"}],comment:[{token:"comment.xml",regex:"-->",next:"start"},{defaultToken:"comment.xml"}],reference:[{token:"constant.language.escape.reference.xml",regex:"(?:&#[0-9]+;)|(?:&#x[0-9a-fA-F]+;)|(?:&[a-zA-Z0-9_:\\.-]+;)"}],attr_reference:[{token:"constant.lan
guage.escape.reference.attribute-value.xml",regex:"(?:&#[0-9]+;)|(?:&#x[0-9a-fA-F]+;)|(?:&[a-zA-Z0-9_:\\.-]+;)"}],tag:[{token:["meta.tag.punctuation.tag-open.xml","meta.tag.punctuation.end-tag-open.xml","meta.tag.tag-name.xml"],regex:"(?:(<)|(</))((?:"+t+":)?"+t+")",next:[{include:"attributes"},{token:"meta.tag.punctuation.tag-close.xml",regex:"/?>",next:"start"}]}],tag_whitespace:[{token:"text.tag-whitespace.xml",regex:"\\s+"}],whitespace:[{token:"text.whitespace.xml",regex:"\\s+"}],string:[{token:"string.xml",regex:"'",push:[{token:"string.xml",regex:"'",next:"pop"},{defaultToken:"string.xml"}]},{token:"string.xml",regex:'"',push:[{token:"string.xml",regex:'"',next:"pop"},{defaultToken:"string.xml"}]}],attributes:[{token:"entity.other.attribute-name.xml",regex:"(?:"+t+":)?"+t+""},{token:"keyword.operator.attribute-equals.xml",regex:"="},{include:"tag_whitespace"},{include:"attribute_value"}],attribute_value:[{token:"string.attribute-value.xml",regex:"'",push:[{token:"string.attribute-value.xml",regex:"'",next:"pop"},{include:"attr_reference"},{defaultToken:"string.attribute-value.xml"}]},{token:"string.attribute-value.xml",regex:'"',push:[{token:"string.attribute-value.xml",regex:'"',next:"pop"},{include:"attr_reference"},{defaultToken:"string.attribute-value.xml"}]}]},this.constructor===s&&this.normalizeRules()};(function(){this.embedTagRules=function(e,t,n){this.$rules.tag.unshift({token:["meta.tag.punctuation.tag-open.xml","meta.tag."+n+".tag-name.xml"],regex:"(<)("+n+"(?=\\s|>|$))",next:[{include:"attributes"},{token:"meta.tag.punctuation.tag-close.xml",regex:"/?>",next:t+"start"}]}),this.$rules[n+"-end"]=[{include:"attributes"},{token:"meta.tag.punctuation.tag-close.xml",regex:"/?>",next:"start",onMatch:function(e,t,n){return 
n.splice(0),this.token}}],this.embedRules(e,t,[{token:["meta.tag.punctuation.end-tag-open.xml","meta.tag."+n+".tag-name.xml"],regex:"(</)("+n+"(?=\\s|>|$))",next:n+"-end"},{token:"string.cdata.xml",regex:"<\\!\\[CDATA\\["},{token:"string.cdata.xml",regex:"\\]\\]>"}])}}).call(i.prototype),r.inherits(s,i),t.XmlHighlightRules=s}),define("ace/mode/html_highlight_rules",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/mode/css_highlight_rules","ace/mode/javascript_highlight_rules","ace/mode/xml_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("../lib/lang"),s=e("./css_highlight_rules").CssHighlightRules,o=e("./javascript_highlight_rules").JavaScriptHighlightRules,u=e("./xml_highlight_rules").XmlHighlightRules,a=i.createMap({a:"anchor",button:"form",form:"form",img:"image",input:"form",label:"form",option:"form",script:"script",select:"form",textarea:"form",style:"style",table:"table",tbody:"table",td:"table",tfoot:"table",th:"table",tr:"table"}),f=function(){u.call(this),this.addRules({attributes:[{include:"tag_whitespace"},{token:"entity.other.attribute-name.xml",regex:"[-_a-zA-Z0-9:.]+"},{token:"keyword.operator.attribute-equals.xml",regex:"=",push:[{include:"tag_whitespace"},{token:"string.unquoted.attribute-value.html",regex:"[^<>='\"`\\s]+",next:"pop"},{token:"empty",regex:"",next:"pop"}]},{include:"attribute_value"}],tag:[{token:function(e,t){var n=a[t];return["meta.tag.punctuation."+(e=="<"?"":"end-")+"tag-open.xml","meta.tag"+(n?"."+n:"")+".tag-name.xml"]},regex:"(</?)([-_a-zA-Z0-9:.]+)",next:"tag_stuff"}],tag_stuff:[{include:"attributes"},{token:"meta.tag.punctuation.tag-close.xml",regex:"/?>",next:"start"}]}),this.embedTagRules(s,"css-","style"),this.embedTagRules((new 
o({jsx:!1})).getRules(),"js-","script"),this.constructor===f&&this.normalizeRules()};r.inherits(f,u),t.HtmlHighlightRules=f}),define("ace/mode/ruby_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text_highlight_rules").TextHighlightRules,s=t.constantOtherSymbol={token:"constant.other.symbol.ruby",regex:"[:](?:[A-Za-z_]|[@$](?=[a-zA-Z0-9_]))[a-zA-Z0-9_]*[!=?]?"},o=t.qString={token:"string",regex:"['](?:(?:\\\\.)|(?:[^'\\\\]))*?[']"},u=t.qqString={token:"string",regex:'["](?:(?:\\\\.)|(?:[^"\\\\]))*?["]'},a=t.tString={token:"string",regex:"[`](?:(?:\\\\.)|(?:[^'\\\\]))*?[`]"},f=t.constantNumericHex={token:"constant.numeric",regex:"0[xX][0-9a-fA-F](?:[0-9a-fA-F]|_(?=[0-9a-fA-F]))*\\b"},l=t.constantNumericFloat={token:"constant.numeric",regex:"[+-]?\\d(?:\\d|_(?=\\d))*(?:(?:\\.\\d(?:\\d|_(?=\\d))*)?(?:[eE][+-]?\\d+)?)?\\b"},c=function(){var e="abort|Array|assert|assert_equal|assert_not_equal|assert_same|assert_not_same|assert_nil|assert_not_nil|assert_match|assert_no_match|assert_in_delta|assert_throws|assert_raise|assert_nothing_raised|assert_instance_of|assert_kind_of|assert_respond_to|assert_operator|assert_send|assert_difference|assert_no_difference|assert_recognizes|assert_generates|assert_response|assert_redirected_to|assert_template|assert_select|assert_select_email|assert_select_rjs|assert_select_encoded|css_select|at_exit|attr|attr_writer|attr_reader|attr_accessor|attr_accessible|autoload|binding|block_given?|callcc|caller|catch|chomp|chomp!|chop|chop!|defined?|delete_via_redirect|eval|exec|exit|exit!|fail|Float|flunk|follow_redirect!|fork|form_for|form_tag|format|gets|global_variables|gsub|gsub!|get_via_redirect|host!|https?|https!|include|Integer|lambda|link_to|link_to_unless_current|link_to_function|link_to_remote|load|local_variables|loop|open|open_session|p|print|printf|proc|putc|puts|post_via_redirect|put_via_redirect|raise|rand|raw|readline|readlines|r
edirect?|request_via_redirect|require|scan|select|set_trace_func|sleep|split|sprintf|srand|String|stylesheet_link_tag|syscall|system|sub|sub!|test|throw|trace_var|trap|untrace_var|atan2|cos|exp|frexp|ldexp|log|log10|sin|sqrt|tan|render|javascript_include_tag|csrf_meta_tag|label_tag|text_field_tag|submit_tag|check_box_tag|content_tag|radio_button_tag|text_area_tag|password_field_tag|hidden_field_tag|fields_for|select_tag|options_for_select|options_from_collection_for_select|collection_select|time_zone_select|select_date|select_time|select_datetime|date_select|time_select|datetime_select|select_year|select_month|select_day|select_hour|select_minute|select_second|file_field_tag|file_field|respond_to|skip_before_filter|around_filter|after_filter|verify|protect_from_forgery|rescue_from|helper_method|redirect_to|before_filter|send_data|send_file|validates_presence_of|validates_uniqueness_of|validates_length_of|validates_format_of|validates_acceptance_of|validates_associated|validates_exclusion_of|validates_inclusion_of|validates_numericality_of|validates_with|validates_each|authenticate_or_request_with_http_basic|authenticate_or_request_with_http_digest|filter_parameter_logging|match|get|post|resources|redirect|scope|assert_routing|translate|localize|extract_locale_from_tld|caches_page|expire_page|caches_action|expire_action|cache|expire_fragment|expire_cache_for|observe|cache_sweeper|has_many|has_one|belongs_to|has_and_belongs_to_many",t="alias|and|BEGIN|begin|break|case|class|def|defined|do|else|elsif|END|end|ensure|__FILE__|finally|for|gem|if|in|__LINE__|module|next|not|or|private|protected|public|redo|rescue|retry|return|super|then|undef|unless|until|when|while|yield",n="true|TRUE|false|FALSE|nil|NIL|ARGF|ARGV|DATA|ENV|RUBY_PLATFORM|RUBY_RELEASE_DATE|RUBY_VERSION|STDERR|STDIN|STDOUT|TOPLEVEL_BINDING",r="$DEBUG|$defout|$FILENAME|$LOAD_PATH|$SAFE|$stdin|$stdout|$stderr|$VERBOSE|$!|root_url|flash|session|cookies|params|request|response|logger|self",i=this.$keywords=this.
createKeywordMapper({keyword:t,"constant.language":n,"variable.language":r,"support.function":e,"invalid.deprecated":"debugger"},"identifier");this.$rules={start:[{token:"comment",regex:"#.*$"},{token:"comment",regex:"^=begin(?:$|\\s.*$)",next:"comment"},{token:"string.regexp",regex:"[/](?:(?:\\[(?:\\\\]|[^\\]])+\\])|(?:\\\\/|[^\\]/]))*[/]\\w*\\s*(?=[).,;]|$)"},[{regex:"[{}]",onMatch:function(e,t,n){this.next=e=="{"?this.nextState:"";if(e=="{"&&n.length)return n.unshift("start",t),"paren.lparen";if(e=="}"&&n.length){n.shift(),this.next=n.shift();if(this.next.indexOf("string")!=-1)return"paren.end"}return e=="{"?"paren.lparen":"paren.rparen"},nextState:"start"},{token:"string.start",regex:/"/,push:[{token:"constant.language.escape",regex:/\\(?:[nsrtvfbae'"\\]|c.|C-.|M-.(?:\\C-.)?|[0-7]{3}|x[\da-fA-F]{2}|u[\da-fA-F]{4})/},{token:"paren.start",regex:/#{/,push:"start"},{token:"string.end",regex:/"/,next:"pop"},{defaultToken:"string"}]},{token:"string.start",regex:/`/,push:[{token:"constant.language.escape",regex:/\\(?:[nsrtvfbae'"\\]|c.|C-.|M-.(?:\\C-.)?|[0-7]{3}|x[\da-fA-F]{2}|u[\da-fA-F]{4})/},{token:"paren.start",regex:/#{/,push:"start"},{token:"string.end",regex:/`/,next:"pop"},{defaultToken:"string"}]},{token:"string.start",regex:/'/,push:[{token:"constant.language.escape",regex:/\\['\\]/},{token:"string.end",regex:/'/,next:"pop"},{defaultToken:"string"}]}],{token:"text",regex:"::"},{token:"variable.instance",regex:"@{1,2}[a-zA-Z_\\d]+"},{token:"support.class",regex:"[A-Z][a-zA-Z_\\d]+"},s,f,l,{token:"constant.language.boolean",regex:"(?:true|false)\\b"},{token:i,regex:"[a-zA-Z_$][a-zA-Z0-9_$]*\\b"},{token:"punctuation.separator.key-value",regex:"=>"},{stateName:"heredoc",onMatch:function(e,t,n){var r=e[2]=="-"?"indentedHeredoc":"heredoc",i=e.split(this.splitRegex);return 
n.push(r,i[3]),[{type:"constant",value:i[1]},{type:"string",value:i[2]},{type:"support.class",value:i[3]},{type:"string",value:i[4]}]},regex:"(<<-?)(['\"`]?)([\\w]+)(['\"`]?)",rules:{heredoc:[{onMatch:function(e,t,n){return e===n[1]?(n.shift(),n.shift(),this.next=n[0]||"start","support.class"):(this.next="","string")},regex:".*$",next:"start"}],indentedHeredoc:[{token:"string",regex:"^ +"},{onMatch:function(e,t,n){return e===n[1]?(n.shift(),n.shift(),this.next=n[0]||"start","support.class"):(this.next="","string")},regex:".*$",next:"start"}]}},{regex:"$",token:"empty",next:function(e,t){return t[0]==="heredoc"||t[0]==="indentedHeredoc"?t[0]:e}},{token:"string.character",regex:"\\B\\?."},{token:"keyword.operator",regex:"!|\\$|%|&|\\*|\\-\\-|\\-|\\+\\+|\\+|~|===|==|=|!=|!==|<=|>=|<<=|>>=|>>>=|<>|<|>|!|&&|\\|\\||\\?\\:|\\*=|%=|\\+=|\\-=|&=|\\^=|\\b(?:in|instanceof|new|delete|typeof|void)"},{token:"paren.lparen",regex:"[[({]"},{token:"paren.rparen",regex:"[\\])}]"},{token:"text",regex:"\\s+"}],comment:[{token:"comment",regex:"^=end(?:$|\\s.*$)",next:"start"},{token:"comment",regex:".+"}]},this.normalizeRules()};r.inherits(c,i),t.RubyHighlightRules=c}),define("ace/mode/html_ruby_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/html_highlight_rules","ace/mode/ruby_highlight_rules"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./html_highlight_rules").HtmlHighlightRules,s=e("./ruby_highlight_rules").RubyHighlightRules,o=function(){i.call(this);var e=[{regex:"<%%|%%>",token:"constant.language.escape"},{token:"comment.start.erb",regex:"<%#",push:[{token:"comment.end.erb",regex:"%>",next:"pop",defaultToken:"comment"}]},{token:"support.ruby_tag",regex:"<%+(?!>)[-=]?",push:"ruby-start"}],t=[{token:"support.ruby_tag",regex:"%>",next:"pop"},{token:"comment",regex:"#(?:[^%]|%[^>])*"}];for(var n in 
this.$rules)this.$rules[n].unshift.apply(this.$rules[n],e);this.embedRules(s,"ruby-",t,["start"]),this.normalizeRules()};r.inherits(o,i),t.HtmlRubyHighlightRules=o}),define("ace/mode/matching_brace_outdent",["require","exports","module","ace/range"],function(e,t,n){"use strict";var r=e("../range").Range,i=function(){};(function(){this.checkOutdent=function(e,t){return/^\s+$/.test(e)?/^\s*\}/.test(t):!1},this.autoOutdent=function(e,t){var n=e.getLine(t),i=n.match(/^(\s*\})/);if(!i)return 0;var s=i[1].length,o=e.findMatchingBracket({row:t,column:s});if(!o||o.row==t)return 0;var u=this.$getIndent(e.getLine(o.row));e.replace(new r(t,0,t,s-1),u)},this.$getIndent=function(e){return e.match(/^\s*/)[0]}}).call(i.prototype),t.MatchingBraceOutdent=i}),define("ace/mode/folding/cstyle",["require","exports","module","ace/lib/oop","ace/range","ace/mode/folding/fold_mode"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("../../range").Range,s=e("./fold_mode").FoldMode,o=t.FoldMode=function(e){e&&(this.foldingStartMarker=new RegExp(this.foldingStartMarker.source.replace(/\|[^|]*?$/,"|"+e.start)),this.foldingStopMarker=new RegExp(this.foldingStopMarker.source.replace(/\|[^|]*?$/,"|"+e.end)))};r.inherits(o,s),function(){this.foldingStartMarker=/(\{|\[)[^\}\]]*$|^\s*(\/\*)/,this.foldingStopMarker=/^[^\[\{]*(\}|\])|^[\s\*]*(\*\/)/,this.singleLineBlockCommentRe=/^\s*(\/\*).*\*\/\s*$/,this.tripleStarBlockCommentRe=/^\s*(\/\*\*\*).*\*\/\s*$/,this.startRegionRe=/^\s*(\/\*|\/\/)#?region\b/,this._getFoldWidgetBase=this.getFoldWidget,this.getFoldWidget=function(e,t,n){var r=e.getLine(n);if(this.singleLineBlockCommentRe.test(r)&&!this.startRegionRe.test(r)&&!this.tripleStarBlockCommentRe.test(r))return"";var i=this._getFoldWidgetBase(e,t,n);return!i&&this.startRegionRe.test(r)?"start":i},this.getFoldWidgetRange=function(e,t,n,r){var i=e.getLine(n);if(this.startRegionRe.test(i))return this.getCommentRegionBlock(e,i,n);var s=i.match(this.foldingStartMarker);if(s){var 
o=s.index;if(s[1])return this.openingBracketBlock(e,s[1],n,o);var u=e.getCommentFoldRange(n,o+s[0].length,1);return u&&!u.isMultiLine()&&(r?u=this.getSectionRange(e,n):t!="all"&&(u=null)),u}if(t==="markbegin")return;var s=i.match(this.foldingStopMarker);if(s){var o=s.index+s[0].length;return s[1]?this.closingBracketBlock(e,s[1],n,o):e.getCommentFoldRange(n,o,-1)}},this.getSectionRange=function(e,t){var n=e.getLine(t),r=n.search(/\S/),s=t,o=n.length;t+=1;var u=t,a=e.getLength();while(++t<a){n=e.getLine(t);var f=n.search(/\S/);if(f===-1)continue;if(r>f)break;var l=this.getFoldWidgetRange(e,"all",t);if(l){if(l.start.row<=s)break;if(l.isMultiLine())t=l.end.row;else if(r==f)break}u=t}return new i(s,o,u,e.getLine(u).length)},this.getCommentRegionBlock=function(e,t,n){var r=t.search(/\s*$/),s=e.getLength(),o=n,u=/^\s*(?:\/\*|\/\/|--)#?(end)?region\b/,a=1;while(++n<s){t=e.getLine(n);var f=u.exec(t);if(!f)continue;f[1]?a--:a++;if(!a)break}var l=n;if(l>o)return new i(o,r,l,t.length)}}.call(o.prototype)}),define("ace/mode/javascript",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/javascript_highlight_rules","ace/mode/matching_brace_outdent","ace/range","ace/worker/worker_client","ace/mode/behaviour/cstyle","ace/mode/folding/cstyle"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text").Mode,s=e("./javascript_highlight_rules").JavaScriptHighlightRules,o=e("./matching_brace_outdent").MatchingBraceOutdent,u=e("../range").Range,a=e("../worker/worker_client").WorkerClient,f=e("./behaviour/cstyle").CstyleBehaviour,l=e("./folding/cstyle").FoldMode,c=function(){this.HighlightRules=s,this.$outdent=new o,this.$behaviour=new f,this.foldingRules=new l};r.inherits(c,i),function(){this.lineCommentStart="//",this.blockComment={start:"/*",end:"*/"},this.getNextLineIndent=function(e,t,n){var r=this.$getIndent(t),i=this.getTokenizer().getLineTokens(t,e),s=i.tokens,o=i.state;if(s.length&&s[s.length-1].type=="comment")return r;if(e=="start"||e=="no_regex"){var 
u=t.match(/^.*(?:\bcase\b.*:|[\{\(\[])\s*$/);u&&(r+=n)}else if(e=="doc-start"){if(o=="start"||o=="no_regex")return"";var u=t.match(/^\s*(\/?)\*/);u&&(u[1]&&(r+=" "),r+="* ")}return r},this.checkOutdent=function(e,t,n){return this.$outdent.checkOutdent(t,n)},this.autoOutdent=function(e,t,n){this.$outdent.autoOutdent(t,n)},this.createWorker=function(e){var t=new a(["ace"],"ace/mode/javascript_worker","JavaScriptWorker");return t.attachToDocument(e.getDocument()),t.on("annotate",function(t){e.setAnnotations(t.data)}),t.on("terminate",function(){e.clearAnnotations()}),t},this.$id="ace/mode/javascript"}.call(c.prototype),t.Mode=c}),define("ace/mode/css_completions",["require","exports","module"],function(e,t,n){"use strict";var r={background:{"#$0":1},"background-color":{"#$0":1,transparent:1,fixed:1},"background-image":{"url('/$0')":1},"background-repeat":{repeat:1,"repeat-x":1,"repeat-y":1,"no-repeat":1,inherit:1},"background-position":{bottom:2,center:2,left:2,right:2,top:2,inherit:2},"background-attachment":{scroll:1,fixed:1},"background-size":{cover:1,contain:1},"background-clip":{"border-box":1,"padding-box":1,"content-box":1},"background-origin":{"border-box":1,"padding-box":1,"content-box":1},border:{"solid $0":1,"dashed $0":1,"dotted $0":1,"#$0":1},"border-color":{"#$0":1},"border-style":{solid:2,dashed:2,dotted:2,"double":2,groove:2,hidden:2,inherit:2,inset:2,none:2,outset:2,ridged:2},"border-collapse":{collapse:1,separate:1},bottom:{px:1,em:1,"%":1},clear:{left:1,right:1,both:1,none:1},color:{"#$0":1,"rgb(#$00,0,0)":1},cursor:{"default":1,pointer:1,move:1,text:1,wait:1,help:1,progress:1,"n-resize":1,"ne-resize":1,"e-resize":1,"se-resize":1,"s-resize":1,"sw-resize":1,"w-resize":1,"nw-resize":1},display:{none:1,block:1,inline:1,"inline-block":1,"table-cell":1},"empty-cells":{show:1,hide:1},"float":{left:1,right:1,none:1},"font-family":{Arial:2,"Comic Sans MS":2,Consolas:2,"Courier New":2,Courier:2,Georgia:2,Monospace:2,"Sans-Serif":2,"Segoe 
UI":2,Tahoma:2,"Times New Roman":2,"Trebuchet MS":2,Verdana:1},"font-size":{px:1,em:1,"%":1},"font-weight":{bold:1,normal:1},"font-style":{italic:1,normal:1},"font-variant":{normal:1,"small-caps":1},height:{px:1,em:1,"%":1},left:{px:1,em:1,"%":1},"letter-spacing":{normal:1},"line-height":{normal:1},"list-style-type":{none:1,disc:1,circle:1,square:1,decimal:1,"decimal-leading-zero":1,"lower-roman":1,"upper-roman":1,"lower-greek":1,"lower-latin":1,"upper-latin":1,georgian:1,"lower-alpha":1,"upper-alpha":1},margin:{px:1,em:1,"%":1},"margin-right":{px:1,em:1,"%":1},"margin-left":{px:1,em:1,"%":1},"margin-top":{px:1,em:1,"%":1},"margin-bottom":{px:1,em:1,"%":1},"max-height":{px:1,em:1,"%":1},"max-width":{px:1,em:1,"%":1},"min-height":{px:1,em:1,"%":1},"min-width":{px:1,em:1,"%":1},overflow:{hidden:1,visible:1,auto:1,scroll:1},"overflow-x":{hidden:1,visible:1,auto:1,scroll:1},"overflow-y":{hidden:1,visible:1,auto:1,scroll:1},padding:{px:1,em:1,"%":1},"padding-top":{px:1,em:1,"%":1},"padding-right":{px:1,em:1,"%":1},"padding-bottom":{px:1,em:1,"%":1},"padding-left":{px:1,em:1,"%":1},"page-break-after":{auto:1,always:1,avoid:1,left:1,right:1},"page-break-before":{auto:1,always:1,avoid:1,left:1,right:1},position:{absolute:1,relative:1,fixed:1,"static":1},right:{px:1,em:1,"%":1},"table-layout":{fixed:1,auto:1},"text-decoration":{none:1,underline:1,"line-through":1,blink:1},"text-align":{left:1,right:1,center:1,justify:1},"text-transform":{capitalize:1,uppercase:1,lowercase:1,none:1},top:{px:1,em:1,"%":1},"vertical-align":{top:1,bottom:1},visibility:{hidden:1,visible:1},"white-space":{nowrap:1,normal:1,pre:1,"pre-line":1,"pre-wrap":1},width:{px:1,em:1,"%":1},"word-spacing":{normal:1},filter:{"alpha(opacity=$0100)":1},"text-shadow":{"$02px 2px 2px 
#777":1},"text-overflow":{"ellipsis-word":1,clip:1,ellipsis:1},"-moz-border-radius":1,"-moz-border-radius-topright":1,"-moz-border-radius-bottomright":1,"-moz-border-radius-topleft":1,"-moz-border-radius-bottomleft":1,"-webkit-border-radius":1,"-webkit-border-top-right-radius":1,"-webkit-border-top-left-radius":1,"-webkit-border-bottom-right-radius":1,"-webkit-border-bottom-left-radius":1,"-moz-box-shadow":1,"-webkit-box-shadow":1,transform:{"rotate($00deg)":1,"skew($00deg)":1},"-moz-transform":{"rotate($00deg)":1,"skew($00deg)":1},"-webkit-transform":{"rotate($00deg)":1,"skew($00deg)":1}},i=function(){};(function(){this.completionsDefined=!1,this.defineCompletions=function(){if(document){var e=document.createElement("c").style;for(var t in e){if(typeof e[t]!="string")continue;var n=t.replace(/[A-Z]/g,function(e){return"-"+e.toLowerCase()});r.hasOwnProperty(n)||(r[n]=1)}}this.completionsDefined=!0},this.getCompletions=function(e,t,n,r){this.completionsDefined||this.defineCompletions();var i=t.getTokenAt(n.row,n.column);if(!i)return[];if(e==="ruleset"){var s=t.getLine(n.row).substr(0,n.column);return/:[^;]+$/.test(s)?(/([\w\-]+):[^:]*$/.test(s),this.getPropertyValueCompletions(e,t,n,r)):this.getPropertyCompletions(e,t,n,r)}return[]},this.getPropertyCompletions=function(e,t,n,i){var s=Object.keys(r);return s.map(function(e){return{caption:e,snippet:e+": $0",meta:"property",score:Number.MAX_VALUE}})},this.getPropertyValueCompletions=function(e,t,n,i){var s=t.getLine(n.row).substr(0,n.column),o=(/([\w\-]+):[^:]*$/.exec(s)||{})[1];if(!o)return[];var u=[];return o in r&&typeof r[o]=="object"&&(u=Object.keys(r[o])),u.map(function(e){return{caption:e,snippet:e,meta:"property value",score:Number.MAX_VALUE}})}}).call(i.prototype),t.CssCompletions=i}),define("ace/mode/behaviour/css",["require","exports","module","ace/lib/oop","ace/mode/behaviour","ace/mode/behaviour/cstyle","ace/token_iterator"],function(e,t,n){"use strict";var 
r=e("../../lib/oop"),i=e("../behaviour").Behaviour,s=e("./cstyle").CstyleBehaviour,o=e("../../token_iterator").TokenIterator,u=function(){this.inherit(s),this.add("colon","insertion",function(e,t,n,r,i){if(i===":"){var s=n.getCursorPosition(),u=new o(r,s.row,s.column),a=u.getCurrentToken();a&&a.value.match(/\s+/)&&(a=u.stepBackward());if(a&&a.type==="support.type"){var f=r.doc.getLine(s.row),l=f.substring(s.column,s.column+1);if(l===":")return{text:"",selection:[1,1]};if(!f.substring(s.column).match(/^\s*;/))return{text:":;",selection:[1,1]}}}}),this.add("colon","deletion",function(e,t,n,r,i){var s=r.doc.getTextRange(i);if(!i.isMultiLine()&&s===":"){var u=n.getCursorPosition(),a=new o(r,u.row,u.column),f=a.getCurrentToken();f&&f.value.match(/\s+/)&&(f=a.stepBackward());if(f&&f.type==="support.type"){var l=r.doc.getLine(i.start.row),c=l.substring(i.end.column,i.end.column+1);if(c===";")return i.end.column++,i}}}),this.add("semicolon","insertion",function(e,t,n,r,i){if(i===";"){var s=n.getCursorPosition(),o=r.doc.getLine(s.row),u=o.substring(s.column,s.column+1);if(u===";")return{text:"",selection:[1,1]}}})};r.inherits(u,s),t.CssBehaviour=u}),define("ace/mode/css",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/css_highlight_rules","ace/mode/matching_brace_outdent","ace/worker/worker_client","ace/mode/css_completions","ace/mode/behaviour/css","ace/mode/folding/cstyle"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text").Mode,s=e("./css_highlight_rules").CssHighlightRules,o=e("./matching_brace_outdent").MatchingBraceOutdent,u=e("../worker/worker_client").WorkerClient,a=e("./css_completions").CssCompletions,f=e("./behaviour/css").CssBehaviour,l=e("./folding/cstyle").FoldMode,c=function(){this.HighlightRules=s,this.$outdent=new o,this.$behaviour=new f,this.$completer=new a,this.foldingRules=new l};r.inherits(c,i),function(){this.foldingRules="cStyle",this.blockComment={start:"/*",end:"*/"},this.getNextLineIndent=function(e,t,n){var 
r=this.$getIndent(t),i=this.getTokenizer().getLineTokens(t,e).tokens;if(i.length&&i[i.length-1].type=="comment")return r;var s=t.match(/^.*\{\s*$/);return s&&(r+=n),r},this.checkOutdent=function(e,t,n){return this.$outdent.checkOutdent(t,n)},this.autoOutdent=function(e,t,n){this.$outdent.autoOutdent(t,n)},this.getCompletions=function(e,t,n,r){return this.$completer.getCompletions(e,t,n,r)},this.createWorker=function(e){var t=new u(["ace"],"ace/mode/css_worker","Worker");return t.attachToDocument(e.getDocument()),t.on("annotate",function(t){e.setAnnotations(t.data)}),t.on("terminate",function(){e.clearAnnotations()}),t},this.$id="ace/mode/css"}.call(c.prototype),t.Mode=c}),define("ace/mode/behaviour/xml",["require","exports","module","ace/lib/oop","ace/mode/behaviour","ace/token_iterator","ace/lib/lang"],function(e,t,n){"use strict";function u(e,t){return e.type.lastIndexOf(t+".xml")>-1}var r=e("../../lib/oop"),i=e("../behaviour").Behaviour,s=e("../../token_iterator").TokenIterator,o=e("../../lib/lang"),a=function(){this.add("string_dquotes","insertion",function(e,t,n,r,i){if(i=='"'||i=="'"){var o=i,a=r.doc.getTextRange(n.getSelectionRange());if(a!==""&&a!=="'"&&a!='"'&&n.getWrapBehavioursEnabled())return{text:o+a+o,selection:!1};var f=n.getCursorPosition(),l=r.doc.getLine(f.row),c=l.substring(f.column,f.column+1),h=new s(r,f.row,f.column),p=h.getCurrentToken();if(c==o&&(u(p,"attribute-value")||u(p,"string")))return{text:"",selection:[1,1]};p||(p=h.stepBackward());if(!p)return;while(u(p,"tag-whitespace")||u(p,"whitespace"))p=h.stepBackward();var d=!c||c.match(/\s/);if(u(p,"attribute-equals")&&(d||c==">")||u(p,"decl-attribute-equals")&&(d||c=="?"))return{text:o+o,selection:[1,1]}}}),this.add("string_dquotes","deletion",function(e,t,n,r,i){var s=r.doc.getTextRange(i);if(!i.isMultiLine()&&(s=='"'||s=="'")){var o=r.doc.getLine(i.start.row),u=o.substring(i.start.column+1,i.start.column+2);if(u==s)return 
i.end.column++,i}}),this.add("autoclosing","insertion",function(e,t,n,r,i){if(i==">"){var o=n.getSelectionRange().start,a=new s(r,o.row,o.column),f=a.getCurrentToken()||a.stepBackward();if(!f||!(u(f,"tag-name")||u(f,"tag-whitespace")||u(f,"attribute-name")||u(f,"attribute-equals")||u(f,"attribute-value")))return;if(u(f,"reference.attribute-value"))return;if(u(f,"attribute-value")){var l=f.value.charAt(0);if(l=='"'||l=="'"){var c=f.value.charAt(f.value.length-1),h=a.getCurrentTokenColumn()+f.value.length;if(h>o.column||h==o.column&&l!=c)return}}while(!u(f,"tag-name")){f=a.stepBackward();if(f.value=="<"){f=a.stepForward();break}}var p=a.getCurrentTokenRow(),d=a.getCurrentTokenColumn();if(u(a.stepBackward(),"end-tag-open"))return;var v=f.value;p==o.row&&(v=v.substring(0,o.column-d));if(this.voidElements.hasOwnProperty(v.toLowerCase()))return;return{text:"></"+v+">",selection:[1,1]}}}),this.add("autoindent","insertion",function(e,t,n,r,i){if(i=="\n"){var o=n.getCursorPosition(),u=r.getLine(o.row),a=new s(r,o.row,o.column),f=a.getCurrentToken();if(f&&f.type.indexOf("tag-close")!==-1){if(f.value=="/>")return;while(f&&f.type.indexOf("tag-name")===-1)f=a.stepBackward();if(!f)return;var l=f.value,c=a.getCurrentTokenRow();f=a.stepBackward();if(!f||f.type.indexOf("end-tag")!==-1)return;if(this.voidElements&&!this.voidElements[l]){var h=r.getTokenAt(o.row,o.column+1),u=r.getLine(c),p=this.$getIndent(u),d=p+r.getTabString();return h&&h.value==="</"?{text:"\n"+d+"\n"+p,selection:[1,d.length,1,d.length]}:{text:"\n"+d}}}}})};r.inherits(a,i),t.XmlBehaviour=a}),define("ace/mode/folding/mixed",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("./fold_mode").FoldMode,s=t.FoldMode=function(e,t){this.defaultMode=e,this.subModes=t};r.inherits(s,i),function(){this.$getMode=function(e){typeof e!="string"&&(e=e[0]);for(var t in this.subModes)if(e.indexOf(t)===0)return this.subModes[t];return 
null},this.$tryMode=function(e,t,n,r){var i=this.$getMode(e);return i?i.getFoldWidget(t,n,r):""},this.getFoldWidget=function(e,t,n){return this.$tryMode(e.getState(n-1),e,t,n)||this.$tryMode(e.getState(n),e,t,n)||this.defaultMode.getFoldWidget(e,t,n)},this.getFoldWidgetRange=function(e,t,n){var r=this.$getMode(e.getState(n-1));if(!r||!r.getFoldWidget(e,t,n))r=this.$getMode(e.getState(n));if(!r||!r.getFoldWidget(e,t,n))r=this.defaultMode;return r.getFoldWidgetRange(e,t,n)}}.call(s.prototype)}),define("ace/mode/folding/xml",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/range","ace/mode/folding/fold_mode","ace/token_iterator"],function(e,t,n){"use strict";function l(e,t){return e.type.lastIndexOf(t+".xml")>-1}var r=e("../../lib/oop"),i=e("../../lib/lang"),s=e("../../range").Range,o=e("./fold_mode").FoldMode,u=e("../../token_iterator").TokenIterator,a=t.FoldMode=function(e,t){o.call(this),this.voidElements=e||{},this.optionalEndTags=r.mixin({},this.voidElements),t&&r.mixin(this.optionalEndTags,t)};r.inherits(a,o);var f=function(){this.tagName="",this.closing=!1,this.selfClosing=!1,this.start={row:0,column:0},this.end={row:0,column:0}};(function(){this.getFoldWidget=function(e,t,n){var r=this._getFirstTagInLine(e,n);return r?r.closing||!r.tagName&&r.selfClosing?t=="markbeginend"?"end":"":!r.tagName||r.selfClosing||this.voidElements.hasOwnProperty(r.tagName.toLowerCase())?"":this._findEndTagInLine(e,n,r.tagName,r.end.column)?"":"start":""},this._getFirstTagInLine=function(e,t){var n=e.getTokens(t),r=new f;for(var i=0;i<n.length;i++){var s=n[i];if(l(s,"tag-open")){r.end.column=r.start.column+s.value.length,r.closing=l(s,"end-tag-open"),s=n[++i];if(!s)return null;r.tagName=s.value,r.end.column+=s.value.length;for(i++;i<n.length;i++){s=n[i],r.end.column+=s.value.length;if(l(s,"tag-close")){r.selfClosing=s.value=="/>";break}}return r}if(l(s,"tag-close"))return r.selfClosing=s.value=="/>",r;r.start.column+=s.value.length}return 
null},this._findEndTagInLine=function(e,t,n,r){var i=e.getTokens(t),s=0;for(var o=0;o<i.length;o++){var u=i[o];s+=u.value.length;if(s<r)continue;if(l(u,"end-tag-open")){u=i[o+1];if(u&&u.value==n)return!0}}return!1},this._readTagForward=function(e){var t=e.getCurrentToken();if(!t)return null;var n=new f;do if(l(t,"tag-open"))n.closing=l(t,"end-tag-open"),n.start.row=e.getCurrentTokenRow(),n.start.column=e.getCurrentTokenColumn();else if(l(t,"tag-name"))n.tagName=t.value;else if(l(t,"tag-close"))return n.selfClosing=t.value=="/>",n.end.row=e.getCurrentTokenRow(),n.end.column=e.getCurrentTokenColumn()+t.value.length,e.stepForward(),n;while(t=e.stepForward());return null},this._readTagBackward=function(e){var t=e.getCurrentToken();if(!t)return null;var n=new f;do{if(l(t,"tag-open"))return n.closing=l(t,"end-tag-open"),n.start.row=e.getCurrentTokenRow(),n.start.column=e.getCurrentTokenColumn(),e.stepBackward(),n;l(t,"tag-name")?n.tagName=t.value:l(t,"tag-close")&&(n.selfClosing=t.value=="/>",n.end.row=e.getCurrentTokenRow(),n.end.column=e.getCurrentTokenColumn()+t.value.length)}while(t=e.stepBackward());return null},this._pop=function(e,t){while(e.length){var n=e[e.length-1];if(!t||n.tagName==t.tagName)return e.pop();if(this.optionalEndTags.hasOwnProperty(n.tagName)){e.pop();continue}return null}},this.getFoldWidgetRange=function(e,t,n){var r=this._getFirstTagInLine(e,n);if(!r)return null;var i=r.closing||r.selfClosing,o=[],a;if(!i){var f=new u(e,n,r.start.column),l={row:n,column:r.start.column+r.tagName.length+2};r.start.row==r.end.row&&(l.column=r.end.column);while(a=this._readTagForward(f)){if(a.selfClosing){if(!o.length)return a.start.column+=a.tagName.length+2,a.end.column-=2,s.fromPoints(a.start,a.end);continue}if(a.closing){this._pop(o,a);if(o.length==0)return s.fromPoints(l,a.start)}else o.push(a)}}else{var f=new u(e,n,r.end.column),c={row:n,column:r.start.column};while(a=this._readTagBackward(f)){if(a.selfClosing){if(!o.length)return 
a.start.column+=a.tagName.length+2,a.end.column-=2,s.fromPoints(a.start,a.end);continue}if(!a.closing){this._pop(o,a);if(o.length==0)return a.start.column+=a.tagName.length+2,a.start.row==a.end.row&&a.start.column<a.end.column&&(a.start.column=a.end.column),s.fromPoints(a.start,c)}else o.push(a)}}}}).call(a.prototype)}),define("ace/mode/folding/html",["require","exports","module","ace/lib/oop","ace/mode/folding/mixed","ace/mode/folding/xml","ace/mode/folding/cstyle"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("./mixed").FoldMode,s=e("./xml").FoldMode,o=e("./cstyle").FoldMode,u=t.FoldMode=function(e,t){i.call(this,new s(e,t),{"js-":new o,"css-":new o})};r.inherits(u,i)}),define("ace/mode/html_completions",["require","exports","module","ace/token_iterator"],function(e,t,n){"use strict";function f(e,t){return e.type.lastIndexOf(t+".xml")>-1}function
(e,t){var n=new r(e,t.row,t.column),i=n.getCurrentToken();while(i&&!f(i,"tag-name"))i=n.stepBackward();if(i)return i.value}function c(e,t){var n=new r(e,t.row,t.column),i=n.getCurrentToken();while(i&&!f(i,"attribute-name"))i=n.stepBackward();if(i)return i.value}var r=e("../token_iterator").TokenIterator,i=["accesskey","class","contenteditable","contextmenu","dir","draggable","dropzone","hidden","id","inert","itemid","itemprop","itemref","itemscope","itemtype","lang","spellcheck","style","tabindex","title","translate"],s=["onabort","onblur","oncancel","oncanplay","oncanplaythrough","onchange","onclick","onclose","oncontextmenu","oncuechange","ondblclick","ondrag","ondragend","ondragenter","ondragleave","ondragover","ondragstart","ondrop","ondurationchange","onemptied","onended","onerror","onfocus","oninput","oninvalid","onkeydown","onkeypress","onkeyup","onload","onloadeddata","onloadedmetadata","onloadstart","onmousedown","onmousemove","onmouseout","onmouseover","onmouseup","onmousewheel","onpause","onplay","onplaying","onprogress","onratechange","onreset","onscroll","onseeked","onseeking","onselect","onshow","onstalled","onsubmit","onsuspend","ontimeupdate","onvolumechange","onwaiting"],o=i.concat(s),u={html:{manifest:1},head:{},title:{},base:{href:1,target:1},link:{href:1,hreflang:1,rel:{stylesheet:1,icon:1},media:{all:1,screen:1,print:1},type:{"text/css":1,"image/png":1,"image/jpeg":1,"image/gif":1},sizes:1},meta:{"http-equiv":{"content-type":1},name:{description:1,keywords:1},content:{"text/html; 
charset=UTF-8":1},charset:1},style:{type:1,media:{all:1,screen:1,print:1},scoped:1},script:{charset:1,type:{"text/javascript":1},src:1,defer:1,async:1},noscript:{href:1},body:{onafterprint:1,onbeforeprint:1,onbeforeunload:1,onhashchange:1,onmessage:1,onoffline:1,onpopstate:1,onredo:1,onresize:1,onstorage:1,onundo:1,onunload:1},section:{},nav:{},article:{pubdate:1},aside:{},h1:{},h2:{},h3:{},h4:{},h5:{},h6:{},header:{},footer:{},address:{},main:{},p:{},hr:{},pre:{},blockquote:{cite:1},ol:{start:1,reversed:1},ul:{},li:{value:1},dl:{},dt:{},dd:{},figure:{},figcaption:{},div:{},a:{href:1,target:{_blank:1,top:1},ping:1,rel:{nofollow:1,alternate:1,author:1,bookmark:1,help:1,license:1,next:1,noreferrer:1,prefetch:1,prev:1,search:1,tag:1},media:1,hreflang:1,type:1},em:{},strong:{},small:{},s:{},cite:{},q:{cite:1},dfn:{},abbr:{},data:{},time:{datetime:1},code:{},"var":{},samp:{},kbd:{},sub:{},sup:{},i:{},b:{},u:{},mark:{},ruby:{},rt:{},rp:{},bdi:{},bdo:{},span:{},br:{},wbr:{},ins:{cite:1,datetime:1},del:{cite:1,datetime:1},img:{alt:1,src:1,height:1,width:1,usemap:1,ismap:1},iframe:{name:1,src:1,height:1,width:1,sandbox:{"allow-same-origin":1,"allow-top-navigation":1,"allow-forms":1,"allow-scripts":1},seamless:{seamless:1}},embed:{src:1,height:1,width:1,type:1},object:{param:1,data:1,type:1,height:1,width:1,usemap:1,name:1,form:1,classid:1},param:{name:1,value:1},video:{src:1,autobuffer:1,autoplay:{autoplay:1},loop:{loop:1},controls:{controls:1},width:1,height:1,poster:1,muted:{muted:1},preload:{auto:1,metadata:1,none:1}},audio:{src:1,autobuffer:1,autoplay:{autoplay:1},loop:{loop:1},controls:{controls:1},muted:{muted:1},preload:{auto:1,metadata:1,none:1}},source:{src:1,type:1,media:1},track:{kind:1,src:1,srclang:1,label:1,"default":1},canvas:{width:1,height:1},map:{name:1},area:{shape:1,coords:1,href:1,hreflang:1,alt:1,target:1,media:1,rel:1,ping:1,type:1},svg:{},math:{},table:{summary:1},caption:{},colgroup:{span:1},col:{span:1},tbody:{},thead:{},tfoot:{},tr:{},td:{headers:1
,rowspan:1,colspan:1},th:{headers:1,rowspan:1,colspan:1,scope:1},form:{"accept-charset":1,action:1,autocomplete:1,enctype:{"multipart/form-data":1,"application/x-www-form-urlencoded":1},method:{get:1,post:1},name:1,novalidate:1,target:{_blank:1,top:1}},fieldset:{disabled:1,form:1,name:1},legend:{},label:{form:1,"for":1},input:{type:{text:1,password:1,hidden:1,checkbox:1,submit:1,radio:1,file:1,button:1,reset:1,image:31,color:1,date:1,datetime:1,"datetime-local":1,email:1,month:1,number:1,range:1,search:1,tel:1,time:1,url:1,week:1},accept:1,alt:1,autocomplete:{on:1,off:1},autofocus:{autofocus:1},checked:{checked:1},disabled:{disabled:1},form:1,formaction:1,formenctype:{"application/x-www-form-urlencoded":1,"multipart/form-data":1,"text/plain":1},formmethod:{get:1,post:1},formnovalidate:{formnovalidate:1},formtarget:{_blank:1,_self:1,_parent:1,_top:1},height:1,list:1,max:1,maxlength:1,min:1,multiple:{multiple:1},name:1,pattern:1,placeholder:1,readonly:{readonly:1},required:{required:1},size:1,src:1,step:1,width:1,files:1,value:1},button:{autofocus:1,disabled:{disabled:1},form:1,formaction:1,formenctype:1,formmethod:1,formnovalidate:1,formtarget:1,name:1,value:1,type:{button:1,submit:1}},select:{autofocus:1,disabled:1,form:1,multiple:{multiple:1},name:1,size:1,readonly:{readonly:1}},datalist:{},optgroup:{disabled:1,label:1},option:{disabled:1,selected:1,label:1,value:1},textarea:{autofocus:{autofocus:1},disabled:{disabled:1},form:1,maxlength:1,name:1,placeholder:1,readonly:{readonly:1},required:{required:1},rows:1,cols:1,wrap:{on:1,off:1,hard:1,soft:1}},keygen:{autofocus:1,challenge:{challenge:1},disabled:{disabled:1},form:1,keytype:{rsa:1,dsa:1,ec:1},name:1},output:{"for":1,form:1,name:1},progress:{value:1,max:1},meter:{value:1,min:1,max:1,low:1,high:1,optimum:1},details:{open:1},summary:{},command:{type:1,label:1,icon:1,disabled:1,checked:1,radiogroup:1,command:1},menu:{type:1,label:1},dialog:{open:1}},a=Object.keys(u),h=function(){};(function(){this.getCompletions=f
unction(e,t,n,r){var i=t.getTokenAt(n.row,n.column);if(!i)return[];if(f(i,"tag-name")||f(i,"tag-open")||f(i,"end-tag-open"))return this.getTagCompletions(e,t,n,r);if(f(i,"tag-whitespace")||f(i,"attribute-name"))return this.getAttributeCompletions(e,t,n,r);if(f(i,"attribute-value"))return this.getAttributeValueCompletions(e,t,n,r);var s=t.getLine(n.row).substr(0,n.column);return/&[A-z]*$/i.test(s)?this.getHTMLEntityCompletions(e,t,n,r):[]},this.getTagCompletions=function(e,t,n,r){return a.map(function(e){return{value:e,meta:"tag",score:Number.MAX_VALUE}})},this.getAttributeCompletions=function(e,t,n,r){var i=l(t,n);if(!i)return[];var s=o;return i in u&&(s=s.concat(Object.keys(u[i]))),s.map(function(e){return{caption:e,snippet:e+'="$0"',meta:"attribute",score:Number.MAX_VALUE}})},this.getAttributeValueCompletions=function(e,t,n,r){var i=l(t,n),s=c(t,n);if(!i)return[];var o=[];return i in u&&s in u[i]&&typeof u[i][s]=="object"&&(o=Object.keys(u[i][s])),o.map(function(e){return{caption:e,snippet:e,meta:"attribute value",score:Number.MAX_VALUE}})},this.getHTMLEntityCompletions=function(e,t,n,r){var 
i=["Aacute;","aacute;","Acirc;","acirc;","acute;","AElig;","aelig;","Agrave;","agrave;","alefsym;","Alpha;","alpha;","amp;","and;","ang;","Aring;","aring;","asymp;","Atilde;","atilde;","Auml;","auml;","bdquo;","Beta;","beta;","brvbar;","bull;","cap;","Ccedil;","ccedil;","cedil;","cent;","Chi;","chi;","circ;","clubs;","cong;","copy;","crarr;","cup;","curren;","Dagger;","dagger;","dArr;","darr;","deg;","Delta;","delta;","diams;","divide;","Eacute;","eacute;","Ecirc;","ecirc;","Egrave;","egrave;","empty;","emsp;","ensp;","Epsilon;","epsilon;","equiv;","Eta;","eta;","ETH;","eth;","Euml;","euml;","euro;","exist;","fnof;","forall;","frac12;","frac14;","frac34;","frasl;","Gamma;","gamma;","ge;","gt;","hArr;","harr;","hearts;","hellip;","Iacute;","iacute;","Icirc;","icirc;","iexcl;","Igrave;","igrave;","image;","infin;","int;","Iota;","iota;","iquest;","isin;","Iuml;","iuml;","Kappa;","kappa;","Lambda;","lambda;","lang;","laquo;","lArr;","larr;","lceil;","ldquo;","le;","lfloor;","lowast;","loz;","lrm;","lsaquo;","lsquo;","lt;","macr;","mdash;","micro;","middot;","minus;","Mu;","mu;","nabla;","nbsp;","ndash;","ne;","ni;","not;","notin;","nsub;","Ntilde;","ntilde;","Nu;","nu;","Oacute;","oacute;","Ocirc;","ocirc;","OElig;","oelig;","Ograve;","ograve;","oline;","Omega;","omega;","Omicron;","omicron;","oplus;","or;","ordf;","ordm;","Oslash;","oslash;","Otilde;","otilde;","otimes;","Ouml;","ouml;","para;","part;","permil;","perp;","Phi;","phi;","Pi;","pi;","piv;","plusmn;","pound;","Prime;","prime;","prod;","prop;","Psi;","psi;","quot;","radic;","rang;","raquo;","rArr;","rarr;","rceil;","rdquo;","real;","reg;","rfloor;","Rho;","rho;","rlm;","rsaquo;","rsquo;","sbquo;","Scaron;","scaron;","sdot;","sect;","shy;","Sigma;","sigma;","sigmaf;","sim;","spades;","sub;","sube;","sum;","sup;","sup1;","sup2;","sup3;","supe;","szlig;","Tau;","tau;","there4;","Theta;","theta;","thetasym;","thinsp;","THORN;","thorn;","tilde;","times;","trade;","Uacute;","uacute;","uArr;","uarr;","Ucirc;","uci
rc;","Ugrave;","ugrave;","uml;","upsih;","Upsilon;","upsilon;","Uuml;","uuml;","weierp;","Xi;","xi;","Yacute;","yacute;","yen;","Yuml;","yuml;","Zeta;","zeta;","zwj;","zwnj;"];return i.map(function(e){return{caption:e,snippet:e,meta:"html entity",score:Number.MAX_VALUE}})}}).call(h.prototype),t.HtmlCompletions=h}),define("ace/mode/html",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/mode/text","ace/mode/javascript","ace/mode/css","ace/mode/html_highlight_rules","ace/mode/behaviour/xml","ace/mode/folding/html","ace/mode/html_completions","ace/worker/worker_client"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("../lib/lang"),s=e("./text").Mode,o=e("./javascript").Mode,u=e("./css").Mode,a=e("./html_highlight_rules").HtmlHighlightRules,f=e("./behaviour/xml").XmlBehaviour,l=e("./folding/html").FoldMode,c=e("./html_completions").HtmlCompletions,h=e("../worker/worker_client").WorkerClient,p=["area","base","br","col","embed","hr","img","input","keygen","link","meta","menuitem","param","source","track","wbr"],d=["li","dt","dd","p","rt","rp","optgroup","option","colgroup","td","th"],v=function(e){this.fragmentContext=e&&e.fragmentContext,this.HighlightRules=a,this.$behaviour=new f,this.$completer=new c,this.createModeDelegates({"js-":o,"css-":u}),this.foldingRules=new l(this.voidElements,i.arrayToMap(d))};r.inherits(v,s),function(){this.blockComment={start:"<!--",end:"-->"},this.voidElements=i.arrayToMap(p),this.getNextLineIndent=function(e,t,n){return this.$getIndent(t)},this.checkOutdent=function(e,t,n){return!1},this.getCompletions=function(e,t,n,r){return this.$completer.getCompletions(e,t,n,r)},this.createWorker=function(e){if(this.constructor!=v)return;var t=new h(["ace"],"ace/mode/html_worker","Worker");return 
t.attachToDocument(e.getDocument()),this.fragmentContext&&t.call("setOptions",[{context:this.fragmentContext}]),t.on("error",function(t){e.setAnnotations(t.data)}),t.on("terminate",function(){e.clearAnnotations()}),t},this.$id="ace/mode/html"}.call(v.prototype),t.Mode=v}),define("ace/mode/folding/coffee",["require","exports","module","ace/lib/oop","ace/mode/folding/fold_mode","ace/range"],function(e,t,n){"use strict";var r=e("../../lib/oop"),i=e("./fold_mode").FoldMode,s=e("../../range").Range,o=t.FoldMode=function(){};r.inherits(o,i),function(){this.getFoldWidgetRange=function(e,t,n){var r=this.indentationBlock(e,n);if(r)return r;var i=/\S/,o=e.getLine(n),u=o.search(i);if(u==-1||o[u]!="#")return;var a=o.length,f=e.getLength(),l=n,c=n;while(++n<f){o=e.getLine(n);var h=o.search(i);if(h==-1)continue;if(o[h]!="#")break;c=n}if(c>l){var p=e.getLine(c).length;return new s(l,a,c,p)}},this.getFoldWidget=function(e,t,n){var r=e.getLine(n),i=r.search(/\S/),s=e.getLine(n+1),o=e.getLine(n-1),u=o.search(/\S/),a=s.search(/\S/);if(i==-1)return e.foldWidgets[n-1]=u!=-1&&u<a?"start":"","";if(u==-1){if(i==a&&r[i]=="#"&&s[i]=="#")return e.foldWidgets[n-1]="",e.foldWidgets[n+1]="","start"}else if(u==i&&r[i]=="#"&&o[i]=="#"&&e.getLine(n-2).search(/\S/)==-1)return e.foldWidgets[n-1]="start",e.foldWidgets[n+1]="","";return u!=-1&&u<i?e.foldWidgets[n-1]="start":e.foldWidgets[n-1]="",i<a?"start":""}}.call(o.prototype)}),define("ace/mode/ruby",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/ruby_highlight_rules","ace/mode/matching_brace_outdent","ace/range","ace/mode/behaviour/cstyle","ace/mode/folding/coffee"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./text").Mode,s=e("./ruby_highlight_rules").RubyHighlightRules,o=e("./matching_brace_outdent").MatchingBraceOutdent,u=e("../range").Range,a=e("./behaviour/cstyle").CstyleBehaviour,f=e("./folding/coffee").FoldMode,l=function(){this.HighlightRules=s,this.$outdent=new o,this.$behaviour=new 
a,this.foldingRules=new f};r.inherits(l,i),function(){this.lineCommentStart="#",this.getNextLineIndent=function(e,t,n){var r=this.$getIndent(t),i=this.getTokenizer().getLineTokens(t,e),s=i.tokens;if(s.length&&s[s.length-1].type=="comment")return r;if(e=="start"){var o=t.match(/^.*[\{\(\[]\s*$/),u=t.match(/^\s*(class|def|module)\s.*$/),a=t.match(/.*do(\s*|\s+\|.*\|\s*)$/),f=t.match(/^\s*(if|else|when)\s*/);if(o||u||a||f)r+=n}return r},this.checkOutdent=function(e,t,n){return/^\s+(end|else)$/.test(t+n)||this.$outdent.checkOutdent(t,n)},this.autoOutdent=function(e,t,n){var r=t.getLine(n);if(/}/.test(r))return this.$outdent.autoOutdent(t,n);var i=this.$getIndent(r),s=t.getLine(n-1),o=this.$getIndent(s),a=t.getTabString();o.length<=i.length&&i.slice(-a.length)==a&&t.remove(new u(n,i.length-a.length,n,i.length))},this.$id="ace/mode/ruby"}.call(l.prototype),t.Mode=l}),define("ace/mode/html_ruby",["require","exports","module","ace/lib/oop","ace/mode/html_ruby_highlight_rules","ace/mode/html","ace/mode/javascript","ace/mode/css","ace/mode/ruby"],function(e,t,n){"use strict";var r=e("../lib/oop"),i=e("./html_ruby_highlight_rules").HtmlRubyHighlightRules,s=e("./html").Mode,o=e("./javascript").Mode,u=e("./css").Mode,a=e("./ruby").Mode,f=function(){s.call(this),this.HighlightRules=i,this.createModeDelegates({"js-":o,"css-":u,"ruby-":a})};r.inherits(f,s),function(){this.$id="ace/mode/html_ruby"}.call(f.prototype),t.Mode=f})
l
operations.ts
/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ import * as msRest from "@azure/ms-rest-js"; import * as Models from "../models"; import * as Mappers from "../models/operationsMappers"; import * as Parameters from "../models/parameters"; import { SignalRManagementClientContext } from "../signalRManagementClientContext"; /** Class representing a Operations. */ export class Operations { private readonly client: SignalRManagementClientContext; /** * Create a Operations. * @param {SignalRManagementClientContext} client Reference to the service client. */ constructor(client: SignalRManagementClientContext) { this.client = client; } /** * Lists all of the available REST API operations of the Microsoft.SignalRService provider. * @param [options] The optional parameters * @returns Promise<Models.OperationsListResponse> */ list(options?: msRest.RequestOptionsBase): Promise<Models.OperationsListResponse>; /** * @param callback The callback */ list(callback: msRest.ServiceCallback<Models.OperationList>): void; /** * @param options The optional parameters * @param callback The callback */ list(options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.OperationList>): void; list(options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.OperationList>, callback?: msRest.ServiceCallback<Models.OperationList>): Promise<Models.OperationsListResponse> { return this.client.sendOperationRequest( { options
} /** * Lists all of the available REST API operations of the Microsoft.SignalRService provider. * @param nextPageLink The NextLink from the previous successful call to List operation. * @param [options] The optional parameters * @returns Promise<Models.OperationsListNextResponse> */ listNext(nextPageLink: string, options?: msRest.RequestOptionsBase): Promise<Models.OperationsListNextResponse>; /** * @param nextPageLink The NextLink from the previous successful call to List operation. * @param callback The callback */ listNext(nextPageLink: string, callback: msRest.ServiceCallback<Models.OperationList>): void; /** * @param nextPageLink The NextLink from the previous successful call to List operation. * @param options The optional parameters * @param callback The callback */ listNext(nextPageLink: string, options: msRest.RequestOptionsBase, callback: msRest.ServiceCallback<Models.OperationList>): void; listNext(nextPageLink: string, options?: msRest.RequestOptionsBase | msRest.ServiceCallback<Models.OperationList>, callback?: msRest.ServiceCallback<Models.OperationList>): Promise<Models.OperationsListNextResponse> { return this.client.sendOperationRequest( { nextPageLink, options }, listNextOperationSpec, callback) as Promise<Models.OperationsListNextResponse>; } } // Operation Specifications const serializer = new msRest.Serializer(Mappers); const listOperationSpec: msRest.OperationSpec = { httpMethod: "GET", path: "providers/Microsoft.SignalRService/operations", queryParameters: [ Parameters.apiVersion ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: { bodyMapper: Mappers.OperationList }, default: { bodyMapper: Mappers.ErrorResponse } }, serializer }; const listNextOperationSpec: msRest.OperationSpec = { httpMethod: "GET", baseUrl: "https://management.azure.com", path: "{nextLink}", urlParameters: [ Parameters.nextPageLink ], headerParameters: [ Parameters.acceptLanguage ], responses: { 200: { bodyMapper: Mappers.OperationList }, default: { 
bodyMapper: Mappers.ErrorResponse } }, serializer };
}, listOperationSpec, callback) as Promise<Models.OperationsListResponse>;
tab.js
import React from "react"; import PropTypes from "prop-types"; import clsx from "clsx"; import styled from "styled-components"; import Tab from "@material-ui/core/Tab"; import { withTheme } from "dry/theme"; import setPropTypes from "dry/utils/setPropTypes"; function
(props) {
  const { className, ...restProps } = props;
  // Merge the caller-supplied class name; the clsx condition map is
  // currently empty, so this just normalizes className.
  const clsxName = clsx(className, {});
  return <Tab {...restProps} className={clsxName} />;
}

DryTab.defaultProps = {
  className: ""
};

DryTab.propTypes = {
  className: PropTypes.string
};

// Layering: base MUI Tab -> styled-components wrapper -> theme injection.
const StyledTab = styled(DryTab)``;
const ThemedStyledTab = withTheme(StyledTab);

// Copies Tab's propTypes onto the themed wrapper under the "DryTab" name —
// presumably so prop validation survives the styled/withTheme wrapping
// (TODO confirm against setPropTypes implementation).
setPropTypes("DryTab", Tab, ThemedStyledTab);

export default ThemedStyledTab;
DryTab
progress.rs
use indicatif::{ProgressBar, ProgressStyle}; use std::{ io, io::{Read, Seek, SeekFrom}, }; /// Wrapper around a `Read` that reports the progress made. /// /// Used to monitor slow IO readers
/// Unfortunately cannot use this with http client yet as it does not implement seek pub struct ProgressReader<R: Read + Seek> { rdr: R, pb: ProgressBar, } impl<R: Read + Seek> ProgressReader<R> { pub fn new(mut rdr: R) -> io::Result<ProgressReader<R>> { let len = rdr.seek(SeekFrom::End(0))?; rdr.seek(SeekFrom::Start(0))?; let pb = ProgressBar::new(len); pb.set_style( ProgressStyle::default_bar().template("{bar:40.green/black} {bytes}/{total_bytes} ({eta})"), ); Ok(ProgressReader { rdr, pb }) } } impl<R: Read + Seek> Read for ProgressReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let rv = self.rdr.read(buf)?; self.pb.inc(rv as u64); Ok(rv) } }
mssql.go
package mssql import ( "database/sql" "fmt" "regexp" "strconv" "strings" "github.com/k1LoW/tbls/ddl" "github.com/k1LoW/tbls/dict" "github.com/k1LoW/tbls/schema" "github.com/pkg/errors" ) var defaultSchemaName = "dbo" var typeFk = schema.TypeFK var typeCheck = "CHECK" var reSystemNamed = regexp.MustCompile(`_[^_]+$`) // Mssql struct type Mssql struct { db *sql.DB } type relationLink struct { table string columns []string parentTable string parentColumns []string } // New ... func New(db *sql.DB) *Mssql { return &Mssql{ db: db, } } func (m *Mssql) Analyze(s *schema.Schema) error { d, err := m.Info() if err != nil { return errors.WithStack(err) } s.Driver = d // tables and comments tableRows, err := m.db.Query(` SELECT schema_name(schema_id) AS table_schema, o.name, o.object_id, o.type, cast(e.value as NVARCHAR(MAX)) AS table_comment FROM sys.objects AS o LEFT JOIN sys.extended_properties AS e ON e.major_id = o.object_id AND e.name = 'MS_Description' AND e.minor_id = 0 WHERE type IN ('U', 'V') ORDER BY OBJECT_ID `) if err != nil { return errors.WithStack(err) } defer tableRows.Close() tables := []*schema.Table{} links := []relationLink{} for tableRows.Next() { var ( tableSchema string tableName string tableOid string tableType string tableComment sql.NullString ) err := tableRows.Scan(&tableSchema, &tableName, &tableOid, &tableType, &tableComment) if err != nil { return errors.WithStack(err) } tableType = convertTableType(tableType) name := tableName if tableSchema != defaultSchemaName { name = fmt.Sprintf("%s.%s", tableSchema, tableName) } table := &schema.Table{ Name: name, Type: tableType, Comment: tableComment.String, } // view definition if tableType == "VIEW" { viewDefRows, err := m.db.Query(` SELECT definition FROM sys.sql_modules WHERE object_id = @p1 `, tableOid) if err != nil { return errors.WithStack(err) } defer viewDefRows.Close() for viewDefRows.Next() { var tableDef sql.NullString err := viewDefRows.Scan(&tableDef) if err != nil { return 
errors.WithStack(err) } table.Def = tableDef.String } } // columns and comments columnRows, err := m.db.Query(` SELECT c.name, t.name AS type, c.max_length, c.is_nullable, c.is_identity, object_definition(c.default_object_id), CAST(e.value AS NVARCHAR(MAX)) AS column_comment FROM sys.columns AS c LEFT JOIN sys.types AS t ON c.system_type_id = t.system_type_id LEFT JOIN sys.extended_properties AS e ON e.major_id = c.object_id AND e.name = 'MS_Description' AND e.minor_id = c.column_id WHERE c.object_id = @p1 and t.name != 'sysname' ORDER BY c.column_id `, tableOid) if err != nil { return errors.WithStack(err) } defer columnRows.Close() columns := []*schema.Column{} for columnRows.Next() { var ( columnName string dataType string maxLength int isNullable bool isIdentity bool columnDefault sql.NullString columnComment sql.NullString ) err = columnRows.Scan(&columnName, &dataType, &maxLength, &isNullable, &isIdentity, &columnDefault, &columnComment) if err != nil { return errors.WithStack(err) } column := &schema.Column{ Name: columnName, Type: convertColumnType(dataType, maxLength), Nullable: isNullable, Default: columnDefault, Comment: columnComment.String, } columns = append(columns, column) } table.Columns = columns // constraints constraints := []*schema.Constraint{} /// key constraints keyRows, err := m.db.Query(` SELECT c.name, i.type_desc, i.is_unique, i.is_primary_key, i.is_unique_constraint, STRING_AGG(COL_NAME(ic.object_id, ic.column_id), ', ') WITHIN GROUP ( ORDER BY ic.key_ordinal ), c.is_system_named FROM sys.key_constraints AS c LEFT JOIN sys.indexes AS i ON i.object_id = c.parent_object_id AND i.index_id = c.unique_index_id INNER JOIN sys.index_columns AS ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id WHERE i.object_id = object_id(@p1) GROUP BY c.name, i.index_id, i.type_desc, i.is_unique, i.is_primary_key, i.is_unique_constraint, c.is_system_named ORDER BY i.index_id `, fmt.Sprintf("%s.%s", tableSchema, tableName)) if err != nil { return 
errors.WithStack(err) } defer keyRows.Close() for keyRows.Next() { var ( indexName string indexClusterType string indexIsUnique bool indexIsPrimaryKey bool indexIsUniqueConstraint bool indexColumnName sql.NullString indexIsSystemNamed bool ) err = keyRows.Scan(&indexName, &indexClusterType, &indexIsUnique, &indexIsPrimaryKey, &indexIsUniqueConstraint, &indexColumnName, &indexIsSystemNamed) if err != nil { return errors.WithStack(err) } indexType := "-" indexDef := []string{ indexClusterType, } if indexIsUnique { indexDef = append(indexDef, "unique") } if indexIsPrimaryKey { indexType = "PRIMARY KEY" indexDef = append(indexDef, "part of a PRIMARY KEY constraint") } if indexIsUniqueConstraint { indexType = "UNIQUE" indexDef = append(indexDef, "part of a UNIQUE constraint") } indexDef = append(indexDef, fmt.Sprintf("[ %s ]", indexColumnName.String)) constraint := &schema.Constraint{ Name: convertSystemNamed(indexName, indexIsSystemNamed), Type: indexType, Def: strings.Join(indexDef, ", "), Table: &table.Name, Columns: strings.Split(indexColumnName.String, ", "), } constraints = append(constraints, constraint) } /// foreign_keys fkRows, err := m.db.Query(` SELECT f.name AS f_name, object_name(f.parent_object_id) AS table_name, object_name(f.referenced_object_id) AS parent_table_name, STRING_AGG(COL_NAME(fc.parent_object_id, fc.parent_column_id), ', ') AS column_names, STRING_AGG(COL_NAME(fc.referenced_object_id, fc.referenced_column_id), ', ') AS parent_column_names, update_referential_action_desc, delete_referential_action_desc, f.is_system_named FROM sys.foreign_keys AS f LEFT JOIN sys.foreign_key_columns AS fc ON f.object_id = fc.constraint_object_id WHERE f.parent_object_id = object_id(@p1) GROUP BY f.name, f.parent_object_id, f.referenced_object_id, delete_referential_action_desc, update_referential_action_desc, f.is_system_named `, fmt.Sprintf("%s.%s", tableSchema, tableName)) if err != nil { return errors.WithStack(err) } defer fkRows.Close() for fkRows.Next() { 
var ( fkName string fkTableName string fkParentTableName string fkColumnNames string fkParentColumnNames string fkUpdateAction string fkDeleteAction string fkIsSystemNamed bool ) err = fkRows.Scan(&fkName, &fkTableName, &fkParentTableName, &fkColumnNames, &fkParentColumnNames, &fkUpdateAction, &fkDeleteAction, &fkIsSystemNamed) if err != nil { return errors.WithStack(err) } fkDef := fmt.Sprintf("FOREIGN KEY(%s) REFERENCES %s(%s) ON UPDATE %s ON DELETE %s", fkColumnNames, fkParentTableName, fkParentColumnNames, fkUpdateAction, fkDeleteAction) // #nosec constraint := &schema.Constraint{ Name: convertSystemNamed(fkName, fkIsSystemNamed), Type: typeFk, Def: fkDef, Table: &table.Name, Columns: strings.Split(fkColumnNames, ", "), ReferencedTable: &fkParentTableName, ReferencedColumns: strings.Split(fkParentColumnNames, ", "), } links = append(links, relationLink{ table: table.Name, columns: strings.Split(fkColumnNames, ", "), parentTable: fkParentTableName, parentColumns: strings.Split(fkParentColumnNames, ", "), }) constraints = append(constraints, constraint) } /// check_constraints checkRows, err := m.db.Query(` SELECT name, definition, is_system_named FROM sys.check_constraints WHERE parent_object_id = object_id(@p1) `, fmt.Sprintf("%s.%s", tableSchema, tableName)) if err != nil { return errors.WithStack(err) } defer checkRows.Close() for checkRows.Next() { var ( checkName string checkDef string checkIsSystemNamed bool ) err = checkRows.Scan(&checkName, &checkDef, &checkIsSystemNamed) if err != nil { return errors.WithStack(err) } constraint := &schema.Constraint{ Name: convertSystemNamed(checkName, checkIsSystemNamed), Type: typeCheck, Def: fmt.Sprintf("CHECK%s", checkDef), Table: &table.Name, } constraints = append(constraints, constraint) } table.Constraints = constraints // triggers triggerRows, err := m.db.Query(` SELECT name, definition FROM sys.triggers AS t INNER JOIN sys.sql_modules AS sm ON sm.object_id = t.object_id WHERE type = 'TR' AND parent_id = 
object_id(@p1) `, fmt.Sprintf("%s.%s", tableSchema, tableName)) if err != nil { return errors.WithStack(err) } defer triggerRows.Close() triggers := []*schema.Trigger{} for triggerRows.Next() { var ( triggerName string triggerDef string ) err = triggerRows.Scan(&triggerName, &triggerDef) if err != nil { return errors.WithStack(err) } trigger := &schema.Trigger{ Name: triggerName, Def: triggerDef, } triggers = append(triggers, trigger) } table.Triggers = triggers // indexes indexRows, err := m.db.Query(` SELECT i.name AS index_name, i.type_desc, i.is_unique, i.is_primary_key, i.is_unique_constraint, STRING_AGG(COL_NAME(ic.object_id, ic.column_id), ', ') WITHIN GROUP ( ORDER BY ic.key_ordinal ), c.is_system_named FROM sys.indexes AS i INNER JOIN sys.index_columns AS ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id LEFT JOIN sys.key_constraints AS c ON i.object_id = c.parent_object_id AND i.index_id = c.unique_index_id WHERE i.object_id = object_id(@p1) GROUP BY i.name, i.index_id, i.type_desc, i.is_unique, i.is_primary_key, i.is_unique_constraint, c.is_system_named ORDER BY i.index_id `, fmt.Sprintf("%s.%s", tableSchema, tableName)) if err != nil { return errors.WithStack(err) } defer indexRows.Close() indexes := []*schema.Index{} for indexRows.Next() { var ( indexName string indexType string indexIsUnique bool indexIsPrimaryKey bool indexIsUniqueConstraint bool indexColumnName sql.NullString indexIsSytemNamed sql.NullBool ) err = indexRows.Scan(&indexName, &indexType, &indexIsUnique, &indexIsPrimaryKey, &indexIsUniqueConstraint, &indexColumnName, &indexIsSytemNamed) if err != nil { return errors.WithStack(err) } indexDef := []string{ indexType, } if indexIsUnique { indexDef = append(indexDef, "unique") } if indexIsPrimaryKey { indexDef = append(indexDef, "part of a PRIMARY KEY constraint") } if indexIsUniqueConstraint { indexDef = append(indexDef, "part of a UNIQUE constraint") } indexDef = append(indexDef, fmt.Sprintf("[ %s ]", indexColumnName.String)) 
index := &schema.Index{ Name: convertSystemNamed(indexName, indexIsSytemNamed.Bool), Def: strings.Join(indexDef, ", "), Table: &table.Name, Columns: strings.Split(indexColumnName.String, ", "), } indexes = append(indexes, index) } table.Indexes = indexes tables = append(tables, table) } functions, err := m.getFunctions() if err != nil { return err } s.Functions = functions s.Tables = tables // relations relations := []*schema.Relation{} for _, l := range links { r := &schema.Relation{} table, err := s.FindTableByName(l.table) if err != nil { return err } r.Table = table for _, c := range l.columns { column, err := table.FindColumnByName(c) if err != nil { return err } r.Columns = append(r.Columns, column) column.ParentRelations = append(column.ParentRelations, r) } parentTable, err := s.FindTableByName(l.parentTable) if err != nil { return err } r.ParentTable = parentTable for _, c := range l.parentColumns { column, err := parentTable.FindColumnByName(c) if err != nil { return err } r.ParentColumns = append(r.ParentColumns, column) column.ChildRelations = append(column.ChildRelations, r) } relations = append(relations, r) } s.Relations = relations // referenced tables of view for _, t := range s.Tables { if t.Type != "VIEW" { continue } for _, rts := range ddl.ParseReferencedTables(t.Def) { rt, err := s.FindTableByName(rts) if err != nil { rt = &schema.Table{ Name: rts, External: true, } } t.ReferencedTables = append(t.ReferencedTables, rt) } } return nil } const query = `select schema_name(obj.schema_id) as schema_name, obj.name as name, case type when 'FN' then 'SQL scalar function' when 'TF' then 'SQL table-valued-function' when 'IF' then 'SQL inline table-valued function' when 'P' then 'SQL Stored Procedure' when 'X' then 'Extended stored procedure' end as type, TYPE_NAME(ret.user_type_id) as return_type, substring(par.parameters, 0, len(par.parameters)) as parameters from sys.objects obj join sys.sql_modules mod on mod.object_id = obj.object_id cross apply 
(select p.name + ' ' + TYPE_NAME(p.user_type_id) + ', ' from sys.parameters p where p.object_id = obj.object_id and p.parameter_id != 0 for xml path ('') ) par (parameters) left join sys.parameters ret on obj.object_id = ret.object_id and ret.parameter_id = 0 where obj.type in ('FN', 'TF', 'IF', 'P', 'X') order by schema_name, name;` func (m *Mssql) getFunctions() ([]*schema.Function, error) { functions := []*schema.Function{} functionsResult, err := m.db.Query(query) if err != nil { return nil, errors.WithStack(err) } defer functionsResult.Close() for functionsResult.Next() { var ( schemaName string name string typeValue string returnType sql.NullString arguments sql.NullString ) err := functionsResult.Scan(&schemaName, &name, &typeValue, &returnType, &arguments) if err != nil { return functions, errors.WithStack(err) } function := &schema.Function{ Name: fullTableName(schemaName, name), Type: typeValue, ReturnType: returnType.String, Arguments: arguments.String, } functions = append(functions, function) } return functions, nil } func
(owner string, tableName string) string { return fmt.Sprintf("%s.%s", owner, tableName) } func (m *Mssql) Info() (*schema.Driver, error) { var v string row := m.db.QueryRow(`SELECT @@VERSION`) err := row.Scan(&v) if err != nil { return nil, err } dct := dict.New() dct.Merge(map[string]string{ "Functions": "Stored procedures and functions", }) d := &schema.Driver{ Name: "sqlserver", DatabaseVersion: v, Meta: &schema.DriverMeta{ Dict: &dct, }, } return d, nil } func convertTableType(t string) string { switch strings.Trim(t, " ") { case "U": return "BASIC TABLE" case "V": return "VIEW" default: return t } } func convertColumnType(t string, maxLength int) string { switch t { case "varchar": var len string = strconv.Itoa(maxLength) if maxLength == -1 { len = "MAX" } return fmt.Sprintf("varchar(%s)", len) case "nvarchar": //nvarchar length is 2 byte, return character length var len string = strconv.Itoa(maxLength / 2) if maxLength == -1 { len = "MAX" } return fmt.Sprintf("nvarchar(%s)", len) case "varbinary": var len string = strconv.Itoa(maxLength) if maxLength == -1 { len = "MAX" } return fmt.Sprintf("varbinary(%s)", len) default: return t } } func convertSystemNamed(name string, isSytemNamed bool) string { if isSytemNamed { return reSystemNamed.ReplaceAllString(name, "*") } return name }
fullTableName
test_01_getting_started.py
from testbook import testbook

from tests.conftest import REPO_ROOT


@testbook(REPO_ROOT / "examples/01-Getting-started.ipynb", execute=False)
def test_func(tb):
    # Executes the 01-Getting-started notebook end to end. Before running it,
    # inject a patch so the notebook's MovieLens loader returns small
    # synthetic train/valid splits instead of downloading the real dataset.
    tb.inject(
        """
from unittest.mock import patch
from merlin.datasets.synthetic import generate_data
mock_train, mock_valid = generate_data(
    input="movielens-1m",
    num_rows=1000,
    set_sizes=(0.8, 0.2)
)
p1 = patch(
    "merlin.datasets.entertainment.get_movielens",
    return_value=[mock_train, mock_valid]
)
p1.start()
"""
    )
    tb.execute()
    # `metrics` is a variable defined inside the notebook; pull it back out
    # and verify the model reported exactly the expected metric names.
    metrics = tb.ref("metrics")
    assert sorted(list(metrics.keys())) == [
        "loss",
        "rating_binary/binary_classification_task/auc",
        "rating_binary/binary_classification_task/binary_accuracy",
        "rating_binary/binary_classification_task/precision",
        "rating_binary/binary_classification_task/recall",
        "regularization_loss",
        "total_loss",
    ]
acceleration.rs
use specs::prelude::*;

/// 2D acceleration component attached to an entity.
#[derive(Debug, Deserialize)]
pub struct Acceleration {
    pub vector: Vector2<f32>,
}

impl Acceleration {
    /// Builds an acceleration from its x and y components.
    pub fn new(x: f32, y: f32) -> Self {
        Acceleration {
            vector: Vector2::new(x, y),
        }
    }
}

impl Component for Acceleration {
    type Storage = VecStorage<Self>;
}
use nalgebra::Vector2; use serde::Deserialize;
memory.rs
// Copyright 2018 Parity Technologies (UK) Ltd. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. use bytes::{Bytes, IntoBuf}; use futures::{future::{self, FutureResult}, prelude::*, stream, sync::mpsc}; use multiaddr::{Protocol, Multiaddr}; use parking_lot::Mutex; use rw_stream_sink::RwStreamSink; use std::{io, sync::Arc}; use Transport; /// Builds a new pair of `Transport`s. The dialer can reach the listener by dialing `/memory`. #[inline] pub fn connector() -> (Dialer, Listener) { let (tx, rx) = mpsc::unbounded(); (Dialer(tx), Listener(Arc::new(Mutex::new(rx)))) } /// Same as `connector()`, but allows customizing the type used for transmitting packets between /// the two endpoints. #[inline] pub fn connector_custom_type<T>() -> (Dialer<T>, Listener<T>) { let (tx, rx) = mpsc::unbounded(); (Dialer(tx), Listener(Arc::new(Mutex::new(rx)))) } /// Dialing end of the memory transport. 
pub struct Dialer<T = Bytes>(mpsc::UnboundedSender<Chan<T>>); impl<T> Clone for Dialer<T> { fn clone(&self) -> Self { Dialer(self.0.clone()) } } impl<T: IntoBuf + Send + 'static> Transport for Dialer<T> { type Output = Channel<T>; type Listener = Box<Stream<Item=(Self::ListenerUpgrade, Multiaddr), Error=io::Error> + Send>; type ListenerUpgrade = FutureResult<Self::Output, io::Error>; type Dial = Box<Future<Item=Self::Output, Error=io::Error> + Send>; fn listen_on(self, addr: Multiaddr) -> Result<(Self::Listener, Multiaddr), (Self, Multiaddr)> { Err((self, addr)) } fn dial(self, addr: Multiaddr) -> Result<Self::Dial, (Self, Multiaddr)> { if !is_memory_addr(&addr) { return Err((self, addr)) } let (a_tx, a_rx) = mpsc::unbounded(); let (b_tx, b_rx) = mpsc::unbounded(); let a = Chan { incoming: a_rx, outgoing: b_tx }; let b = Chan { incoming: b_rx, outgoing: a_tx }; let future = self.0.send(b) .map(move |_| a.into()) .map_err(|_| io::ErrorKind::ConnectionRefused.into()); Ok(Box::new(future)) } fn nat_traversal(&self, server: &Multiaddr, observed: &Multiaddr) -> Option<Multiaddr> { if server == observed { Some(server.clone()) } else { None } } } /// Receiving end of the memory transport. 
pub struct Listener<T = Bytes>(Arc<Mutex<mpsc::UnboundedReceiver<Chan<T>>>>); impl<T> Clone for Listener<T> { fn clone(&self) -> Self { Listener(self.0.clone()) } } impl<T: IntoBuf + Send + 'static> Transport for Listener<T> { type Output = Channel<T>; type Listener = Box<Stream<Item=(Self::ListenerUpgrade, Multiaddr), Error=io::Error> + Send>; type ListenerUpgrade = FutureResult<Self::Output, io::Error>; type Dial = Box<Future<Item=Self::Output, Error=io::Error> + Send>; fn listen_on(self, addr: Multiaddr) -> Result<(Self::Listener, Multiaddr), (Self, Multiaddr)> { if !is_memory_addr(&addr) { return Err((self, addr)) } let addr2 = addr.clone(); let receiver = self.0.clone(); let stream = stream::poll_fn(move || receiver.lock().poll()) .map(move |channel| { (future::ok(channel.into()), addr.clone()) }) .map_err(|()| unreachable!()); Ok((Box::new(stream), addr2)) } #[inline] fn dial(self, addr: Multiaddr) -> Result<Self::Dial, (Self, Multiaddr)> { Err((self, addr)) } #[inline] fn nat_traversal(&self, server: &Multiaddr, observed: &Multiaddr) -> Option<Multiaddr> { if server == observed { Some(server.clone()) } else { None } } } /// Returns `true` if and only if the address is `/memory`. fn is_memory_addr(a: &Multiaddr) -> bool { let mut iter = a.iter(); if iter.next() != Some(Protocol::Memory) { return false; } if iter.next().is_some()
true } /// A channel represents an established, in-memory, logical connection between two endpoints. /// /// Implements `AsyncRead` and `AsyncWrite`. pub type Channel<T> = RwStreamSink<Chan<T>>; /// A channel represents an established, in-memory, logical connection between two endpoints. /// /// Implements `Sink` and `Stream`. pub struct Chan<T = Bytes> { incoming: mpsc::UnboundedReceiver<T>, outgoing: mpsc::UnboundedSender<T>, } impl<T> Stream for Chan<T> { type Item = T; type Error = io::Error; #[inline] fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { self.incoming.poll().map_err(|()| io::ErrorKind::ConnectionReset.into()) } } impl<T> Sink for Chan<T> { type SinkItem = T; type SinkError = io::Error; #[inline] fn start_send(&mut self, item: Self::SinkItem) -> StartSend<Self::SinkItem, Self::SinkError> { self.outgoing.start_send(item).map_err(|_| io::ErrorKind::ConnectionReset.into()) } #[inline] fn poll_complete(&mut self) -> Poll<(), Self::SinkError> { self.outgoing.poll_complete().map_err(|_| io::ErrorKind::ConnectionReset.into()) } #[inline] fn close(&mut self) -> Poll<(), Self::SinkError> { self.outgoing.close().map_err(|_| io::ErrorKind::ConnectionReset.into()) } } impl<T: IntoBuf> Into<RwStreamSink<Chan<T>>> for Chan<T> { #[inline] fn into(self) -> RwStreamSink<Chan<T>> { RwStreamSink::new(self) } }
{ return false; }
eval.rs
//! Main evaluator loop and setting up the initial stack frame. use std::convert::TryFrom; use std::ffi::OsStr; use log::info; use rustc_hir::def_id::DefId; use rustc_middle::ty::{self, layout::LayoutCx, TyCtxt}; use rustc_target::abi::LayoutOf; use rustc_target::spec::abi::Abi; use crate::*; #[derive(Copy, Clone, Debug, PartialEq)] pub enum AlignmentCheck { /// Do not check alignment. None, /// Check alignment "symbolically", i.e., using only the requested alignment for an allocation and not its real base address. Symbolic, /// Check alignment on the actual physical integer address. Int, } #[derive(Copy, Clone, Debug, PartialEq)] pub enum RejectOpWith { /// Isolated op is rejected with an abort of the machine. Abort, /// If not Abort, miri returns an error for an isolated op. /// Following options determine if user should be warned about such error. /// Do not print warning about rejected isolated op. NoWarning, /// Print a warning about rejected isolated op, with backtrace. Warning, /// Print a warning about rejected isolated op, without backtrace. WarningWithoutBacktrace, } #[derive(Copy, Clone, Debug, PartialEq)] pub enum IsolatedOp { /// Reject an op requiring communication with the host. By /// default, miri rejects the op with an abort. If not, it returns /// an error code, and prints a warning about it. Warning levels /// are controlled by `RejectOpWith` enum. Reject(RejectOpWith), /// Execute op requiring communication with the host, i.e. disable isolation. Allow, } /// Configuration needed to spawn a Miri instance. #[derive(Clone)] pub struct MiriConfig { /// Determine if validity checking is enabled. pub validate: bool, /// Determines if Stacked Borrows is enabled. pub stacked_borrows: bool, /// Controls alignment checking. pub check_alignment: AlignmentCheck, /// Controls function [ABI](Abi) checking. pub check_abi: bool, /// Action for an op requiring communication with the host. 
pub isolated_op: IsolatedOp, /// Determines if memory leaks should be ignored. pub ignore_leaks: bool, /// Environment variables that should always be isolated from the host. pub excluded_env_vars: Vec<String>, /// Command-line arguments passed to the interpreted program. pub args: Vec<String>, /// The seed to use when non-determinism or randomness are required (e.g. ptr-to-int cast, `getrandom()`). pub seed: Option<u64>, /// The stacked borrows pointer id to report about pub tracked_pointer_tag: Option<PtrId>, /// The stacked borrows call ID to report about pub tracked_call_id: Option<CallId>, /// The allocation id to report about. pub tracked_alloc_id: Option<AllocId>, /// Whether to track raw pointers in stacked borrows. pub track_raw: bool, /// Determine if data race detection should be enabled pub data_race_detector: bool, /// Rate of spurious failures for compare_exchange_weak atomic operations, /// between 0.0 and 1.0, defaulting to 0.8 (80% chance of failure). pub cmpxchg_weak_failure_rate: f64, /// If `Some`, enable the `measureme` profiler, writing results to a file /// with the specified prefix. pub measureme_out: Option<String>, /// Panic when unsupported functionality is encountered pub panic_on_unsupported: bool, } impl Default for MiriConfig { fn default() -> MiriConfig { MiriConfig { validate: true, stacked_borrows: true, check_alignment: AlignmentCheck::Int, check_abi: true, isolated_op: IsolatedOp::Reject(RejectOpWith::Abort), ignore_leaks: false, excluded_env_vars: vec![], args: vec![], seed: None, tracked_pointer_tag: None, tracked_call_id: None, tracked_alloc_id: None, track_raw: false, data_race_detector: true, cmpxchg_weak_failure_rate: 0.8, measureme_out: None, panic_on_unsupported: false, } } } /// Returns a freshly created `InterpCx`, along with an `MPlaceTy` representing /// the location where the return value of the `start` lang item will be /// written to. /// Public because this is also used by `priroda`. 
pub fn create_ecx<'mir, 'tcx: 'mir>( tcx: TyCtxt<'tcx>, main_id: DefId, config: MiriConfig, ) -> InterpResult<'tcx, (InterpCx<'mir, 'tcx, Evaluator<'mir, 'tcx>>, MPlaceTy<'tcx, Tag>)> { let param_env = ty::ParamEnv::reveal_all(); let layout_cx = LayoutCx { tcx, param_env }; let mut ecx = InterpCx::new( tcx, rustc_span::source_map::DUMMY_SP, param_env, Evaluator::new(&config, layout_cx), MemoryExtra::new(&config), ); // Complete initialization. EnvVars::init(&mut ecx, config.excluded_env_vars)?; MemoryExtra::init_extern_statics(&mut ecx)?; // Setup first stack-frame let main_instance = ty::Instance::mono(tcx, main_id); let main_mir = ecx.load_mir(main_instance.def, None)?; if main_mir.arg_count != 0 { bug!("main function must not take any arguments"); } let start_id = tcx.lang_items().start_fn().unwrap(); let main_ret_ty = tcx.fn_sig(main_id).output(); let main_ret_ty = main_ret_ty.no_bound_vars().unwrap(); let start_instance = ty::Instance::resolve( tcx, ty::ParamEnv::reveal_all(), start_id, tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(main_ret_ty))), ) .unwrap() .unwrap(); // First argument: pointer to `main()`. let main_ptr = ecx.memory.create_fn_alloc(FnVal::Instance(main_instance)); // Second argument (argc): length of `config.args`. let argc = Scalar::from_machine_usize(u64::try_from(config.args.len()).unwrap(), &ecx); // Third argument (`argv`): created from `config.args`. let argv = { // Put each argument in memory, collect pointers. let mut argvs = Vec::<Scalar<Tag>>::new(); for arg in config.args.iter() { // Make space for `0` terminator. let size = u64::try_from(arg.len()).unwrap().checked_add(1).unwrap(); let arg_type = tcx.mk_array(tcx.types.u8, size); let arg_place = ecx.allocate(ecx.layout_of(arg_type)?, MiriMemoryKind::Machine.into())?; ecx.write_os_str_to_c_str(OsStr::new(arg), arg_place.ptr, size)?; argvs.push(arg_place.ptr); } // Make an array with all these pointers, in the Miri memory. 
let argvs_layout = ecx.layout_of( tcx.mk_array(tcx.mk_imm_ptr(tcx.types.u8), u64::try_from(argvs.len()).unwrap()), )?; let argvs_place = ecx.allocate(argvs_layout, MiriMemoryKind::Machine.into())?; for (idx, arg) in argvs.into_iter().enumerate() { let place = ecx.mplace_field(&argvs_place, idx)?; ecx.write_scalar(arg, &place.into())?; } ecx.memory.mark_immutable(argvs_place.ptr.assert_ptr().alloc_id)?; // A pointer to that place is the 3rd argument for main. let argv = argvs_place.ptr; // Store `argc` and `argv` for macOS `_NSGetArg{c,v}`. { let argc_place = ecx.allocate(ecx.machine.layouts.isize, MiriMemoryKind::Machine.into())?; ecx.write_scalar(argc, &argc_place.into())?; ecx.machine.argc = Some(argc_place.ptr); let argv_place = ecx.allocate( ecx.layout_of(tcx.mk_imm_ptr(tcx.types.unit))?, MiriMemoryKind::Machine.into(), )?; ecx.write_scalar(argv, &argv_place.into())?; ecx.machine.argv = Some(argv_place.ptr); } // Store command line as UTF-16 for Windows `GetCommandLineW`. { // Construct a command string with all the aguments. let mut cmd = String::new(); for arg in config.args.iter() { if !cmd.is_empty() { cmd.push(' '); } cmd.push_str(&*shell_escape::windows::escape(arg.as_str().into())); } // Don't forget `0` terminator. cmd.push(std::char::from_u32(0).unwrap()); let cmd_utf16: Vec<u16> = cmd.encode_utf16().collect(); let cmd_type = tcx.mk_array(tcx.types.u16, u64::try_from(cmd_utf16.len()).unwrap()); let cmd_place = ecx.allocate(ecx.layout_of(cmd_type)?, MiriMemoryKind::Machine.into())?; ecx.machine.cmd_line = Some(cmd_place.ptr); // Store the UTF-16 string. We just allocated so we know the bounds are fine. for (idx, &c) in cmd_utf16.iter().enumerate() { let place = ecx.mplace_field(&cmd_place, idx)?; ecx.write_scalar(Scalar::from_u16(c), &place.into())?; } } argv }; // Return place (in static memory so that it does not count as leak). let ret_place = ecx.allocate(ecx.machine.layouts.isize, MiriMemoryKind::Machine.into())?; // Call start function. 
ecx.call_function( start_instance, Abi::Rust, &[main_ptr.into(), argc.into(), argv.into()], Some(&ret_place.into()), StackPopCleanup::None { cleanup: true }, )?;
} /// Evaluates the main function specified by `main_id`. /// Returns `Some(return_code)` if program executed completed. /// Returns `None` if an evaluation error occured. pub fn eval_main<'tcx>(tcx: TyCtxt<'tcx>, main_id: DefId, config: MiriConfig) -> Option<i64> { // Copy setting before we move `config`. let ignore_leaks = config.ignore_leaks; let (mut ecx, ret_place) = match create_ecx(tcx, main_id, config) { Ok(v) => v, Err(err) => { err.print_backtrace(); panic!("Miri initialization error: {}", err.kind()) } }; // Perform the main execution. let res: InterpResult<'_, i64> = (|| { // Main loop. loop { let info = ecx.preprocess_diagnostics(); match ecx.schedule()? { SchedulingAction::ExecuteStep => { assert!(ecx.step()?, "a terminated thread was scheduled for execution"); } SchedulingAction::ExecuteTimeoutCallback => { assert!( ecx.machine.communicate(), "scheduler callbacks require disabled isolation, but the code \ that created the callback did not check it" ); ecx.run_timeout_callback()?; } SchedulingAction::ExecuteDtors => { // This will either enable the thread again (so we go back // to `ExecuteStep`), or determine that this thread is done // for good. ecx.schedule_next_tls_dtor_for_active_thread()?; } SchedulingAction::Stop => { break; } } ecx.process_diagnostics(info); } let return_code = ecx.read_scalar(&ret_place.into())?.check_init()?.to_machine_isize(&ecx)?; Ok(return_code) })(); // Machine cleanup. EnvVars::cleanup(&mut ecx).unwrap(); // Process the result. match res { Ok(return_code) => { if !ignore_leaks { info!("Additonal static roots: {:?}", ecx.machine.static_roots); let leaks = ecx.memory.leak_report(&ecx.machine.static_roots); if leaks != 0 { tcx.sess.err("the evaluated program leaked memory"); // Ignore the provided return code - let the reported error // determine the return code. return None; } } Some(return_code) } Err(e) => report_error(&ecx, e), } }
Ok((ecx, ret_place))
approximate_pattern_matching.rs
extern crate bio; use bio::seq; use bio::dna::Dna; /// Approximate Pattern Matching Problem: Find all approximate occurrences of a pattern in a string. /// Input: Strings Pattern and Text along with an integer d. /// Output: All starting positions where Pattern appears as a substring of Text with at most d mismatches. fn main() {
let mut pattern_string = String::new(); let mut dna_string = String::new(); let mut d_string = String::new(); bio::io::read_line(&mut pattern_string); bio::io::read_line(&mut dna_string); bio::io::read_line(&mut d_string); let pattern = Dna::from_string(pattern_string); let dna = Dna::from_string(dna_string); let d = d_string.parse::<usize>().unwrap(); let (indices, _) = seq::find_by(&dna, &pattern, |chunk, pat| seq::hamming_distance(chunk, pat) <= d); bio::io::print_vec(&indices); }
move_unit_tests.rs
// Copyright (c) Aptos // SPDX-License-Identifier: Apache-2.0 use aptos_vm::natives::aptos_natives; use framework::diem_framework_named_addresses; use move_compiler::shared::NumericalAddress; use move_unit_test::UnitTestingConfig; #[test] fn move_unit_tests() { let mut named_addresses = diem_framework_named_addresses(); named_addresses.insert( "HelloBlockchain".to_owned(), NumericalAddress::parse_str("0xe110").unwrap(),
); let config = UnitTestingConfig::default_with_bound(Some(100_000)).with_named_addresses(named_addresses); move_unit_test::cargo_runner::run_tests_with_config_and_filter( config, "sources", r".*\.move$", Some(&move_stdlib::move_stdlib_modules_full_path()), Some(aptos_natives()), ); }
stream.go
package twitter import ( "bytes" "encoding/json" "io" "io/ioutil" "net/http" "net/url" "strings" "sync" "time" ) // Stream connects to a streaming endpoint on the Twitter API. // It receives messages from the streaming endpoint and sends them on the // Queue channel from a goroutine. type Stream struct { Config Config APIInfo APIInfo EndPointInfo *EndPointInfo AttemptTime time.Time Time time.Time HTTPRequest *http.Request HTTPResponse *http.Response Retryable *bool PayLoad interface{} Error error Data interface{} Handlers StreamHandlers Retryer MessageQueue chan interface{} rawData chan []byte done chan struct{} errorChan chan error waitGroup *sync.WaitGroup body io.ReadCloser } // NewStream returns a new stream object that is connect to a streaming endpoint. // If there is no error in the stream connection you can start reading messages // from the Queue channel func (c *Client) NewStream(endpoint *EndPointInfo, input, output interface{}) *Stream { return createStream(*c.Config, c.APIInfo, c.Retryer, endpoint, input, output) } func createStream(cfg Config, apiInfo APIInfo, retryer Retryer, endpointInfo *EndPointInfo, payLoad interface{}, data interface{}) *Stream { var err error if retryer == nil { retryer = noRetryer{} } if err = endpointInfo.Validate(); err != nil { return &Stream{Error: err} } httpReq, err := http.NewRequest(endpointInfo.HTTPMethod, "", nil) if err != nil { return &Stream{Error: err} } httpReq.Header.Add("Content-type", "application/json") httpReq.URL, err = url.Parse(apiInfo.Endpoint + "/" + apiInfo.APIVersion + "/" + endpointInfo.HTTPPath) if err != nil { httpReq.URL = &url.URL{} return &Stream{Error: err} } if endpointInfo.QueryParams != nil { q := httpReq.URL.Query() for k, v := range endpointInfo.QueryParams { q.Add(k, v) } httpReq.URL.RawQuery = q.Encode() } if payLoad != nil { b, err := json.Marshal(payLoad) if err != nil { return &Stream{Error: err} } httpReq.Body = ioutil.NopCloser(strings.NewReader(string(b))) } handlers := 
StreamHandlers{ Send: StreamSendHandler, Sign: StreamSigner, } s := &Stream{ Config: cfg, APIInfo: apiInfo, EndPointInfo: endpointInfo, Handlers: handlers.Copy(), Retryer: retryer, Time: time.Now(), HTTPRequest: httpReq, waitGroup: &sync.WaitGroup{}, PayLoad: payLoad, Error: err, Data: data, MessageQueue: make(chan interface{}), rawData: make(chan []byte), done: make(chan struct{}), } s.waitGroup.Add(2) go s.consume() go s.processMessage() return s } func (s *Stream) consume() { defer close(s.rawData) defer s.waitGroup.Done() for !s.stopped() { s.Error = nil s.AttemptTime = time.Now() if err := s.sign(); err != nil { s.Config.Logger.Error().Err(err).Msg("Failed to sign stream request") return } if err := s.sendRequest(); err == nil { s.receive(s.body) } s.Handlers.Retry.Run(s) if s.Error != nil || !BoolValue(s.Retryable) { s.Config.Logger.Error().Err(s.Error).Msg("stream request can not be retried") return } } } // Sign will sign the request, returning error if errors are encountered. func (s *Stream) sign() error { s.Handlers.Sign.Run(s) return s.Error } func (s *Stream) sendRequest() (sendErr error) { s.Retryable = nil s.Handlers.Send.Run(s) if s.Error != nil { return s.Error } s.body = s.HTTPResponse.Body return nil } func (s *Stream) stopped() bool { select { case <-s.done: return true default: return false } } // Stop signals retry and receiver to stop, closes the Messages channel, and // blocks until done. func (s *Stream) Stop() { close(s.done) // Scanner does not have a Stop() or take a done channel, so for low volume // streams Scan() blocks until the next keep-alive. Close the resp.Body to // escape and stop the stream in a timely fashion. 
if s.body != nil { s.body.Close() } // block until the retry goroutine stops s.waitGroup.Wait() } func (s *Stream) receive(body io.Reader) { reader := newStreamResponseBodyReader(body) for !s.stopped() { data, err := reader.readNext() if err != nil { return } if len(data) == 0 { // empty keep-alive continue } select { // send messages, data, or errors case s.rawData <- data: continue // allow client to Stop(), even if not receiving case <-s.done: return } } } func (s *Stream) processMessage() { defer close(s.MessageQueue) defer s.waitGroup.Done() for !s.stopped() { messageBytes, ok := <-s.rawData if !ok { return } message, err := s.getMessage(messageBytes) if err != nil { s.Retryable = Bool(false) return } select { // send messages, data, or errors case s.MessageQueue <- message: continue // allow client to Stop(), even if not receiving case <-s.done: return } } } func (s *Stream) getMessage(messageBytes []byte) (interface{}, error) { reader := bytes.NewReader(messageBytes) err := UnmarshalJSON(s.Data, reader) if err != nil
return s.Data, nil }
{ return nil, err }
test_date.py
#! /usr/bin/env python # $Id: test_date.py 4667 2006-07-12 21:40:56Z wiemann $ # Author: David Goodger <[email protected]> # Copyright: This module has been placed in the public domain. """ Tests for the misc.py "date" directive. """ from __init__ import DocutilsTestSupport import time def suite(): s = DocutilsTestSupport.ParserTestSuite() s.generateTests(totest) return s totest = {} totest['date'] = [ ["""\ .. |date| date:: Today's date is |date|. """, """\ <document source="test data"> <substitution_definition names="date"> %s <paragraph> Today's date is \n\ <substitution_reference refname="date"> date . """ % time.strftime('%Y-%m-%d')], ["""\ .. |date| date:: %a, %d %b %Y """, """\ <document source="test data"> <substitution_definition names="date"> %s """ % time.strftime('%a, %d %b %Y')], ["""\ .. date:: """, """\ <document source="test data"> <system_message level="3" line="1" source="test data" type="ERROR"> <paragraph> Invalid context: the "date" directive can only be used within a substitution definition. <literal_block xml:space="preserve">
if __name__ == '__main__': import unittest unittest.main(defaultTest='suite')
.. date:: """], ]
deploy_environment.go
// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved. // This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. // Code generated. DO NOT EDIT. // DevOps API // // Use the DevOps API to create DevOps projects, configure code repositories, add artifacts to deploy, build and test software applications, configure target deployment environments, and deploy software applications. For more information, see DevOps (https://docs.cloud.oracle.com/Content/devops/using/home.htm). // package devops import ( "encoding/json" "github.com/oracle/oci-go-sdk/v54/common" ) // DeployEnvironment The target OCI resources, such as Compute instances, Container Engine for Kubernetes(OKE) clusters, or Function, where artifacts will be deployed. type DeployEnvironment interface { // Unique identifier that is immutable on creation. GetId() *string // The OCID of a project. GetProjectId() *string // The OCID of a compartment. GetCompartmentId() *string // Optional description about the deployment environment. GetDescription() *string // Deployment environment display name, which can be renamed and is not necessarily unique. Avoid entering confidential information. GetDisplayName() *string // Time the deployment environment was created. Format defined by RFC3339 (https://datatracker.ietf.org/doc/html/rfc3339). GetTimeCreated() *common.SDKTime // Time the deployment environment was updated. Format defined by RFC3339 (https://datatracker.ietf.org/doc/html/rfc3339). GetTimeUpdated() *common.SDKTime // The current state of the deployment environment. GetLifecycleState() DeployEnvironmentLifecycleStateEnum // A message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state. 
GetLifecycleDetails() *string // Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. See Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{"bar-key": "value"}` GetFreeformTags() map[string]string // Defined tags for this resource. Each key is predefined and scoped to a namespace. See Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{"foo-namespace": {"bar-key": "value"}}` GetDefinedTags() map[string]map[string]interface{} // Usage of system tag keys. These predefined keys are scoped to namespaces. See Resource Tags (https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm). Example: `{"orcl-cloud": {"free-tier-retained": "true"}}` GetSystemTags() map[string]map[string]interface{} } type deployenvironment struct { JsonData []byte Id *string `mandatory:"true" json:"id"` ProjectId *string `mandatory:"true" json:"projectId"` CompartmentId *string `mandatory:"true" json:"compartmentId"` Description *string `mandatory:"false" json:"description"` DisplayName *string `mandatory:"false" json:"displayName"` TimeCreated *common.SDKTime `mandatory:"false" json:"timeCreated"` TimeUpdated *common.SDKTime `mandatory:"false" json:"timeUpdated"` LifecycleState DeployEnvironmentLifecycleStateEnum `mandatory:"false" json:"lifecycleState,omitempty"` LifecycleDetails *string `mandatory:"false" json:"lifecycleDetails"` FreeformTags map[string]string `mandatory:"false" json:"freeformTags"` DefinedTags map[string]map[string]interface{} `mandatory:"false" json:"definedTags"` SystemTags map[string]map[string]interface{} `mandatory:"false" json:"systemTags"` DeployEnvironmentType string `json:"deployEnvironmentType"` } // UnmarshalJSON unmarshals json func (m *deployenvironment) UnmarshalJSON(data []byte) error { m.JsonData = data type Unmarshalerdeployenvironment deployenvironment s := struct { Model 
Unmarshalerdeployenvironment }{} err := json.Unmarshal(data, &s.Model) if err != nil { return err } m.Id = s.Model.Id m.ProjectId = s.Model.ProjectId m.CompartmentId = s.Model.CompartmentId m.Description = s.Model.Description m.DisplayName = s.Model.DisplayName m.TimeCreated = s.Model.TimeCreated m.TimeUpdated = s.Model.TimeUpdated m.LifecycleState = s.Model.LifecycleState m.LifecycleDetails = s.Model.LifecycleDetails m.FreeformTags = s.Model.FreeformTags m.DefinedTags = s.Model.DefinedTags m.SystemTags = s.Model.SystemTags m.DeployEnvironmentType = s.Model.DeployEnvironmentType return err } // UnmarshalPolymorphicJSON unmarshals polymorphic json func (m *deployenvironment) UnmarshalPolymorphicJSON(data []byte) (interface{}, error) { if data == nil || string(data) == "null" { return nil, nil } var err error switch m.DeployEnvironmentType { case "COMPUTE_INSTANCE_GROUP": mm := ComputeInstanceGroupDeployEnvironment{} err = json.Unmarshal(data, &mm) return mm, err case "OKE_CLUSTER": mm := OkeClusterDeployEnvironment{} err = json.Unmarshal(data, &mm) return mm, err case "FUNCTION": mm := FunctionDeployEnvironment{} err = json.Unmarshal(data, &mm) return mm, err default: return *m, nil } } //GetId returns Id func (m deployenvironment) GetId() *string { return m.Id } //GetProjectId returns ProjectId func (m deployenvironment) GetProjectId() *string { return m.ProjectId } //GetCompartmentId returns CompartmentId func (m deployenvironment) GetCompartmentId() *string { return m.CompartmentId } //GetDescription returns Description func (m deployenvironment) GetDescription() *string { return m.Description } //GetDisplayName returns DisplayName func (m deployenvironment) GetDisplayName() *string { return m.DisplayName } //GetTimeCreated returns TimeCreated func (m deployenvironment) GetTimeCreated() *common.SDKTime { return m.TimeCreated } //GetTimeUpdated returns TimeUpdated func (m deployenvironment) GetTimeUpdated() *common.SDKTime { return m.TimeUpdated } 
//GetLifecycleState returns LifecycleState func (m deployenvironment) GetLifecycleState() DeployEnvironmentLifecycleStateEnum { return m.LifecycleState } //GetLifecycleDetails returns LifecycleDetails func (m deployenvironment) GetLifecycleDetails() *string { return m.LifecycleDetails } //GetFreeformTags returns FreeformTags func (m deployenvironment) GetFreeformTags() map[string]string { return m.FreeformTags } //GetDefinedTags returns DefinedTags func (m deployenvironment) GetDefinedTags() map[string]map[string]interface{} { return m.DefinedTags } //GetSystemTags returns SystemTags func (m deployenvironment) GetSystemTags() map[string]map[string]interface{} { return m.SystemTags } func (m deployenvironment) String() string { return common.PointerString(m) } // DeployEnvironmentLifecycleStateEnum Enum with underlying type: string type DeployEnvironmentLifecycleStateEnum string // Set of constants representing the allowable values for DeployEnvironmentLifecycleStateEnum const ( DeployEnvironmentLifecycleStateCreating DeployEnvironmentLifecycleStateEnum = "CREATING" DeployEnvironmentLifecycleStateUpdating DeployEnvironmentLifecycleStateEnum = "UPDATING" DeployEnvironmentLifecycleStateActive DeployEnvironmentLifecycleStateEnum = "ACTIVE" DeployEnvironmentLifecycleStateDeleting DeployEnvironmentLifecycleStateEnum = "DELETING" DeployEnvironmentLifecycleStateDeleted DeployEnvironmentLifecycleStateEnum = "DELETED" DeployEnvironmentLifecycleStateFailed DeployEnvironmentLifecycleStateEnum = "FAILED" ) var mappingDeployEnvironmentLifecycleState = map[string]DeployEnvironmentLifecycleStateEnum{ "CREATING": DeployEnvironmentLifecycleStateCreating, "UPDATING": DeployEnvironmentLifecycleStateUpdating, "ACTIVE": DeployEnvironmentLifecycleStateActive, "DELETING": DeployEnvironmentLifecycleStateDeleting, "DELETED": DeployEnvironmentLifecycleStateDeleted, "FAILED": DeployEnvironmentLifecycleStateFailed, } // GetDeployEnvironmentLifecycleStateEnumValues Enumerates the set of values 
for DeployEnvironmentLifecycleStateEnum func GetDeployEnvironmentLifecycleStateEnumValues() []DeployEnvironmentLifecycleStateEnum { values := make([]DeployEnvironmentLifecycleStateEnum, 0) for _, v := range mappingDeployEnvironmentLifecycleState { values = append(values, v) } return values } // DeployEnvironmentDeployEnvironmentTypeEnum Enum with underlying type: string type DeployEnvironmentDeployEnvironmentTypeEnum string // Set of constants representing the allowable values for DeployEnvironmentDeployEnvironmentTypeEnum const ( DeployEnvironmentDeployEnvironmentTypeOkeCluster DeployEnvironmentDeployEnvironmentTypeEnum = "OKE_CLUSTER" DeployEnvironmentDeployEnvironmentTypeComputeInstanceGroup DeployEnvironmentDeployEnvironmentTypeEnum = "COMPUTE_INSTANCE_GROUP" DeployEnvironmentDeployEnvironmentTypeFunction DeployEnvironmentDeployEnvironmentTypeEnum = "FUNCTION" ) var mappingDeployEnvironmentDeployEnvironmentType = map[string]DeployEnvironmentDeployEnvironmentTypeEnum{ "OKE_CLUSTER": DeployEnvironmentDeployEnvironmentTypeOkeCluster, "COMPUTE_INSTANCE_GROUP": DeployEnvironmentDeployEnvironmentTypeComputeInstanceGroup, "FUNCTION": DeployEnvironmentDeployEnvironmentTypeFunction, } // GetDeployEnvironmentDeployEnvironmentTypeEnumValues Enumerates the set of values for DeployEnvironmentDeployEnvironmentTypeEnum func GetDeployEnvironmentDeployEnvironmentTypeEnumValues() []DeployEnvironmentDeployEnvironmentTypeEnum
{ values := make([]DeployEnvironmentDeployEnvironmentTypeEnum, 0) for _, v := range mappingDeployEnvironmentDeployEnvironmentType { values = append(values, v) } return values }
jwt.strategy.ts
import { Injectable, UnauthorizedException } from "@nestjs/common"; import { Strategy, ExtractJwt } from 'passport-jwt'; import { PassportStrategy } from "@nestjs/passport"; import { InjectRepository } from "@nestjs/typeorm"; import { UserRepository } from "src/user/user.repository"; @Injectable() export class JwtStrategy extends PassportStrategy(Strategy) { constructor( @InjectRepository(UserRepository) private repository: UserRepository ){ super({ jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(), secretOrKey: 'super-secret', });
const { id } = payload; const user = await this.repository.findOne(id, { select: ['name', 'email', 'role'] }); if(!user) { throw new UnauthorizedException('Usuário não encontrado.'); } return user; } }
} async validate(payload: {id: string}){
smpl.py
import torch import numpy as np import smplx from smplx import SMPL as _SMPL from smplx.body_models import ModelOutput
from smplx.lbs import vertices2joints import spin.config as config import spin.constants as constants class SMPL(_SMPL): """ Extension of the official SMPL implementation to support more joints """ def __init__(self, *args, **kwargs): super(SMPL, self).__init__(*args, **kwargs) joints = [constants.JOINT_MAP[i] for i in constants.JOINT_NAMES] J_regressor_extra = np.load(config.JOINT_REGRESSOR_TRAIN_EXTRA) self.register_buffer('J_regressor_extra', torch.tensor(J_regressor_extra, dtype=torch.float32)) self.joint_map = torch.tensor(joints, dtype=torch.long) def forward(self, *args, **kwargs): kwargs['get_skin'] = True smpl_output = super(SMPL, self).forward(*args, **kwargs) extra_joints = vertices2joints(self.J_regressor_extra, smpl_output.vertices) joints = torch.cat([smpl_output.joints, extra_joints], dim=1) joints = smpl_output.joints # print(smpl_output.joints.shape) # joints = joints[:, self.joint_map, :] output = ModelOutput(vertices=smpl_output.vertices, global_orient=smpl_output.global_orient, body_pose=smpl_output.body_pose, joints=joints, betas=smpl_output.betas, full_pose=smpl_output.full_pose) return output
app.component.ts
import {Component, OnInit} from '@angular/core'; import { ElectronService } from './core/services'; import { TranslateService } from '@ngx-translate/core'; import { AppConfig } from '../environments/environment'; declare let device; // Cordova inhered var. @Component({ selector: 'app-root', templateUrl: './app.component.html', styleUrls: ['./app.component.scss'] }) export class
implements OnInit { constructor( public electronService: ElectronService, private translate: TranslateService ) { translate.setDefaultLang('en'); console.log('AppConfig', AppConfig); if (electronService.isElectron) { console.log(process.env); console.log('Mode electron'); console.log('Electron ipcRenderer', electronService.ipcRenderer); console.log('NodeJS childProcess', electronService.childProcess); } else { console.log('Mode web'); } } ngOnInit(): void { // Used for IOS/Android. Triggers when device is ready and app opened. document.addEventListener("deviceready", function() { alert(device.platform); }, false); } }
AppComponent
kind.rs
/* Copyright (c) 2021 Jeremy Carter <[email protected]> All uses of this project in part or in whole are governed by the terms of the license contained in the file titled "LICENSE" that's distributed along with the project, which can be found in the top-level directory of this project. If you don't agree to follow those terms or you won't follow them, you are not allowed to use this project or anything that's made with parts of it at all. The project is also depending on some third-party technologies, and some of those are governed by their own separate licenses, so furthermore, whenever legally possible, all license terms from all of the different technologies apply, with this project's license terms taking first priority. */ use serde::{Deserialize, Serialize}; #[derive(Serialize, Deserialize, Debug)] pub enum NobKind { NobRootKind, } #[derive(Serialize, Deserialize, Debug)] pub enum
{ NobRootKind0, }
NobRootKind
start_many_to_1_ctrl.py
#!/usr/bin/python from mininet.net import Mininet from mininet.topo import Topo from mininet.cli import CLI from mininet.node import UserSwitch,RemoteController from mininet.term import makeTerm import os, time class MyTopo( Topo ): "Simple topology example." def
( self): "Create custom topo." # Add default members to class. Topo.__init__(self) # Add nodes Host1=self.addHost('h1', ip='10.0.0.1/24') Host2=self.addHost('h2', ip='10.0.0.2/24') switch1=self.addSwitch('s1') switch2=self.addSwitch('s2') switch3=self.addSwitch('s3') switch4=self.addSwitch('s4') switch5=self.addSwitch('s5') # Add edges self.addLink( Host1, switch1, 1, 1) self.addLink( switch1, switch2, 2, 1) self.addLink( switch1, switch3, 3, 1) self.addLink( switch1, switch4, 4, 1) self.addLink( switch2, switch5, 2, 1) self.addLink( switch3, switch5, 2, 2) self.addLink( switch4, switch5, 2, 3) self.addLink( switch5, Host2, 4, 1) ######Starting controller os.system("xterm -e 'ryu-manager ~/ryu/ryu/app/openstate/playground/forwarding_consistency_many_to_1_ctrl.py'&") ######Starting mininet topos = { 'mytopo': ( lambda: MyTopo() ) } mytopo=MyTopo() time.sleep(1) print("\n********************************** HELP *********************************************") print("Type \"python ~/ryu/ryu/app/openstate/echo_server.py 200\" in h2's xterm") print("Type \"nc 10.0.0.2 200\" in h1's xterm") print("Watching the tcpdump results, it is possible to see that forwarding consistency is guaranteed\n" "In order to test new path selection, close and reopen netcat") print("\nTo exit type \"ctrl+D\" or exit") print("*************************************************************************************") net = Mininet(topo=mytopo,switch=UserSwitch,controller=RemoteController,cleanup=True,autoSetMacs=True,autoStaticArp=True,listenPort=6634) net.start() os.system("xterm -e 'tcpdump -i s2-eth1'&") os.system("xterm -e 'tcpdump -i s3-eth1'&") os.system("xterm -e 'tcpdump -i s4-eth1'&") h1,h2 = net.hosts[0], net.hosts[1] makeTerm(h1) makeTerm(h2) CLI(net) net.stop() os.system("sudo mn -c") os.system("kill -9 $(pidof -x ryu-manager)")
__init__
utils.py
'''Some helper functions for PyTorch, including: - get_mean_and_std: calculate the mean and std value of dataset. - msr_init: net parameter initialization. - progress_bar: progress bar mimic xlua.progress. ''' import os import sys import time import math import torch import torch.nn as nn import torch.nn.init as init import numpy as np def get_mean_and_std(dataset): '''Compute the mean and std value of dataset.''' dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=True, num_workers=2) mean = torch.zeros(3) std = torch.zeros(3) print('==> Computing mean and std..') for inputs, targets in dataloader: for i in range(3): mean[i] += inputs[:,i,:,:].mean() std[i] += inputs[:,i,:,:].std() mean.div_(len(dataset)) std.div_(len(dataset)) return mean, std def init_params(net): '''Init layer parameters.''' for m in net.modules(): if isinstance(m, nn.Conv2d): init.kaiming_normal(m.weight, mode='fan_out') if m.bias: init.constant(m.bias, 0) elif isinstance(m, nn.BatchNorm2d): init.constant(m.weight, 1) init.constant(m.bias, 0) elif isinstance(m, nn.Linear): init.normal(m.weight, std=1e-3) if m.bias: init.constant(m.bias, 0) _, term_width = os.popen('stty size', 'r').read().split() term_width = int(term_width) TOTAL_BAR_LENGTH = 65. last_time = time.time() begin_time = last_time def progress_bar(current, total, msg=None): global last_time, begin_time if current == 0: begin_time = time.time() # Reset for new bar. 
cur_len = int(TOTAL_BAR_LENGTH*current/total) rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1 sys.stdout.write(' [') for i in range(cur_len): sys.stdout.write('=') sys.stdout.write('>') for i in range(rest_len): sys.stdout.write('.') sys.stdout.write(']') cur_time = time.time() step_time = cur_time - last_time last_time = cur_time tot_time = cur_time - begin_time L = [] L.append(' Step: %s' % format_time(step_time)) L.append(' | Tot: %s' % format_time(tot_time)) if msg: L.append(' | ' + msg) msg = ''.join(L) sys.stdout.write(msg) for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3): sys.stdout.write(' ') # Go back to the center of the bar. for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2): sys.stdout.write('\b') sys.stdout.write(' %d/%d ' % (current+1, total)) if current < total-1: sys.stdout.write('\r') else: sys.stdout.write('\n') sys.stdout.flush() def format_time(seconds): days = int(seconds / 3600/24) seconds = seconds - days*3600*24 hours = int(seconds / 3600) seconds = seconds - hours*3600 minutes = int(seconds / 60) seconds = seconds - minutes*60 secondsf = int(seconds) seconds = seconds - secondsf millis = int(seconds*1000) f = '' i = 1 if days > 0: f += str(days) + 'D' i += 1 if hours > 0 and i <= 2: f += str(hours) + 'h' i += 1 if minutes > 0 and i <= 2: f += str(minutes) + 'm' i += 1 if secondsf > 0 and i <= 2: f += str(secondsf) + 's' i += 1 if millis > 0 and i <= 2: f += str(millis) + 'ms' i += 1 if f == '': f = '0ms' return f class Cutout(object): """Randomly mask out one or more patches from an image. Args: n_holes (int): Number of patches to cut out of each image. length (int): The length (in pixels) of each square patch. """ def __init__(self, n_holes, length):
def __call__(self, img): """ Args: img (Tensor): Tensor image of size (C, H, W). Returns: Tensor: Image with n_holes of dimension length x length cut out of it. """ h = img.size(1) w = img.size(2) mask = np.ones((h, w), np.float32) for n in range(self.n_holes): y = np.random.randint(h) x = np.random.randint(w) y1 = np.clip(y - self.length // 2, 0, h) y2 = np.clip(y + self.length // 2, 0, h) x1 = np.clip(x - self.length // 2, 0, w) x2 = np.clip(x + self.length // 2, 0, w) mask[y1: y2, x1: x2] = 0. mask = torch.from_numpy(mask) mask = mask.expand_as(img) img = img * mask return img
self.n_holes = n_holes self.length = length
check_comments_no_inline.go
// Copyright (c) 2020 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package lint import ( "github.com/emicklei/proto" "github.com/xutaox/prototool/internal/text" ) var commentsNoInlineLinter = NewLinter( "COMMENTS_NO_INLINE", "Verifies that there are no inline comments.", checkCommentsNoInline, ) func checkCommentsNoInline(add func(*text.Failure), dirPath string, descriptors []*FileDescriptor) error { return runVisitor(&commentsNoInlineVisitor{baseAddVisitor: newBaseAddVisitor(add)}, descriptors) } type commentsNoInlineVisitor struct { baseAddVisitor } func (v commentsNoInlineVisitor) VisitMessage(element *proto.Message) { for _, child := range element.Elements { child.Accept(v) } }
func (v commentsNoInlineVisitor) VisitService(element *proto.Service) { for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitSyntax(element *proto.Syntax) { v.checkInlineComment("syntax", element.InlineComment) } func (v commentsNoInlineVisitor) VisitPackage(element *proto.Package) { v.checkInlineComment("packages", element.InlineComment) } func (v commentsNoInlineVisitor) VisitOption(element *proto.Option) { v.checkInlineComment("options", element.InlineComment) } func (v commentsNoInlineVisitor) VisitImport(element *proto.Import) { v.checkInlineComment("imports", element.InlineComment) } func (v commentsNoInlineVisitor) VisitNormalField(element *proto.NormalField) { v.checkInlineComment("fields", element.InlineComment) for _, child := range element.Options { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitEnumField(element *proto.EnumField) { v.checkInlineComment("enum values", element.InlineComment) for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitEnum(element *proto.Enum) { for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitOneof(element *proto.Oneof) { for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitOneofField(element *proto.OneOfField) { v.checkInlineComment("oneofs", element.InlineComment) for _, child := range element.Options { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitReserved(element *proto.Reserved) { v.checkInlineComment("reserved", element.InlineComment) } func (v commentsNoInlineVisitor) VisitRPC(element *proto.RPC) { v.checkInlineComment("RPCs", element.InlineComment) for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitMapField(element *proto.MapField) { v.checkInlineComment("fields", element.InlineComment) for _, child := range element.Options { child.Accept(v) } } func (v 
commentsNoInlineVisitor) VisitGroup(element *proto.Group) { for _, child := range element.Elements { child.Accept(v) } } func (v commentsNoInlineVisitor) VisitExtensions(element *proto.Extensions) { v.checkInlineComment("extensions", element.InlineComment) } func (v commentsNoInlineVisitor) checkInlineComment(t string, inlineComment *proto.Comment) { if inlineComment != nil { v.AddFailuref(inlineComment.Position, "Inline comments are not allowed on %s, only comment above the type.", t) } }
_sizeref.py
import _plotly_utils.basevalidators class SizerefValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="sizeref", parent_name="scatter3d.marker", **kwargs): super(SizerefValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "calc"), **kwargs )
Main.js
import React from 'react'; import {Menu} from '../components'; const Main = () => { return ( <div> <Menu /> <h1>메인화면</h1>
</div> ); }; export default Main;
<p>이 프로젝트는 리액트 라우터 기초를 실습해보는 예제 프로젝트랍니다.</p>
index.tsx
import React from "react"; import Page from "@components/app/page"; import { getNodezapIconLink } from "@components/library/icon-picker"; import { useRouter } from "next/router"; import withAuth from "@service/supabase/with-auth"; import { TStore } from "@service/redux/store"; import { prodAppId } from "@service/utils/utils"; import Head from "next/head"; import { useSelector } from "react-redux"; import { useUserAppContext } from "@components/app/hooks"; import { AppContext } from "@components/app/context"; function Index() { const router = useRouter(); const routes = useUserAppContext({ appId: prodAppId(router.query.appId as string), workspaceId: router.query.workspaceId as string,
appDetails: store.appDetails, user: store.user, })); return ( <> <Head> <title>{appDetails.displayName || "Nodezap"}</title> <link rel="shortcut icon" href={getNodezapIconLink( appDetails.icon || { type: "EMOJI", source: "", } )} /> <meta name="viewport" content="initial-scale=1.0, width=device-width" /> </Head> <AppContext.Provider value={routes}> <Page pageId={"HOME"} appId={prodAppId(router.query.appId as string)} workspaceId={router.query.workspaceId as string} query={router.query as any} /> </AppContext.Provider> </> ); } export default withAuth(Index);
}); const { appDetails, user } = useSelector((store: TStore) => ({
tcp.rs
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use io::prelude::*; use fmt; use io::{self, Initializer}; use net::{ToSocketAddrs, SocketAddr, Shutdown}; use sys_common::net as net_imp; use sys_common::{AsInner, FromInner, IntoInner}; use time::Duration; /// A TCP stream between a local and a remote socket. /// /// After creating a `TcpStream` by either [`connect`]ing to a remote host or /// [`accept`]ing a connection on a [`TcpListener`], data can be transmitted /// by [reading] and [writing] to it. /// /// The connection will be closed when the value is dropped. The reading and writing /// portions of the connection can also be shut down individually with the [`shutdown`] /// method. /// /// The Transmission Control Protocol is specified in [IETF RFC 793]. 
/// /// [`accept`]: ../../std/net/struct.TcpListener.html#method.accept /// [`connect`]: #method.connect /// [IETF RFC 793]: https://tools.ietf.org/html/rfc793 /// [reading]: ../../std/io/trait.Read.html /// [`shutdown`]: #method.shutdown /// [`TcpListener`]: ../../std/net/struct.TcpListener.html /// [writing]: ../../std/io/trait.Write.html /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::net::TcpStream; /// /// { /// let mut stream = TcpStream::connect("127.0.0.1:34254").unwrap(); /// /// // ignore the Result /// let _ = stream.write(&[1]); /// let _ = stream.read(&mut [0; 128]); // ignore here too /// } // the stream is closed here /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct TcpStream(net_imp::TcpStream); /// A TCP socket server, listening for connections. /// /// After creating a `TcpListener` by [`bind`]ing it to a socket address, it listens /// for incoming TCP connections. These can be accepted by calling [`accept`] or by /// iterating over the [`Incoming`] iterator returned by [`incoming`][`TcpListener::incoming`]. /// /// The socket will be closed when the value is dropped. /// /// The Transmission Control Protocol is specified in [IETF RFC 793]. /// /// [`accept`]: #method.accept /// [`bind`]: #method.bind /// [IETF RFC 793]: https://tools.ietf.org/html/rfc793 /// [`Incoming`]: ../../std/net/struct.Incoming.html /// [`TcpListener::incoming`]: #method.incoming /// /// # Examples /// /// ``` /// # use std::io; /// use std::net::{TcpListener, TcpStream}; /// /// fn handle_client(stream: TcpStream) { /// // ... 
/// } /// /// # fn process() -> io::Result<()> { /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// /// // accept connections and process them serially /// for stream in listener.incoming() { /// handle_client(stream?); /// } /// # Ok(()) /// # } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct TcpListener(net_imp::TcpListener); /// An iterator that infinitely [`accept`]s connections on a [`TcpListener`]. /// /// This `struct` is created by the [`incoming`] method on [`TcpListener`]. /// See its documentation for more. /// /// [`accept`]: ../../std/net/struct.TcpListener.html#method.accept /// [`incoming`]: ../../std/net/struct.TcpListener.html#method.incoming /// [`TcpListener`]: ../../std/net/struct.TcpListener.html #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Incoming<'a> { listener: &'a TcpListener } impl TcpStream { /// Opens a TCP connection to a remote host. /// /// `addr` is an address of the remote host. Anything which implements /// [`ToSocketAddrs`] trait can be supplied for the address; see this trait /// documentation for concrete examples. /// /// If `addr` yields multiple addresses, `connect` will be attempted with /// each of the addresses until a connection is successful. If none of /// the addresses result in a successful connection, the error returned from /// the last connection attempt (the last address) is returned. /// /// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html /// /// # Examples /// /// Open a TCP connection to `127.0.0.1:8080`: /// /// ```no_run /// use std::net::TcpStream; /// /// if let Ok(stream) = TcpStream::connect("127.0.0.1:8080") { /// println!("Connected to the server!"); /// } else { /// println!("Couldn't connect to server..."); /// } /// ``` /// /// Open a TCP connection to `127.0.0.1:8080`. 
If the connection fails, open /// a TCP connection to `127.0.0.1:8081`: /// /// ```no_run /// use std::net::{SocketAddr, TcpStream}; /// /// let addrs = [ /// SocketAddr::from(([127, 0, 0, 1], 8080)), /// SocketAddr::from(([127, 0, 0, 1], 8081)), /// ]; /// if let Ok(stream) = TcpStream::connect(&addrs[..]) { /// println!("Connected to the server!"); /// } else { /// println!("Couldn't connect to server..."); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn connect<A: ToSocketAddrs>(addr: A) -> io::Result<TcpStream> { super::each_addr(addr, net_imp::TcpStream::connect).map(TcpStream) } /// Opens a TCP connection to a remote host with a timeout. /// /// Unlike `connect`, `connect_timeout` takes a single [`SocketAddr`] since /// timeout must be applied to individual addresses. /// /// It is an error to pass a zero `Duration` to this function. /// /// Unlike other methods on `TcpStream`, this does not correspond to a /// single system call. It instead calls `connect` in nonblocking mode and /// then uses an OS-specific mechanism to await the completion of the /// connection request. /// /// [`SocketAddr`]: ../../std/net/enum.SocketAddr.html #[stable(feature = "tcpstream_connect_timeout", since = "1.21.0")] pub fn connect_timeout(addr: &SocketAddr, timeout: Duration) -> io::Result<TcpStream> { net_imp::TcpStream::connect_timeout(addr, timeout).map(TcpStream) } /// Returns the socket address of the remote peer of this TCP connection. 
/// /// # Examples /// /// ```no_run /// use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4, TcpStream}; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// assert_eq!(stream.peer_addr().unwrap(), /// SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 8080))); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn peer_addr(&self) -> io::Result<SocketAddr> { self.0.peer_addr() } /// Returns the socket address of the local half of this TCP connection. /// /// # Examples /// /// ```no_run /// use std::net::{IpAddr, Ipv4Addr, TcpStream}; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// assert_eq!(stream.local_addr().unwrap().ip(), /// IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1))); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn local_addr(&self) -> io::Result<SocketAddr> { self.0.socket_addr() } /// Shuts down the read, write, or both halves of this connection. /// /// This function will cause all pending and future I/O on the specified /// portions to return immediately with an appropriate value (see the /// documentation of [`Shutdown`]). /// /// [`Shutdown`]: ../../std/net/enum.Shutdown.html /// /// # Platform-specific behavior /// /// Calling this function multiple times may result in different behavior, /// depending on the operating system. On Linux, the second call will /// return `Ok(())`, but on macOS, it will return `ErrorKind::NotConnected`. /// This may change in the future. 
/// /// # Examples /// /// ```no_run /// use std::net::{Shutdown, TcpStream}; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.shutdown(Shutdown::Both).expect("shutdown call failed"); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { self.0.shutdown(how) } /// Creates a new independently owned handle to the underlying socket. /// /// The returned `TcpStream` is a reference to the same stream that this /// object references. Both handles will read and write the same stream of /// data, and options set on one stream will be propagated to the other /// stream. /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// let stream_clone = stream.try_clone().expect("clone failed..."); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn try_clone(&self) -> io::Result<TcpStream> { self.0.duplicate().map(TcpStream) } /// Sets the read timeout to the timeout specified. /// /// If the value specified is [`None`], then [`read`] calls will block /// indefinitely. It is an error to pass the zero `Duration` to this /// method. /// /// # Platform-specific behavior /// /// Platforms may return a different error code whenever a read times out as /// a result of setting this option. For example Unix typically returns an /// error of the kind [`WouldBlock`], but Windows may return [`TimedOut`]. 
/// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [`read`]: ../../std/io/trait.Read.html#tymethod.read /// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock /// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_read_timeout(None).expect("set_read_timeout call failed"); /// ``` #[stable(feature = "socket_timeout", since = "1.4.0")] pub fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_read_timeout(dur) } /// Sets the write timeout to the timeout specified. /// /// If the value specified is [`None`], then [`write`] calls will block /// indefinitely. It is an error to pass the zero [`Duration`] to this /// method. /// /// # Platform-specific behavior /// /// Platforms may return a different error code whenever a write times out /// as a result of setting this option. For example Unix typically returns /// an error of the kind [`WouldBlock`], but Windows may return [`TimedOut`]. /// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [`write`]: ../../std/io/trait.Write.html#tymethod.write /// [`Duration`]: ../../std/time/struct.Duration.html /// [`WouldBlock`]: ../../std/io/enum.ErrorKind.html#variant.WouldBlock /// [`TimedOut`]: ../../std/io/enum.ErrorKind.html#variant.TimedOut /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_write_timeout(None).expect("set_write_timeout call failed"); /// ``` #[stable(feature = "socket_timeout", since = "1.4.0")] pub fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> { self.0.set_write_timeout(dur) } /// Returns the read timeout of this socket. 
/// /// If the timeout is [`None`], then [`read`] calls will block indefinitely. /// /// # Platform-specific behavior /// /// Some platforms do not provide access to the current timeout. /// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [`read`]: ../../std/io/trait.Read.html#tymethod.read /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_read_timeout(None).expect("set_read_timeout call failed"); /// assert_eq!(stream.read_timeout().unwrap(), None); /// ``` #[stable(feature = "socket_timeout", since = "1.4.0")] pub fn read_timeout(&self) -> io::Result<Option<Duration>> { self.0.read_timeout() } /// Returns the write timeout of this socket. /// /// If the timeout is [`None`], then [`write`] calls will block indefinitely. /// /// # Platform-specific behavior /// /// Some platforms do not provide access to the current timeout. /// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [`write`]: ../../std/io/trait.Write.html#tymethod.write /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_write_timeout(None).expect("set_write_timeout call failed"); /// assert_eq!(stream.write_timeout().unwrap(), None); /// ``` #[stable(feature = "socket_timeout", since = "1.4.0")] pub fn write_timeout(&self) -> io::Result<Option<Duration>> { self.0.write_timeout() } /// Receives data on the socket from the remote address to which it is /// connected, without removing that data from the queue. On success, /// returns the number of bytes peeked. /// /// Successive calls return the same data. This is accomplished by passing /// `MSG_PEEK` as a flag to the underlying `recv` system call. 
/// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8000") /// .expect("couldn't bind to address"); /// let mut buf = [0; 10]; /// let len = stream.peek(&mut buf).expect("peek failed"); /// ``` #[stable(feature = "peek", since = "1.18.0")] pub fn peek(&self, buf: &mut [u8]) -> io::Result<usize> { self.0.peek(buf) } /// Sets the value of the `TCP_NODELAY` option on this socket. /// /// If set, this option disables the Nagle algorithm. This means that /// segments are always sent as soon as possible, even if there is only a /// small amount of data. When not set, data is buffered until there is a /// sufficient amount to send out, thereby avoiding the frequent sending of /// small packets. /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_nodelay(true).expect("set_nodelay call failed"); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn set_nodelay(&self, nodelay: bool) -> io::Result<()> { self.0.set_nodelay(nodelay) } /// Gets the value of the `TCP_NODELAY` option on this socket. /// /// For more information about this option, see [`set_nodelay`][link]. /// /// [link]: #method.set_nodelay /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_nodelay(true).expect("set_nodelay call failed"); /// assert_eq!(stream.nodelay().unwrap_or(false), true); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn nodelay(&self) -> io::Result<bool> { self.0.nodelay() } /// Sets the value for the `IP_TTL` option on this socket. /// /// This value sets the time-to-live field that is used in every packet sent /// from this socket. 
/// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_ttl(100).expect("set_ttl call failed"); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { self.0.set_ttl(ttl) } /// Gets the value of the `IP_TTL` option for this socket. /// /// For more information about this option, see [`set_ttl`][link]. /// /// [link]: #method.set_ttl /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.set_ttl(100).expect("set_ttl call failed"); /// assert_eq!(stream.ttl().unwrap_or(0), 100); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn ttl(&self) -> io::Result<u32> { self.0.ttl() } /// Get the value of the `SO_ERROR` option on this socket. /// /// This will retrieve the stored error in the underlying socket, clearing /// the field in the process. This can be useful for checking errors between /// calls. /// /// # Examples /// /// ```no_run /// use std::net::TcpStream; /// /// let stream = TcpStream::connect("127.0.0.1:8080") /// .expect("Couldn't connect to the server..."); /// stream.take_error().expect("No error was expected..."); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn take_error(&self) -> io::Result<Option<io::Error>> { self.0.take_error() } /// Moves this TCP stream into or out of nonblocking mode. /// /// This will result in `read`, `write`, `recv` and `send` operations /// becoming nonblocking, i.e. immediately returning from their calls. /// If the IO operation is successful, `Ok` is returned and no further /// action is required. If the IO operation could not be completed and needs /// to be retried, an error with kind [`io::ErrorKind::WouldBlock`] is /// returned. 
/// /// On Unix platforms, calling this method corresponds to calling `fcntl` /// `FIONBIO`. On Windows calling this method corresponds to calling /// `ioctlsocket` `FIONBIO`. /// /// # Examples /// /// Reading bytes from a TCP stream in non-blocking mode: /// /// ```no_run /// use std::io::{self, Read}; /// use std::net::TcpStream; /// /// let mut stream = TcpStream::connect("127.0.0.1:7878") /// .expect("Couldn't connect to the server..."); /// stream.set_nonblocking(true).expect("set_nonblocking call failed"); /// /// # fn wait_for_fd() { unimplemented!() } /// let mut buf = vec![]; /// loop { /// match stream.read_to_end(&mut buf) { /// Ok(_) => break, /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// // wait until network socket is ready, typically implemented /// // via platform-specific APIs such as epoll or IOCP /// wait_for_fd(); /// } /// Err(e) => panic!("encountered IO error: {}", e), /// }; /// }; /// println!("bytes: {:?}", buf); /// ``` /// /// [`io::ErrorKind::WouldBlock`]: ../io/enum.ErrorKind.html#variant.WouldBlock #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { self.0.set_nonblocking(nonblocking) } } #[stable(feature = "rust1", since = "1.0.0")] impl Read for TcpStream { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.0.read(buf) } #[inline] unsafe fn initializer(&self) -> Initializer { Initializer::nop() } } #[stable(feature = "rust1", since = "1.0.0")] impl Write for TcpStream { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.0.write(buf) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a> Read for &'a TcpStream { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { self.0.read(buf) } #[inline] unsafe fn initializer(&self) -> Initializer { Initializer::nop() } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a> Write for &'a TcpStream { fn write(&mut self, buf: 
&[u8]) -> io::Result<usize> { self.0.write(buf) } fn flush(&mut self) -> io::Result<()> { Ok(()) } } impl AsInner<net_imp::TcpStream> for TcpStream { fn as_inner(&self) -> &net_imp::TcpStream { &self.0 } } impl FromInner<net_imp::TcpStream> for TcpStream { fn from_inner(inner: net_imp::TcpStream) -> TcpStream { TcpStream(inner) } } impl IntoInner<net_imp::TcpStream> for TcpStream { fn into_inner(self) -> net_imp::TcpStream { self.0 } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for TcpStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } impl TcpListener { /// Creates a new `TcpListener` which will be bound to the specified /// address. /// /// The returned listener is ready for accepting connections. /// /// Binding with a port number of 0 will request that the OS assigns a port /// to this listener. The port allocated can be queried via the /// [`local_addr`] method. /// /// The address type can be any implementor of [`ToSocketAddrs`] trait. See /// its documentation for concrete examples. /// /// If `addr` yields multiple addresses, `bind` will be attempted with /// each of the addresses until one succeeds and returns the listener. If /// none of the addresses succeed in creating a listener, the error returned /// from the last attempt (the last address) is returned. /// /// [`local_addr`]: #method.local_addr /// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html /// /// # Examples /// /// Create a TCP listener bound to `127.0.0.1:80`: /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// ``` /// /// Create a TCP listener bound to `127.0.0.1:80`. 
If that fails, create a /// TCP listener bound to `127.0.0.1:443`: /// /// ```no_run /// use std::net::{SocketAddr, TcpListener}; /// /// let addrs = [ /// SocketAddr::from(([127, 0, 0, 1], 80)), /// SocketAddr::from(([127, 0, 0, 1], 443)), /// ]; /// let listener = TcpListener::bind(&addrs[..]).unwrap(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn bind<A: ToSocketAddrs>(addr: A) -> io::Result<TcpListener> { super::each_addr(addr, net_imp::TcpListener::bind).map(TcpListener) } /// Returns the local socket address of this listener. /// /// # Examples /// /// ```no_run /// use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4, TcpListener}; /// /// let listener = TcpListener::bind("127.0.0.1:8080").unwrap(); /// assert_eq!(listener.local_addr().unwrap(), /// SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 8080))); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn local_addr(&self) -> io::Result<SocketAddr> { self.0.socket_addr() } /// Creates a new independently owned handle to the underlying socket. /// /// The returned [`TcpListener`] is a reference to the same socket that this /// object references. Both handles can be used to accept incoming /// connections and options set on one listener will affect the other. /// /// [`TcpListener`]: ../../std/net/struct.TcpListener.html /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:8080").unwrap(); /// let listener_clone = listener.try_clone().unwrap(); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn try_clone(&self) -> io::Result<TcpListener> { self.0.duplicate().map(TcpListener) } /// Accept a new incoming connection from this listener. /// /// This function will block the calling thread until a new TCP connection /// is established. When established, the corresponding [`TcpStream`] and the /// remote peer's address will be returned. 
/// /// [`TcpStream`]: ../../std/net/struct.TcpStream.html /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:8080").unwrap(); /// match listener.accept() { /// Ok((_socket, addr)) => println!("new client: {:?}", addr), /// Err(e) => println!("couldn't get client: {:?}", e), /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> { self.0.accept().map(|(a, b)| (TcpStream(a), b)) } /// Returns an iterator over the connections being received on this /// listener. /// /// The returned iterator will never return [`None`] and will also not yield /// the peer's [`SocketAddr`] structure. Iterating over it is equivalent to /// calling [`accept`] in a loop. /// /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [`SocketAddr`]: ../../std/net/enum.SocketAddr.html /// [`accept`]: #method.accept /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// /// for stream in listener.incoming() { /// match stream { /// Ok(stream) => { /// println!("new client!"); /// } /// Err(e) => { /* connection failed */ } /// } /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn incoming(&self) -> Incoming { Incoming { listener: self } } /// Sets the value for the `IP_TTL` option on this socket. /// /// This value sets the time-to-live field that is used in every packet sent /// from this socket. /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// listener.set_ttl(100).expect("could not set TTL"); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn set_ttl(&self, ttl: u32) -> io::Result<()> { self.0.set_ttl(ttl) } /// Gets the value of the `IP_TTL` option for this socket. /// /// For more information about this option, see [`set_ttl`][link]. 
/// /// [link]: #method.set_ttl /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// listener.set_ttl(100).expect("could not set TTL"); /// assert_eq!(listener.ttl().unwrap_or(0), 100); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn ttl(&self) -> io::Result<u32> { self.0.ttl() } #[stable(feature = "net2_mutators", since = "1.9.0")] #[rustc_deprecated(since = "1.16.0", reason = "this option can only be set before the socket is bound")] #[allow(missing_docs)] pub fn set_only_v6(&self, only_v6: bool) -> io::Result<()> { self.0.set_only_v6(only_v6) } #[stable(feature = "net2_mutators", since = "1.9.0")] #[rustc_deprecated(since = "1.16.0", reason = "this option can only be set before the socket is bound")] #[allow(missing_docs)] pub fn only_v6(&self) -> io::Result<bool> { self.0.only_v6() } /// Get the value of the `SO_ERROR` option on this socket. /// /// This will retrieve the stored error in the underlying socket, clearing /// the field in the process. This can be useful for checking errors between /// calls. /// /// # Examples /// /// ```no_run /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:80").unwrap(); /// listener.take_error().expect("No error was expected"); /// ``` #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn take_error(&self) -> io::Result<Option<io::Error>> { self.0.take_error() } /// Moves this TCP stream into or out of nonblocking mode. /// /// This will result in the `accept` operation becoming nonblocking, /// i.e. immediately returning from their calls. If the IO operation is /// successful, `Ok` is returned and no further action is required. If the /// IO operation could not be completed and needs to be retried, an error /// with kind [`io::ErrorKind::WouldBlock`] is returned. /// /// On Unix platforms, calling this method corresponds to calling `fcntl` /// `FIONBIO`. 
On Windows calling this method corresponds to calling /// `ioctlsocket` `FIONBIO`. /// /// # Examples /// /// Bind a TCP listener to an address, listen for connections, and read /// bytes in nonblocking mode: /// /// ```no_run /// use std::io; /// use std::net::TcpListener; /// /// let listener = TcpListener::bind("127.0.0.1:7878").unwrap(); /// listener.set_nonblocking(true).expect("Cannot set non-blocking"); /// /// # fn wait_for_fd() { unimplemented!() } /// # fn handle_connection(stream: std::net::TcpStream) { unimplemented!() } /// for stream in listener.incoming() { /// match stream { /// Ok(s) => { /// // do something with the TcpStream /// handle_connection(s); /// } /// Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => { /// // wait until network socket is ready, typically implemented /// // via platform-specific APIs such as epoll or IOCP /// wait_for_fd(); /// continue; /// } /// Err(e) => panic!("encountered IO error: {}", e), /// } /// } /// ``` /// /// [`io::ErrorKind::WouldBlock`]: ../io/enum.ErrorKind.html#variant.WouldBlock #[stable(feature = "net2_mutators", since = "1.9.0")] pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { self.0.set_nonblocking(nonblocking) } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a> Iterator for Incoming<'a> { type Item = io::Result<TcpStream>; fn next(&mut self) -> Option<io::Result<TcpStream>> { Some(self.listener.accept().map(|p| p.0)) } } impl AsInner<net_imp::TcpListener> for TcpListener { fn as_inner(&self) -> &net_imp::TcpListener { &self.0 } } impl FromInner<net_imp::TcpListener> for TcpListener { fn from_inner(inner: net_imp::TcpListener) -> TcpListener { TcpListener(inner) } } impl IntoInner<net_imp::TcpListener> for TcpListener { fn into_inner(self) -> net_imp::TcpListener { self.0 } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for TcpListener { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.0.fmt(f) } } #[cfg(all(test, not(any(target_os = 
"cloudabi", target_os = "emscripten"))))] mod tests { use io::ErrorKind; use io::prelude::*; use net::*; use net::test::{next_test_ip4, next_test_ip6}; use sync::mpsc::channel; use sys_common::AsInner; use time::{Instant, Duration}; use thread; fn each_ip(f: &mut FnMut(SocketAddr))
macro_rules! t { ($e:expr) => { match $e { Ok(t) => t, Err(e) => panic!("received error for `{}`: {}", stringify!($e), e), } } } #[test] fn bind_error() { match TcpListener::bind("1.1.1.1:9999") { Ok(..) => panic!(), Err(e) => assert_eq!(e.kind(), ErrorKind::AddrNotAvailable), } } #[test] fn connect_error() { match TcpStream::connect("0.0.0.0:1") { Ok(..) => panic!(), Err(e) => assert!(e.kind() == ErrorKind::ConnectionRefused || e.kind() == ErrorKind::InvalidInput || e.kind() == ErrorKind::AddrInUse || e.kind() == ErrorKind::AddrNotAvailable, "bad error: {} {:?}", e, e.kind()), } } #[test] fn listen_localhost() { let socket_addr = next_test_ip4(); let listener = t!(TcpListener::bind(&socket_addr)); let _t = thread::spawn(move || { let mut stream = t!(TcpStream::connect(&("localhost", socket_addr.port()))); t!(stream.write(&[144])); }); let mut stream = t!(listener.accept()).0; let mut buf = [0]; t!(stream.read(&mut buf)); assert!(buf[0] == 144); } #[test] fn connect_loopback() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let host = match addr { SocketAddr::V4(..) => "127.0.0.1", SocketAddr::V6(..) 
=> "::1", }; let mut stream = t!(TcpStream::connect(&(host, addr.port()))); t!(stream.write(&[66])); }); let mut stream = t!(acceptor.accept()).0; let mut buf = [0]; t!(stream.read(&mut buf)); assert!(buf[0] == 66); }) } #[test] fn smoke_test() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let (tx, rx) = channel(); let _t = thread::spawn(move|| { let mut stream = t!(TcpStream::connect(&addr)); t!(stream.write(&[99])); tx.send(t!(stream.local_addr())).unwrap(); }); let (mut stream, addr) = t!(acceptor.accept()); let mut buf = [0]; t!(stream.read(&mut buf)); assert!(buf[0] == 99); assert_eq!(addr, t!(rx.recv())); }) } #[test] fn read_eof() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let _stream = t!(TcpStream::connect(&addr)); // Close }); let mut stream = t!(acceptor.accept()).0; let mut buf = [0]; let nread = t!(stream.read(&mut buf)); assert_eq!(nread, 0); let nread = t!(stream.read(&mut buf)); assert_eq!(nread, 0); }) } #[test] fn write_close() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let (tx, rx) = channel(); let _t = thread::spawn(move|| { drop(t!(TcpStream::connect(&addr))); tx.send(()).unwrap(); }); let mut stream = t!(acceptor.accept()).0; rx.recv().unwrap(); let buf = [0]; match stream.write(&buf) { Ok(..) 
=> {} Err(e) => { assert!(e.kind() == ErrorKind::ConnectionReset || e.kind() == ErrorKind::BrokenPipe || e.kind() == ErrorKind::ConnectionAborted, "unknown error: {}", e); } } }) } #[test] fn multiple_connect_serial() { each_ip(&mut |addr| { let max = 10; let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { for _ in 0..max { let mut stream = t!(TcpStream::connect(&addr)); t!(stream.write(&[99])); } }); for stream in acceptor.incoming().take(max) { let mut stream = t!(stream); let mut buf = [0]; t!(stream.read(&mut buf)); assert_eq!(buf[0], 99); } }) } #[test] fn multiple_connect_interleaved_greedy_schedule() { const MAX: usize = 10; each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let acceptor = acceptor; for (i, stream) in acceptor.incoming().enumerate().take(MAX) { // Start another thread to handle the connection let _t = thread::spawn(move|| { let mut stream = t!(stream); let mut buf = [0]; t!(stream.read(&mut buf)); assert!(buf[0] == i as u8); }); } }); connect(0, addr); }); fn connect(i: usize, addr: SocketAddr) { if i == MAX { return } let t = thread::spawn(move|| { let mut stream = t!(TcpStream::connect(&addr)); // Connect again before writing connect(i + 1, addr); t!(stream.write(&[i as u8])); }); t.join().ok().unwrap(); } } #[test] fn multiple_connect_interleaved_lazy_schedule() { const MAX: usize = 10; each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { for stream in acceptor.incoming().take(MAX) { // Start another thread to handle the connection let _t = thread::spawn(move|| { let mut stream = t!(stream); let mut buf = [0]; t!(stream.read(&mut buf)); assert!(buf[0] == 99); }); } }); connect(0, addr); }); fn connect(i: usize, addr: SocketAddr) { if i == MAX { return } let t = thread::spawn(move|| { let mut stream = t!(TcpStream::connect(&addr)); connect(i + 1, addr); t!(stream.write(&[99])); }); t.join().ok().unwrap(); } } #[test] fn 
socket_and_peer_name() { each_ip(&mut |addr| { let listener = t!(TcpListener::bind(&addr)); let so_name = t!(listener.local_addr()); assert_eq!(addr, so_name); let _t = thread::spawn(move|| { t!(listener.accept()); }); let stream = t!(TcpStream::connect(&addr)); assert_eq!(addr, t!(stream.peer_addr())); }) } #[test] fn partial_read() { each_ip(&mut |addr| { let (tx, rx) = channel(); let srv = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let mut cl = t!(srv.accept()).0; cl.write(&[10]).unwrap(); let mut b = [0]; t!(cl.read(&mut b)); tx.send(()).unwrap(); }); let mut c = t!(TcpStream::connect(&addr)); let mut b = [0; 10]; assert_eq!(c.read(&mut b).unwrap(), 1); t!(c.write(&[1])); rx.recv().unwrap(); }) } #[test] fn double_bind() { each_ip(&mut |addr| { let _listener = t!(TcpListener::bind(&addr)); match TcpListener::bind(&addr) { Ok(..) => panic!(), Err(e) => { assert!(e.kind() == ErrorKind::ConnectionRefused || e.kind() == ErrorKind::Other || e.kind() == ErrorKind::AddrInUse, "unknown error: {} {:?}", e, e.kind()); } } }) } #[test] fn fast_rebind() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { t!(TcpStream::connect(&addr)); }); t!(acceptor.accept()); drop(acceptor); t!(TcpListener::bind(&addr)); }); } #[test] fn tcp_clone_smoke() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let mut s = t!(TcpStream::connect(&addr)); let mut buf = [0, 0]; assert_eq!(s.read(&mut buf).unwrap(), 1); assert_eq!(buf[0], 1); t!(s.write(&[2])); }); let mut s1 = t!(acceptor.accept()).0; let s2 = t!(s1.try_clone()); let (tx1, rx1) = channel(); let (tx2, rx2) = channel(); let _t = thread::spawn(move|| { let mut s2 = s2; rx1.recv().unwrap(); t!(s2.write(&[1])); tx2.send(()).unwrap(); }); tx1.send(()).unwrap(); let mut buf = [0, 0]; assert_eq!(s1.read(&mut buf).unwrap(), 1); rx2.recv().unwrap(); }) } #[test] fn tcp_clone_two_read() { each_ip(&mut |addr| { let acceptor = 
t!(TcpListener::bind(&addr)); let (tx1, rx) = channel(); let tx2 = tx1.clone(); let _t = thread::spawn(move|| { let mut s = t!(TcpStream::connect(&addr)); t!(s.write(&[1])); rx.recv().unwrap(); t!(s.write(&[2])); rx.recv().unwrap(); }); let mut s1 = t!(acceptor.accept()).0; let s2 = t!(s1.try_clone()); let (done, rx) = channel(); let _t = thread::spawn(move|| { let mut s2 = s2; let mut buf = [0, 0]; t!(s2.read(&mut buf)); tx2.send(()).unwrap(); done.send(()).unwrap(); }); let mut buf = [0, 0]; t!(s1.read(&mut buf)); tx1.send(()).unwrap(); rx.recv().unwrap(); }) } #[test] fn tcp_clone_two_write() { each_ip(&mut |addr| { let acceptor = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let mut s = t!(TcpStream::connect(&addr)); let mut buf = [0, 1]; t!(s.read(&mut buf)); t!(s.read(&mut buf)); }); let mut s1 = t!(acceptor.accept()).0; let s2 = t!(s1.try_clone()); let (done, rx) = channel(); let _t = thread::spawn(move|| { let mut s2 = s2; t!(s2.write(&[1])); done.send(()).unwrap(); }); t!(s1.write(&[2])); rx.recv().unwrap(); }) } #[test] fn shutdown_smoke() { each_ip(&mut |addr| { let a = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let mut c = t!(a.accept()).0; let mut b = [0]; assert_eq!(c.read(&mut b).unwrap(), 0); t!(c.write(&[1])); }); let mut s = t!(TcpStream::connect(&addr)); t!(s.shutdown(Shutdown::Write)); assert!(s.write(&[1]).is_err()); let mut b = [0, 0]; assert_eq!(t!(s.read(&mut b)), 1); assert_eq!(b[0], 1); }) } #[test] fn close_readwrite_smoke() { each_ip(&mut |addr| { let a = t!(TcpListener::bind(&addr)); let (tx, rx) = channel::<()>(); let _t = thread::spawn(move|| { let _s = t!(a.accept()); let _ = rx.recv(); }); let mut b = [0]; let mut s = t!(TcpStream::connect(&addr)); let mut s2 = t!(s.try_clone()); // closing should prevent reads/writes t!(s.shutdown(Shutdown::Write)); assert!(s.write(&[0]).is_err()); t!(s.shutdown(Shutdown::Read)); assert_eq!(s.read(&mut b).unwrap(), 0); // closing should affect previous handles 
assert!(s2.write(&[0]).is_err()); assert_eq!(s2.read(&mut b).unwrap(), 0); // closing should affect new handles let mut s3 = t!(s.try_clone()); assert!(s3.write(&[0]).is_err()); assert_eq!(s3.read(&mut b).unwrap(), 0); // make sure these don't die let _ = s2.shutdown(Shutdown::Read); let _ = s2.shutdown(Shutdown::Write); let _ = s3.shutdown(Shutdown::Read); let _ = s3.shutdown(Shutdown::Write); drop(tx); }) } #[test] #[cfg(unix)] // test doesn't work on Windows, see #31657 fn close_read_wakes_up() { each_ip(&mut |addr| { let a = t!(TcpListener::bind(&addr)); let (tx1, rx) = channel::<()>(); let _t = thread::spawn(move|| { let _s = t!(a.accept()); let _ = rx.recv(); }); let s = t!(TcpStream::connect(&addr)); let s2 = t!(s.try_clone()); let (tx, rx) = channel(); let _t = thread::spawn(move|| { let mut s2 = s2; assert_eq!(t!(s2.read(&mut [0])), 0); tx.send(()).unwrap(); }); // this should wake up the child thread t!(s.shutdown(Shutdown::Read)); // this test will never finish if the child doesn't wake up rx.recv().unwrap(); drop(tx1); }) } #[test] fn clone_while_reading() { each_ip(&mut |addr| { let accept = t!(TcpListener::bind(&addr)); // Enqueue a thread to write to a socket let (tx, rx) = channel(); let (txdone, rxdone) = channel(); let txdone2 = txdone.clone(); let _t = thread::spawn(move|| { let mut tcp = t!(TcpStream::connect(&addr)); rx.recv().unwrap(); t!(tcp.write(&[0])); txdone2.send(()).unwrap(); }); // Spawn off a reading clone let tcp = t!(accept.accept()).0; let tcp2 = t!(tcp.try_clone()); let txdone3 = txdone.clone(); let _t = thread::spawn(move|| { let mut tcp2 = tcp2; t!(tcp2.read(&mut [0])); txdone3.send(()).unwrap(); }); // Try to ensure that the reading clone is indeed reading for _ in 0..50 { thread::yield_now(); } // clone the handle again while it's reading, then let it finish the // read. 
let _ = t!(tcp.try_clone()); tx.send(()).unwrap(); rxdone.recv().unwrap(); rxdone.recv().unwrap(); }) } #[test] fn clone_accept_smoke() { each_ip(&mut |addr| { let a = t!(TcpListener::bind(&addr)); let a2 = t!(a.try_clone()); let _t = thread::spawn(move|| { let _ = TcpStream::connect(&addr); }); let _t = thread::spawn(move|| { let _ = TcpStream::connect(&addr); }); t!(a.accept()); t!(a2.accept()); }) } #[test] fn clone_accept_concurrent() { each_ip(&mut |addr| { let a = t!(TcpListener::bind(&addr)); let a2 = t!(a.try_clone()); let (tx, rx) = channel(); let tx2 = tx.clone(); let _t = thread::spawn(move|| { tx.send(t!(a.accept())).unwrap(); }); let _t = thread::spawn(move|| { tx2.send(t!(a2.accept())).unwrap(); }); let _t = thread::spawn(move|| { let _ = TcpStream::connect(&addr); }); let _t = thread::spawn(move|| { let _ = TcpStream::connect(&addr); }); rx.recv().unwrap(); rx.recv().unwrap(); }) } #[test] fn debug() { let name = if cfg!(windows) {"socket"} else {"fd"}; let socket_addr = next_test_ip4(); let listener = t!(TcpListener::bind(&socket_addr)); let listener_inner = listener.0.socket().as_inner(); let compare = format!("TcpListener {{ addr: {:?}, {}: {:?} }}", socket_addr, name, listener_inner); assert_eq!(format!("{:?}", listener), compare); let stream = t!(TcpStream::connect(&("localhost", socket_addr.port()))); let stream_inner = stream.0.socket().as_inner(); let compare = format!("TcpStream {{ addr: {:?}, \ peer: {:?}, {}: {:?} }}", stream.local_addr().unwrap(), stream.peer_addr().unwrap(), name, stream_inner); assert_eq!(format!("{:?}", stream), compare); } // FIXME: re-enabled bitrig/openbsd tests once their socket timeout code // no longer has rounding errors. 
#[cfg_attr(any(target_os = "bitrig", target_os = "netbsd", target_os = "openbsd"), ignore)] #[test] fn timeouts() { let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); let stream = t!(TcpStream::connect(&("localhost", addr.port()))); let dur = Duration::new(15410, 0); assert_eq!(None, t!(stream.read_timeout())); t!(stream.set_read_timeout(Some(dur))); assert_eq!(Some(dur), t!(stream.read_timeout())); assert_eq!(None, t!(stream.write_timeout())); t!(stream.set_write_timeout(Some(dur))); assert_eq!(Some(dur), t!(stream.write_timeout())); t!(stream.set_read_timeout(None)); assert_eq!(None, t!(stream.read_timeout())); t!(stream.set_write_timeout(None)); assert_eq!(None, t!(stream.write_timeout())); drop(listener); } #[test] fn test_read_timeout() { let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); let mut stream = t!(TcpStream::connect(&("localhost", addr.port()))); t!(stream.set_read_timeout(Some(Duration::from_millis(1000)))); let mut buf = [0; 10]; let start = Instant::now(); let kind = stream.read(&mut buf).err().expect("expected error").kind(); assert!(kind == ErrorKind::WouldBlock || kind == ErrorKind::TimedOut); assert!(start.elapsed() > Duration::from_millis(400)); drop(listener); } #[test] fn test_read_with_timeout() { let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); let mut stream = t!(TcpStream::connect(&("localhost", addr.port()))); t!(stream.set_read_timeout(Some(Duration::from_millis(1000)))); let mut other_end = t!(listener.accept()).0; t!(other_end.write_all(b"hello world")); let mut buf = [0; 11]; t!(stream.read(&mut buf)); assert_eq!(b"hello world", &buf[..]); let start = Instant::now(); let kind = stream.read(&mut buf).err().expect("expected error").kind(); assert!(kind == ErrorKind::WouldBlock || kind == ErrorKind::TimedOut); assert!(start.elapsed() > Duration::from_millis(400)); drop(listener); } // Ensure the `set_read_timeout` and `set_write_timeout` calls return errors // when 
passed zero Durations #[test] fn test_timeout_zero_duration() { let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); let stream = t!(TcpStream::connect(&addr)); let result = stream.set_write_timeout(Some(Duration::new(0, 0))); let err = result.unwrap_err(); assert_eq!(err.kind(), ErrorKind::InvalidInput); let result = stream.set_read_timeout(Some(Duration::new(0, 0))); let err = result.unwrap_err(); assert_eq!(err.kind(), ErrorKind::InvalidInput); drop(listener); } #[test] fn nodelay() { let addr = next_test_ip4(); let _listener = t!(TcpListener::bind(&addr)); let stream = t!(TcpStream::connect(&("localhost", addr.port()))); assert_eq!(false, t!(stream.nodelay())); t!(stream.set_nodelay(true)); assert_eq!(true, t!(stream.nodelay())); t!(stream.set_nodelay(false)); assert_eq!(false, t!(stream.nodelay())); } #[test] fn ttl() { let ttl = 100; let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); t!(listener.set_ttl(ttl)); assert_eq!(ttl, t!(listener.ttl())); let stream = t!(TcpStream::connect(&("localhost", addr.port()))); t!(stream.set_ttl(ttl)); assert_eq!(ttl, t!(stream.ttl())); } #[test] fn set_nonblocking() { let addr = next_test_ip4(); let listener = t!(TcpListener::bind(&addr)); t!(listener.set_nonblocking(true)); t!(listener.set_nonblocking(false)); let mut stream = t!(TcpStream::connect(&("localhost", addr.port()))); t!(stream.set_nonblocking(false)); t!(stream.set_nonblocking(true)); let mut buf = [0]; match stream.read(&mut buf) { Ok(_) => panic!("expected error"), Err(ref e) if e.kind() == ErrorKind::WouldBlock => {} Err(e) => panic!("unexpected error {}", e), } } #[test] fn peek() { each_ip(&mut |addr| { let (txdone, rxdone) = channel(); let srv = t!(TcpListener::bind(&addr)); let _t = thread::spawn(move|| { let mut cl = t!(srv.accept()).0; cl.write(&[1,3,3,7]).unwrap(); t!(rxdone.recv()); }); let mut c = t!(TcpStream::connect(&addr)); let mut b = [0; 10]; for _ in 1..3 { let len = c.peek(&mut b).unwrap(); 
assert_eq!(len, 4); } let len = c.read(&mut b).unwrap(); assert_eq!(len, 4); t!(c.set_nonblocking(true)); match c.peek(&mut b) { Ok(_) => panic!("expected error"), Err(ref e) if e.kind() == ErrorKind::WouldBlock => {} Err(e) => panic!("unexpected error {}", e), } t!(txdone.send(())); }) } #[test] fn connect_timeout_unroutable() { // this IP is unroutable, so connections should always time out, // provided the network is reachable to begin with. let addr = "10.255.255.1:80".parse().unwrap(); let e = TcpStream::connect_timeout(&addr, Duration::from_millis(250)).unwrap_err(); assert!(e.kind() == io::ErrorKind::TimedOut || e.kind() == io::ErrorKind::Other, "bad error: {} {:?}", e, e.kind()); } #[test] fn connect_timeout_unbound() { // bind and drop a socket to track down a "probably unassigned" port let socket = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = socket.local_addr().unwrap(); drop(socket); let timeout = Duration::from_secs(1); let e = TcpStream::connect_timeout(&addr, timeout).unwrap_err(); assert!(e.kind() == io::ErrorKind::ConnectionRefused || e.kind() == io::ErrorKind::TimedOut || e.kind() == io::ErrorKind::Other, "bad error: {} {:?}", e, e.kind()); } #[test] fn connect_timeout_valid() { let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); TcpStream::connect_timeout(&addr, Duration::from_secs(2)).unwrap(); } }
{ f(next_test_ip4()); f(next_test_ip6()); }
get_health_responses.go
// Code generated by go-swagger; DO NOT EDIT. package operations // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "net/http" "github.com/go-openapi/runtime" ) // GetHealthOKCode is the HTTP code returned for type GetHealthOK const GetHealthOKCode int = 200 /*GetHealthOK Service is healthy swagger:response getHealthOK */ type GetHealthOK struct { } // NewGetHealthOK creates GetHealthOK with default headers values func NewGetHealthOK() *GetHealthOK
// WriteResponse to the client func (o *GetHealthOK) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { rw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty responses rw.WriteHeader(200) } // GetHealthInternalServerErrorCode is the HTTP code returned for type GetHealthInternalServerError const GetHealthInternalServerErrorCode int = 500 /*GetHealthInternalServerError Service is not healty swagger:response getHealthInternalServerError */ type GetHealthInternalServerError struct { } // NewGetHealthInternalServerError creates GetHealthInternalServerError with default headers values func NewGetHealthInternalServerError() *GetHealthInternalServerError { return &GetHealthInternalServerError{} } // WriteResponse to the client func (o *GetHealthInternalServerError) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) { rw.Header().Del(runtime.HeaderContentType) //Remove Content-Type on empty responses rw.WriteHeader(500) }
{ return &GetHealthOK{} }
useDebounce.ts
import { debounce } from '../utils' import { useMemo } from 'react' import { useRefValue } from './useRefValue'
) { const fnRef = useRefValue(fn) const debouncedFn = useMemo(() => { return debounce(fnRef.current, delay) }, []) return debouncedFn }
export function useDebounce<T extends (...args: any[]) => any>( fn: T, delay: number,
item-sub-class-mask.js
import r from 'restructure'; import Entity from '../entity'; import LocalizedStringRef from '../localized-string-ref'; export default Entity({
});
classID: r.uint32le, subClassID: r.uint32le, name: LocalizedStringRef,
clusteraddonsconfiguration.go
/* Copyright 2019 The Helm Broker Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by lister-gen. DO NOT EDIT. package v1alpha1 import ( v1alpha1 "github.com/kyma-project/helm-broker/pkg/apis/addons/v1alpha1" "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" ) // ClusterAddonsConfigurationLister helps list ClusterAddonsConfigurations. type ClusterAddonsConfigurationLister interface { // List lists all ClusterAddonsConfigurations in the indexer. List(selector labels.Selector) (ret []*v1alpha1.ClusterAddonsConfiguration, err error) // Get retrieves the ClusterAddonsConfiguration from the index for a given name. Get(name string) (*v1alpha1.ClusterAddonsConfiguration, error) ClusterAddonsConfigurationListerExpansion } // clusterAddonsConfigurationLister implements the ClusterAddonsConfigurationLister interface. type clusterAddonsConfigurationLister struct { indexer cache.Indexer } // NewClusterAddonsConfigurationLister returns a new ClusterAddonsConfigurationLister. func NewClusterAddonsConfigurationLister(indexer cache.Indexer) ClusterAddonsConfigurationLister
// List lists all ClusterAddonsConfigurations in the indexer. func (s *clusterAddonsConfigurationLister) List(selector labels.Selector) (ret []*v1alpha1.ClusterAddonsConfiguration, err error) { err = cache.ListAll(s.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v1alpha1.ClusterAddonsConfiguration)) }) return ret, err } // Get retrieves the ClusterAddonsConfiguration from the index for a given name. func (s *clusterAddonsConfigurationLister) Get(name string) (*v1alpha1.ClusterAddonsConfiguration, error) { obj, exists, err := s.indexer.GetByKey(name) if err != nil { return nil, err } if !exists { return nil, errors.NewNotFound(v1alpha1.Resource("clusteraddonsconfiguration"), name) } return obj.(*v1alpha1.ClusterAddonsConfiguration), nil }
{ return &clusterAddonsConfigurationLister{indexer: indexer} }
views.py
from django.shortcuts import render from pmedian.tasks import * from pandas import errors from pmedapp.common.utilities import * import json import pandas as pd from django.views.decorators.csrf import csrf_exempt from django.utils.datastructures import MultiValueDictKeyError import glob import os.path @csrf_exempt def extract_csv(request): """ Getting a (two-column) csv and returning it as a json **Expected a lat/lon csv with headers """ if request.method == 'POST' and request.FILES['myfile']: if not validate_upload(request, '.csv'): return HttpResponseBadRequest("Data error: Please provide a valid csv file") try: # expecting csv with headers df = pd.read_csv(request.FILES['myfile']) if column_numeric(df[df.columns[0]]) and column_numeric(df[df.columns[1]]) and not df.isnull().values.any(): df.columns = ['latitude', 'longitude'] return HttpResponse(df.to_json(orient='records')) else: return HttpResponseBadRequest("Data input error: Ensure data is numeric and no missing values exist") except errors.EmptyDataError: return HttpResponse('CSV file is empty') else: # In case of GET request, just show the form return render(request, 'file_upload.html', locals()) @csrf_exempt def create_task(request): if request.method == 'POST': try: args = json.loads(request.POST.get('data')) # error checking input_df = pd.read_csv(request.FILES['myfile'], header=0) task = p_median_calculation_task.delay(input_df.to_json(), args) response_data = {'task_id': str(task)} return HttpResponse(json.dumps(response_data), content_type="application/json") except MultiValueDictKeyError: return HttpResponseBadRequest("Please provide the correct input data") else: return HttpResponse(status=405, reason="Method not allowed") @csrf_exempt def get_task(request): """ Return the status of a task given it's id """ try: task_id = request.GET['task-id'] result = AsyncResult(task_id) result_dct = {result.task_id: { 'status': result.status, 'date_done': str(result.date_done)}} 
result_dct[result.task_id]['result'] = result.result try: file = glob.glob("output/*"+str(result)+".json")[0] result_dct['result_location'] = "http://localhost:8000/pmedian/get-file?filename=" + file[7:] except IndexError: result_dct['result_location'] = 'Calculation ongoing' return HttpResponse(json.dumps(result_dct)) except KeyError: return HttpResponseBadRequest("Please provide a valid task-id") @csrf_exempt def get_all_tasks(request): """ Get all celery tasks from and return id, status (json) """ path = "/tmp/results/celery-task-meta-*" results = (glob.glob(path)) result_array = [] for result in results: asyng_result = AsyncResult(result[len(path) - 1:]) result_dct = {} result_dct['id'] = result[len(path) - 1:] result_dct['status'] = asyng_result.status result_dct['date_done'] = str(asyng_result.date_done) try: file = glob.glob("output/*"+str(asyng_result)+".json")[0] result_dct['result'] = "http://localhost:8000/pmedian/get-file?filename=" + file[7:] with open(file) as f: result_dct['name'] = json.load(f)['name'] except IndexError: result_dct['result'] = 'Calculation ongoing' result_array.append(result_dct) return HttpResponse(json.dumps(result_array)) @csrf_exempt def
(request): """ Download output file to disk. """ return download_output_file(request)
get_file
apply_test.go
/* Copyright 2022 The cert-manager Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package orders import ( "encoding/json" "strconv" "sync" "testing" fuzz "github.com/google/gofuzz" "github.com/stretchr/testify/assert" cmacme "github.com/cert-manager/cert-manager/pkg/apis/acme/v1" ) func
(t *testing.T) { const ( expReg = `^{"kind":"Order","apiVersion":"acme.cert-manager.io/v1","metadata":{"name":"foo","namespace":"bar","creationTimestamp":null},"spec":{"request":null,"issuerRef":{"name":""}},"status":{.*}$` expEmpty = `{"kind":"Order","apiVersion":"acme.cert-manager.io/v1","metadata":{"name":"foo","namespace":"bar","creationTimestamp":null},"spec":{"request":null,"issuerRef":{"name":""}},"status":{}}` numJobs = 10000 ) var wg sync.WaitGroup jobs := make(chan int) wg.Add(numJobs) for i := 0; i < 3; i++ { go func() { for j := range jobs { t.Run("fuzz_"+strconv.Itoa(j), func(t *testing.T) { var order cmacme.Order fuzz.New().NilChance(0.5).Fuzz(&order) order.Name = "foo" order.Namespace = "bar" // Test regex with non-empty status. orderData, err := serializeApplyStatus(&order) assert.NoError(t, err) assert.Regexp(t, expReg, string(orderData)) // Test round trip preserves the status. var rtOrder cmacme.Order assert.NoError(t, json.Unmarshal(orderData, &rtOrder)) assert.Equal(t, order.Status, rtOrder.Status) // String match on empty status. order.Status = cmacme.OrderStatus{} orderData, err = serializeApplyStatus(&order) assert.NoError(t, err) assert.Equal(t, expEmpty, string(orderData)) wg.Done() }) } }() } for i := 0; i < numJobs; i++ { jobs <- i } close(jobs) wg.Wait() }
Test_serializeApplyStatus
alpha_ride.rs
//! Alpha Ride text corrector extern crate clipboard_win; use clipboard_win::{set_clipboard}; use ::utils; pub fn handler_clip_text(text: &String) { if !utils::is_jp(text) { return; } const BEGIN: &'static[char] = &['「', '(']; const END: &'static[char] = &['」', ')']; if let (Some(begin_pos), Some(end_pos)) = (text.find(BEGIN), text.find(END)) { let end_pos = end_pos + 3; //+3 to go at the symbol of dialogue end if end_pos == text.len() { return
println!(">>>Action:"); if set_clipboard(&text[begin_pos..end_pos].replace("\n", "")).is_err() { println!("Hmph... failed to update clipboard"); } else { println!("Dialogue is extracted"); } } else if let Some(first_sen_end) = text.rfind('。') { if (first_sen_end+3) != text.len() { return; } let mut sen_end = first_sen_end; let mut sen_start: usize = text[..sen_end].rfind('。').unwrap_or(0); if sen_start == 0 { return; } while let Some(next_sen_start) = text[..sen_start].rfind('。') { if text[sen_start..sen_end] != text[next_sen_start..sen_start] { sen_end = sen_start; break; } sen_end = sen_start; sen_start = next_sen_start; } if text[..sen_start+3].ends_with(&text[sen_start+3..sen_end+3]) { sen_end = sen_start; } else if first_sen_end == sen_end { return; } let text = utils::remove_text_reps(&text[..sen_end+3]); println!(">>>Action:"); if set_clipboard(&text).is_err() { println!("Hmph... failed to update clipboard"); } else { println!("Text is trimmed of repetitions"); } } }
; }
0003_auto_20211227_0338.py
# Generated by Django 3.2.9 on 2021-12-27 03:38 from django.db import migrations, models class Migration(migrations.Migration):
dependencies = [ ('inheritanceapp', '0002_remove_artifact_description_remove_artifact_img_link_and_more'), ] operations = [ migrations.AlterField( model_name='artifact', name='image', field=models.ImageField(upload_to='images/'), ), migrations.AlterField( model_name='artifact', name='imgdesc', field=models.CharField(max_length=44), ), migrations.AlterField( model_name='artifact', name='imgtitle', field=models.CharField(max_length=13), ), ]
controllers.go
package build import ( "fmt" "sync/atomic" "time" "k8s.io/klog" "k8s.io/kubernetes/pkg/api/legacyscheme" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/fields" "k8s.io/apimachinery/pkg/util/wait" watchapi "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/kubernetes" "k8s.io/client-go/util/retry" buildv1 "github.com/openshift/api/build/v1" imagev1 "github.com/openshift/api/image/v1" buildv1clienttyped "github.com/openshift/client-go/build/clientset/versioned/typed/build/v1" imagev1clienttyped "github.com/openshift/client-go/image/clientset/versioned/typed/image/v1" buildutil "github.com/openshift/origin/pkg/build/util" testutil "github.com/openshift/origin/test/util" ) var ( //TODO: Make these externally configurable // BuildControllerTestWait is the time that RunBuildControllerTest waits // for any other changes to happen when testing whether only a single build got processed BuildControllerTestWait = 10 * time.Second // BuildControllerTestTransitionTimeout is the time RunBuildControllerPodSyncTest waits // for a build trasition to occur after the pod's status has been updated BuildControllerTestTransitionTimeout = 60 * time.Second // BuildControllersWatchTimeout is used by all tests to wait for watch events. In case where only // a single watch event is expected, the test will fail after the timeout. // The value is 6 minutes to allow for a resync to occur, which allows for necessarily // reconciliation to occur in tests where events occur in a non-deterministic order. 
BuildControllersWatchTimeout = 360 * time.Second ) type testingT interface { Fail() Error(args ...interface{}) Errorf(format string, args ...interface{}) FailNow() Fatal(args ...interface{}) Fatalf(format string, args ...interface{}) Log(args ...interface{}) Logf(format string, args ...interface{}) Failed() bool Parallel() Skip(args ...interface{}) Skipf(format string, args ...interface{}) SkipNow() Skipped() bool } func mockBuild() *buildv1.Build { return &buildv1.Build{ ObjectMeta: metav1.ObjectMeta{ GenerateName: "mock-build", Labels: map[string]string{ "label1": "value1", "label2": "value2", buildutil.BuildConfigLabel: "mock-build-config", buildutil.BuildRunPolicyLabel: string(buildv1.BuildRunPolicyParallel), }, }, Spec: buildv1.BuildSpec{ CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{ Git: &buildv1.GitBuildSource{ URI: "http://my.docker/build", }, ContextDir: "context", }, Strategy: buildv1.BuildStrategy{ DockerStrategy: &buildv1.DockerBuildStrategy{}, }, Output: buildv1.BuildOutput{ To: &corev1.ObjectReference{ Kind: "DockerImage", Name: "namespace/builtimage", }, }, }, }, } } func RunBuildControllerTest(t testingT, buildClient buildv1clienttyped.BuildsGetter, kClientset kubernetes.Interface, ns string) { // Setup an error channel errChan := make(chan error) // go routines will send a message on this channel if an error occurs. 
Once this happens the test is over // Create a build b, err := buildClient.Builds(ns).Create(mockBuild()) if err != nil { t.Fatal(err) } // Start watching builds for New -> Pending transition buildWatch, err := buildClient.Builds(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", b.Name).String(), ResourceVersion: b.ResourceVersion}) if err != nil { t.Fatal(err) } defer buildWatch.Stop() buildModifiedCount := int32(0) go func() { for e := range buildWatch.ResultChan() { if e.Type != watchapi.Modified { errChan <- fmt.Errorf("received an unexpected event of type: %s with object: %#v", e.Type, e.Object) } build, ok := e.Object.(*buildv1.Build) if !ok { errChan <- fmt.Errorf("received something other than build: %#v", e.Object) break } // If unexpected status, throw error if build.Status.Phase != buildv1.BuildPhasePending && build.Status.Phase != buildv1.BuildPhaseNew { errChan <- fmt.Errorf("received unexpected build status: %s", build.Status.Phase) break } atomic.AddInt32(&buildModifiedCount, 1) } }() // Watch build pods as they are created podWatch, err := kClientset.CoreV1().Pods(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", buildutil.GetBuildPodName(b)).String()}) if err != nil { t.Fatal(err) } defer podWatch.Stop() podAddedCount := int32(0) go func() { for e := range podWatch.ResultChan() { // Look for creation events if e.Type == watchapi.Added { atomic.AddInt32(&podAddedCount, 1) } } }() select { case err := <-errChan: t.Errorf("Error: %v", err) case <-time.After(BuildControllerTestWait): if atomic.LoadInt32(&buildModifiedCount) < 1 { t.Errorf("The build was modified an unexpected number of times. Got: %d, Expected: >= 1", buildModifiedCount) } if atomic.LoadInt32(&podAddedCount) != 1 { t.Errorf("The build pod was created an unexpected number of times. 
Got: %d, Expected: 1", podAddedCount) } } } type buildControllerPodState struct { PodPhase corev1.PodPhase BuildPhase buildv1.BuildPhase } type buildControllerPodTest struct { Name string States []buildControllerPodState } func RunBuildControllerPodSyncTest(t testingT, buildClient buildv1clienttyped.BuildsGetter, kClient kubernetes.Interface, ns string) { tests := []buildControllerPodTest{ { Name: "running state test", States: []buildControllerPodState{ { PodPhase: corev1.PodRunning, BuildPhase: buildv1.BuildPhaseRunning, }, }, }, { Name: "build succeeded", States: []buildControllerPodState{ { PodPhase: corev1.PodRunning, BuildPhase: buildv1.BuildPhaseRunning, }, { PodPhase: corev1.PodSucceeded, BuildPhase: buildv1.BuildPhaseComplete, }, }, }, { Name: "build failed", States: []buildControllerPodState{ { PodPhase: corev1.PodRunning, BuildPhase: buildv1.BuildPhaseRunning, }, { PodPhase: corev1.PodFailed, BuildPhase: buildv1.BuildPhaseFailed, }, }, }, } for _, test := range tests { // Setup communications channels podReadyChan := make(chan *corev1.Pod) // Will receive a value when a build pod is ready errChan := make(chan error) // Will receive a value when an error occurs // Create a build b, err := buildClient.Builds(ns).Create(mockBuild()) if err != nil { t.Fatal(err) } // Watch build pod for transition to pending podWatch, err := kClient.CoreV1().Pods(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", buildutil.GetBuildPodName(b)).String()}) if err != nil { t.Fatal(err) } go func() { for e := range podWatch.ResultChan() { pod, ok := e.Object.(*corev1.Pod) if !ok { t.Fatalf("%s: unexpected object received: %#v\n", test.Name, e.Object) } klog.Infof("pod watch event received for pod %s/%s: %v, pod phase: %v", pod.Namespace, pod.Name, e.Type, pod.Status.Phase) if pod.Status.Phase == corev1.PodPending { podReadyChan <- pod break } } }() var pod *corev1.Pod select { case pod = <-podReadyChan: if pod.Status.Phase != corev1.PodPending 
{ t.Errorf("Got wrong pod phase: %s", pod.Status.Phase) podWatch.Stop() continue } case <-time.After(BuildControllersWatchTimeout): t.Errorf("Timed out waiting for build pod to be ready") podWatch.Stop() continue } podWatch.Stop() for _, state := range test.States { if err := retry.RetryOnConflict(retry.DefaultRetry, func() error { // Update pod state and verify that corresponding build state happens accordingly pod, err := kClient.CoreV1().Pods(ns).Get(pod.Name, metav1.GetOptions{}) if err != nil { return err } if pod.Status.Phase == state.PodPhase { return fmt.Errorf("another client altered the pod phase to %s: %#v", state.PodPhase, pod) } pod.Status.Phase = state.PodPhase if pod.Status.Phase == corev1.PodSucceeded { pod.Status.ContainerStatuses = []corev1.ContainerStatus{ { Name: "container", State: corev1.ContainerState{ Terminated: &corev1.ContainerStateTerminated{ ExitCode: 0, }, }, }, } } _, err = kClient.CoreV1().Pods(ns).UpdateStatus(pod) return err }); err != nil { t.Fatal(err) } shouldContinue := func() bool { buildWatch, err := buildClient.Builds(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", b.Name).String(), ResourceVersion: b.ResourceVersion}) if err != nil { t.Fatal(err) } defer buildWatch.Stop() stateReached := make(chan struct{}) go func() { done := false for e := range buildWatch.ResultChan() { var ok bool b, ok = e.Object.(*buildv1.Build) if !ok { errChan <- fmt.Errorf("unexpected object received: %#v", e.Object) return } klog.Infof("build watch event received for build %s/%s: %v, build phase: %v", b.Namespace, b.Name, e.Type, b.Status.Phase) if e.Type != watchapi.Modified { errChan <- fmt.Errorf("unexpected event received: %s, object: %#v", e.Type, e.Object) return } if done && b.Status.Phase != state.BuildPhase { errChan <- fmt.Errorf("build %s/%s transitioned to new state (%s) after reaching desired state", b.Namespace, b.Name, b.Status.Phase) return } if b.Status.Phase == state.BuildPhase { done = 
true stateReached <- struct{}{} } } }() select { case err := <-errChan: t.Errorf("%s: Error %v", test.Name, err) return false case <-time.After(BuildControllerTestTransitionTimeout): t.Errorf("%s: Timed out waiting for build %s/%s to reach state %s. Current state: %s", test.Name, b.Namespace, b.Name, state.BuildPhase, b.Status.Phase) return false case <-stateReached: klog.Infof("%s: build %s/%s reached desired state of %s", test.Name, b.Namespace, b.Name, state.BuildPhase) } // After state is reached, continue waiting some time to check for unexpected transitions select { case err := <-errChan: t.Errorf("%s: Error %v", test.Name, err) return false case <-time.After(BuildControllerTestWait): // After waiting for a set time, if no other state is reached, continue to wait for next state transition return true } }() if !shouldContinue { break } } } } func waitForWatch(t testingT, name string, w watchapi.Interface) *watchapi.Event { select { case e, ok := <-w.ResultChan(): if !ok { t.Fatalf("Channel closed waiting for watch: %s", name) } return &e case <-time.After(BuildControllersWatchTimeout): t.Fatalf("Timed out waiting for watch: %s", name) return nil } } func RunImageChangeTriggerTest(t testingT, clusterAdminBuildClient buildv1clienttyped.BuildV1Interface, clusterAdminImageClient imagev1clienttyped.ImageV1Interface, ns string) { const ( tag = "latest" streamName = "test-image-trigger-repo" registryHostname = "registry:8080" ) testutil.SetAdditionalAllowedRegistries(registryHostname) imageStream := mockImageStream2(registryHostname, tag) imageStreamMapping := mockImageStreamMapping(imageStream.Name, "someimage", tag, registryHostname+"/openshift/test-image-trigger:"+tag) config := imageChangeBuildConfig(ns, "sti-imagestreamtag", stiStrategy("ImageStreamTag", streamName+":"+tag)) _, err := clusterAdminBuildClient.BuildConfigs(ns).Create(config) if err != nil { t.Fatalf("Couldn't create BuildConfig: %v", err) } watch, err := 
clusterAdminBuildClient.Builds(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Builds %v", err) } defer watch.Stop() watch2, err := clusterAdminBuildClient.BuildConfigs(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to BuildConfigs %v", err) } defer watch2.Stop() imageStream, err = clusterAdminImageClient.ImageStreams(ns).Create(imageStream) if err != nil { t.Fatalf("Couldn't create ImageStream: %v", err) } // give the imagechangecontroller's buildconfig cache time to be updated with the buildconfig object // so it doesn't get a miss when looking up the BC while processing the imagestream update event. time.Sleep(10 * time.Second) _, err = clusterAdminImageClient.ImageStreamMappings(ns).Create(imageStreamMapping) if err != nil { t.Fatalf("Couldn't create Image: %v", err) } // wait for initial build event from the creation of the imagerepo with tag latest event := waitForWatch(t, "initial build added", watch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild := event.Object.(*buildv1.Build) strategy := newBuild.Spec.Strategy if strategy.SourceStrategy.From.Name != registryHostname+"/openshift/test-image-trigger:"+tag { i, _ := clusterAdminImageClient.ImageStreams(ns).Get(imageStream.Name, metav1.GetOptions{}) bc, _ := clusterAdminBuildClient.BuildConfigs(ns).Get(config.Name, metav1.GetOptions{}) t.Fatalf("Expected build with base image %s, got %s\n, imagerepo is %v\ntrigger is %s\n", registryHostname+"/openshift/test-image-trigger:"+tag, strategy.SourceStrategy.From.Name, i, bc.Spec.Triggers[0].ImageChange) } // Wait for an update on the specific build that was added watch3, err := clusterAdminBuildClient.Builds(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", newBuild.Name).String(), ResourceVersion: newBuild.ResourceVersion}) defer watch3.Stop() if err != nil { t.Fatalf("Couldn't subscribe to Builds 
%v", err) } event = waitForWatch(t, "initial build update", watch3) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) // Make sure the resolution of the build's docker image pushspec didn't mutate the persisted API object if newBuild.Spec.Output.To.Name != "test-image-trigger-repo:outputtag" { t.Fatalf("unexpected build output: %#v %#v", newBuild.Spec.Output.To, newBuild.Spec.Output) } if newBuild.Labels["testlabel"] != "testvalue" { t.Fatalf("Expected build with label %s=%s from build config got %s=%s", "testlabel", "testvalue", "testlabel", newBuild.Labels["testlabel"]) } // wait for build config to be updated timeout := time.After(BuildControllerTestWait) WaitLoop: for { select { case e, ok := <-watch2.ResultChan(): if !ok { t.Fatalf("Channel closed waiting for watch: build config update in WaitLoop") } event = &e continue case <-timeout: break WaitLoop } } updatedConfig := event.Object.(*buildv1.BuildConfig) if err != nil { t.Fatalf("Couldn't get BuildConfig: %v", err) } // the first tag did not have an image id, so the last trigger field is the pull spec if updatedConfig.Spec.Triggers[0].ImageChange.LastTriggeredImageID != registryHostname+"/openshift/test-image-trigger:"+tag { t.Fatalf("Expected imageID equal to pull spec, got %#v", updatedConfig.Spec.Triggers[0].ImageChange) } // clear out the build/buildconfig watches before triggering a new build timeout = time.After(60 * time.Second) WaitLoop2: for { select { case _, ok := <-watch.ResultChan(): if !ok { t.Fatalf("Channel closed waiting for watch: build update in WaitLoop2") } continue case _, ok := <-watch2.ResultChan(): if !ok { t.Fatalf("Channel closed waiting for watch: build config update in WaitLoop2") } continue case <-timeout: break WaitLoop2 } } // trigger a build by posting a new image if _, err := clusterAdminImageClient.ImageStreamMappings(ns).Create(&imagev1.ImageStreamMapping{ ObjectMeta: 
metav1.ObjectMeta{ Namespace: ns, Name: imageStream.Name, }, Tag: tag, Image: imagev1.Image{ ObjectMeta: metav1.ObjectMeta{ Name: "ref-2-random", }, DockerImageReference: registryHostname + "/openshift/test-image-trigger:ref-2-random", }, }); err != nil { t.Fatalf("unexpected error: %v", err) } event = waitForWatch(t, "second build created", watch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) strategy = newBuild.Spec.Strategy if strategy.SourceStrategy.From.Name != registryHostname+"/openshift/test-image-trigger:ref-2-random" { i, _ := clusterAdminImageClient.ImageStreams(ns).Get(imageStream.Name, metav1.GetOptions{}) bc, _ := clusterAdminBuildClient.BuildConfigs(ns).Get(config.Name, metav1.GetOptions{}) t.Fatalf("Expected build with base image %s, got %s\n, imagerepo is %v\trigger is %s\n", registryHostname+"/openshift/test-image-trigger:ref-2-random", strategy.SourceStrategy.From.Name, i, bc.Spec.Triggers[3].ImageChange) } // Listen to events on specific build watch4, err := clusterAdminBuildClient.Builds(ns).Watch(metav1.ListOptions{FieldSelector: fields.OneTermEqualSelector("metadata.name", newBuild.Name).String(), ResourceVersion: newBuild.ResourceVersion}) defer watch4.Stop() event = waitForWatch(t, "update on second build", watch4) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) // Make sure the resolution of the build's docker image pushspec didn't mutate the persisted API object if newBuild.Spec.Output.To.Name != "test-image-trigger-repo:outputtag" { t.Fatalf("unexpected build output: %#v %#v", newBuild.Spec.Output.To, newBuild.Spec.Output) } if newBuild.Labels["testlabel"] != "testvalue" { t.Fatalf("Expected build with label %s=%s from build config got %s=%s", "testlabel", "testvalue", "testlabel", newBuild.Labels["testlabel"]) } timeout = 
time.After(BuildControllerTestWait) WaitLoop3: for { select { case e, ok := <-watch2.ResultChan(): if !ok { t.Fatalf("Channel closed waiting for watch: build config update in WaitLoop3") } event = &e continue case <-timeout: break WaitLoop3 } } updatedConfig = event.Object.(*buildv1.BuildConfig) if e, a := registryHostname+"/openshift/test-image-trigger:ref-2-random", updatedConfig.Spec.Triggers[0].ImageChange.LastTriggeredImageID; e != a { t.Errorf("unexpected trigger id: expected %v, got %v", e, a) } } func RunBuildDeleteTest(t testingT, clusterAdminClient buildv1clienttyped.BuildsGetter, clusterAdminKubeClientset kubernetes.Interface, ns string) { buildWatch, err := clusterAdminClient.Builds(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Builds %v", err) } defer buildWatch.Stop() _, err = clusterAdminClient.Builds(ns).Create(mockBuild()) if err != nil { t.Fatalf("Couldn't create Build: %v", err) } podWatch, err := clusterAdminKubeClientset.CoreV1().Pods(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Pods %v", err) } defer podWatch.Stop() // wait for initial build event from the creation of the imagerepo with tag latest event := waitForWatch(t, "initial build added", buildWatch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild := event.Object.(*buildv1.Build) // initial pod creation for build event = waitForWatch(t, "build pod created", podWatch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } clusterAdminClient.Builds(ns).Delete(newBuild.Name, nil) event = waitForWatchType(t, "pod deleted due to build deleted", podWatch, watchapi.Deleted) if e, a := watchapi.Deleted, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } pod := event.Object.(*corev1.Pod) if expected := buildutil.GetBuildPodName(newBuild); pod.Name != expected { t.Fatalf("Expected pod %s 
to be deleted, but pod %s was deleted", expected, pod.Name) } } // waitForWatchType tolerates receiving 3 events before failing while watching for a particular event // type. func waitForWatchType(t testingT, name string, w watchapi.Interface, expect watchapi.EventType) *watchapi.Event { tries := 3 for i := 0; i < tries; i++ { select { case e := <-w.ResultChan(): if e.Type != expect { continue } return &e case <-time.After(BuildControllersWatchTimeout): t.Fatalf("Timed out waiting for watch: %s", name) return nil } } t.Fatalf("Waited for a %v event with %d tries but never received one", expect, tries) return nil } func RunBuildRunningPodDeleteTest(t testingT, clusterAdminClient buildv1clienttyped.BuildsGetter, clusterAdminKubeClientset kubernetes.Interface, ns string) { buildWatch, err := clusterAdminClient.Builds(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Builds %v", err) } defer buildWatch.Stop() _, err = clusterAdminClient.Builds(ns).Create(mockBuild()) if err != nil { t.Fatalf("Couldn't create Build: %v", err) } podWatch, err := clusterAdminKubeClientset.CoreV1().Pods(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Pods %v", err) } defer podWatch.Stop() // wait for initial build event from the creation of the imagerepo with tag latest event := waitForWatch(t, "initial build added", buildWatch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild := event.Object.(*buildv1.Build) buildName := newBuild.Name podName := newBuild.Name + "-build" // initial pod creation for build for { event = waitForWatch(t, "build pod created", podWatch) newPod := event.Object.(*corev1.Pod) if newPod.Name == podName { break } } if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } // throw away events from other builds, we only care about the new build // we just triggered for { event = waitForWatch(t, 
"build updated to pending", buildWatch) newBuild = event.Object.(*buildv1.Build) if newBuild.Name == buildName { break } } if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } if newBuild.Status.Phase != buildv1.BuildPhasePending { t.Fatalf("expected build status to be marked pending, but was marked %s", newBuild.Status.Phase) } clusterAdminKubeClientset.CoreV1().Pods(ns).Delete(buildutil.GetBuildPodName(newBuild), metav1.NewDeleteOptions(0)) event = waitForWatch(t, "build updated to error", buildWatch) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) if newBuild.Status.Phase != buildv1.BuildPhaseError { t.Fatalf("expected build status to be marked error, but was marked %s", newBuild.Status.Phase) } foundFailed := false err = wait.Poll(time.Second, 30*time.Second, func() (bool, error) { events, err := clusterAdminKubeClientset.CoreV1().Events(ns).Search(legacyscheme.Scheme, newBuild) if err != nil { t.Fatalf("error getting build events: %v", err) return false, fmt.Errorf("error getting build events: %v", err) } for _, event := range events.Items { if event.Reason == buildutil.BuildFailedEventReason { foundFailed = true expect := fmt.Sprintf(buildutil.BuildFailedEventMessage, newBuild.Namespace, newBuild.Name) if event.Message != expect { return false, fmt.Errorf("expected failed event message to be %s, got %s", expect, event.Message) } return true, nil } } return false, nil }) if err != nil { t.Fatalf("unexpected: %v", err) return } if !foundFailed { t.Fatalf("expected to find a failed event on the build %s/%s", newBuild.Namespace, newBuild.Name) } } func RunBuildCompletePodDeleteTest(t testingT, clusterAdminClient buildv1clienttyped.BuildsGetter, clusterAdminKubeClientset kubernetes.Interface, ns string) { buildWatch, err := clusterAdminClient.Builds(ns).Watch(metav1.ListOptions{}) if err != nil { 
t.Fatalf("Couldn't subscribe to Builds %v", err) } defer buildWatch.Stop() _, err = clusterAdminClient.Builds(ns).Create(mockBuild()) if err != nil { t.Fatalf("Couldn't create Build: %v", err) } podWatch, err := clusterAdminKubeClientset.CoreV1().Pods(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Pods %v", err) } defer podWatch.Stop() // wait for initial build event from the creation of the imagerepo with tag latest event := waitForWatch(t, "initial build added", buildWatch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild := event.Object.(*buildv1.Build) // initial pod creation for build event = waitForWatch(t, "build pod created", podWatch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } event = waitForWatch(t, "build updated to pending", buildWatch) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) if newBuild.Status.Phase != buildv1.BuildPhasePending { t.Fatalf("expected build status to be marked pending, but was marked %s", newBuild.Status.Phase) } newBuild.Status.Phase = buildv1.BuildPhaseComplete clusterAdminClient.Builds(ns).Update(newBuild) event = waitForWatch(t, "build updated to complete", buildWatch) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } newBuild = event.Object.(*buildv1.Build) if newBuild.Status.Phase != buildv1.BuildPhaseComplete { t.Fatalf("expected build status to be marked complete, but was marked %s", newBuild.Status.Phase) } clusterAdminKubeClientset.CoreV1().Pods(ns).Delete(buildutil.GetBuildPodName(newBuild), metav1.NewDeleteOptions(0)) time.Sleep(10 * time.Second) newBuild, err = clusterAdminClient.Builds(ns).Get(newBuild.Name, metav1.GetOptions{}) if err != nil { t.Fatalf("unexpected error %v", err) } if 
newBuild.Status.Phase != buildv1.BuildPhaseComplete { t.Fatalf("build status was updated to %s after deleting pod, should have stayed as %s", newBuild.Status.Phase, buildv1.BuildPhaseComplete) } } func RunBuildConfigChangeControllerTest(t testingT, clusterAdminBuildClient buildv1clienttyped.BuildV1Interface, ns string) { config := configChangeBuildConfig(ns) created, err := clusterAdminBuildClient.BuildConfigs(ns).Create(config) if err != nil { t.Fatalf("Couldn't create BuildConfig: %v", err) } watch, err := clusterAdminBuildClient.Builds(ns).Watch(metav1.ListOptions{}) if err != nil { t.Fatalf("Couldn't subscribe to Builds %v", err) } defer watch.Stop() watch2, err := clusterAdminBuildClient.BuildConfigs(ns).Watch(metav1.ListOptions{ResourceVersion: created.ResourceVersion}) if err != nil { t.Fatalf("Couldn't subscribe to BuildConfigs %v", err) } defer watch2.Stop() // wait for initial build event event := waitForWatch(t, "config change initial build added", watch) if e, a := watchapi.Added, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } event = waitForWatch(t, "config change config updated", watch2) if e, a := watchapi.Modified, event.Type; e != a { t.Fatalf("expected watch event type %s, got %s", e, a) } if bc := event.Object.(*buildv1.BuildConfig); bc.Status.LastVersion == 0 { t.Fatalf("expected build config lastversion to be greater than zero after build") } } func configChangeBuildConfig(ns string) *buildv1.BuildConfig { bc := &buildv1.BuildConfig{} bc.Name = "testcfgbc" bc.Namespace = ns bc.Spec.Source.Git = &buildv1.GitBuildSource{} bc.Spec.Source.Git.URI = "git://github.com/openshift/ruby-hello-world.git" bc.Spec.Strategy.DockerStrategy = &buildv1.DockerBuildStrategy{} configChangeTrigger := buildv1.BuildTriggerPolicy{Type: buildv1.ConfigChangeBuildTriggerType} bc.Spec.Triggers = append(bc.Spec.Triggers, configChangeTrigger) return bc } func mockImageStream2(registryHostname, tag string) *imagev1.ImageStream { return 
&imagev1.ImageStream{ ObjectMeta: metav1.ObjectMeta{Name: "test-image-trigger-repo"}, Spec: imagev1.ImageStreamSpec{ DockerImageRepository: registryHostname + "/openshift/test-image-trigger", Tags: []imagev1.TagReference{ { Name: tag, From: &corev1.ObjectReference{ Kind: "DockerImage", Name: registryHostname + "/openshift/test-image-trigger:" + tag, }, }, }, }, } } func
(stream, image, tag, reference string) *imagev1.ImageStreamMapping { // create a mapping to an image that doesn't exist return &imagev1.ImageStreamMapping{ ObjectMeta: metav1.ObjectMeta{Name: stream}, Tag: tag, Image: imagev1.Image{ ObjectMeta: metav1.ObjectMeta{ Name: image, }, DockerImageReference: reference, }, } } func imageChangeBuildConfig(ns, name string, strategy buildv1.BuildStrategy) *buildv1.BuildConfig { return &buildv1.BuildConfig{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: ns, Labels: map[string]string{"testlabel": "testvalue"}, }, Spec: buildv1.BuildConfigSpec{ RunPolicy: buildv1.BuildRunPolicyParallel, CommonSpec: buildv1.CommonSpec{ Source: buildv1.BuildSource{ Git: &buildv1.GitBuildSource{ URI: "git://github.com/openshift/ruby-hello-world.git", }, ContextDir: "contextimage", }, Strategy: strategy, Output: buildv1.BuildOutput{ To: &corev1.ObjectReference{ Kind: "ImageStreamTag", Name: "test-image-trigger-repo:outputtag", }, }, }, Triggers: []buildv1.BuildTriggerPolicy{ { Type: buildv1.ImageChangeBuildTriggerType, ImageChange: &buildv1.ImageChangeTrigger{}, }, }, }, } } func stiStrategy(kind, name string) buildv1.BuildStrategy { return buildv1.BuildStrategy{ SourceStrategy: &buildv1.SourceBuildStrategy{ From: corev1.ObjectReference{ Kind: kind, Name: name, }, }, } }
mockImageStreamMapping
data.py
# Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 from io import BytesIO from pathlib import Path from typing import Any, Dict, List import numpy as np import torch import flash from flash.core.data.io.input import DataKeys, Input, ServeInput from flash.core.data.utilities.paths import filter_valid_files, has_file_allowed_extension, PATH_TYPE from flash.core.data.utilities.samples import to_samples from flash.core.data.utils import image_default_loader from flash.core.utilities.imports import _TORCHVISION_AVAILABLE, Image, requires if _TORCHVISION_AVAILABLE: from torchvision.datasets.folder import IMG_EXTENSIONS from torchvision.transforms.functional import to_pil_image else: IMG_EXTENSIONS = (".jpg", ".jpeg", ".png", ".ppm", ".bmp", ".pgm", ".tif", ".tiff", ".webp") NP_EXTENSIONS = (".npy",) def image_loader(filepath: str): if has_file_allowed_extension(filepath, IMG_EXTENSIONS): img = image_default_loader(filepath) elif has_file_allowed_extension(filepath, NP_EXTENSIONS): img = Image.fromarray(np.load(filepath).astype("uint8"), "RGB") else: raise ValueError( f"File: {filepath} has an unsupported extension. Supported extensions: " f"{list(IMG_EXTENSIONS + NP_EXTENSIONS)}." 
) return img class ImageDeserializer(ServeInput): @requires("image") def serve_load_sample(self, data: str) -> Dict: encoded_with_padding = (data + "===").encode("ascii") img = base64.b64decode(encoded_with_padding) buffer = BytesIO(img) img = Image.open(buffer, mode="r") return { DataKeys.INPUT: img, } @property def example_input(self) -> str: with (Path(flash.ASSETS_ROOT) / "fish.jpg").open("rb") as f: return base64.b64encode(f.read()).decode("UTF-8") class ImageInput(Input): @requires("image") def load_sample(self, sample: Dict[str, Any]) -> Dict[str, Any]: w, h = sample[DataKeys.INPUT].size # W x H if DataKeys.METADATA not in sample: sample[DataKeys.METADATA] = {} sample[DataKeys.METADATA]["size"] = (h, w) return sample class ImageFilesInput(ImageInput): def load_data(self, files: List[PATH_TYPE]) -> List[Dict[str, Any]]: files = filter_valid_files(files, valid_extensions=IMG_EXTENSIONS + NP_EXTENSIONS) return to_samples(files) def load_sample(self, sample: Dict[str, Any]) -> Dict[str, Any]: filepath = sample[DataKeys.INPUT] sample[DataKeys.INPUT] = image_loader(filepath) sample = super().load_sample(sample) sample[DataKeys.METADATA]["filepath"] = filepath return sample class ImageTensorInput(ImageInput): def load_data(self, tensor: Any) -> List[Dict[str, Any]]: return to_samples(tensor) def load_sample(self, sample: Dict[str, Any]) -> Dict[str, Any]: img = to_pil_image(sample[DataKeys.INPUT]) sample[DataKeys.INPUT] = img return super().load_sample(sample) class ImageNumpyInput(ImageInput): def load_data(self, array: Any) -> List[Dict[str, Any]]: return to_samples(array) def
(self, sample: Dict[str, Any]) -> Dict[str, Any]: img = to_pil_image(torch.from_numpy(sample[DataKeys.INPUT])) sample[DataKeys.INPUT] = img return super().load_sample(sample)
load_sample
service.go
// Copyright Aeraki Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package model import ( "fmt" "reflect" "sort" "sync" "github.com/aeraki-mesh/aeraki/lazyxds/pkg/utils" corev1 "k8s.io/api/core/v1" ) // Service represent one service cross multi-cluster type Service struct { mu sync.Mutex // todo Name string Namespace string Distribution map[string]*clusterServiceStatus EgressService map[string]struct{} // http service which reported from als NSLazy NSLazyStatus Spec serviceStatus // desired status Status serviceStatus } // serviceStatus is the status of lazyxds service type serviceStatus struct { ClusterIPs []string HTTPPorts map[string]struct{} TCPPorts map[string]struct{} LazyEnabled bool LazySelector map[string]string } // clusterServiceStatus is the service status of one cluster type clusterServiceStatus struct { ClusterIP string HTTPPorts map[string]struct{} TCPPorts map[string]struct{} LazyEnabled bool Selector map[string]string } // NewService creates new Service func NewService(service *corev1.Service) *Service { return &Service{ Name: service.Name, Namespace: service.Namespace, Distribution: make(map[string]*clusterServiceStatus), EgressService: make(map[string]struct{}), } } // ID use FQDN as service id func (svc *Service) ID() string { return utils.FQDN(svc.Name, svc.Namespace) } // NeedReconcileService check if service need reconcile func (svc *Service) NeedReconcileService() bool { return !reflect.DeepEqual(svc.Status.HTTPPorts, 
svc.Spec.HTTPPorts) || !reflect.DeepEqual(svc.Status.TCPPorts, svc.Spec.TCPPorts) || !reflect.DeepEqual(svc.Status.ClusterIPs, svc.Spec.ClusterIPs) } // FinishReconcileService update status using spec info func (svc *Service) FinishReconcileService() { svc.Status.HTTPPorts = svc.Spec.HTTPPorts svc.Status.TCPPorts = svc.Spec.TCPPorts svc.Status.ClusterIPs = svc.Spec.ClusterIPs } // NeedReconcileLazy check if lazy info is equal in status and spec func (svc *Service) NeedReconcileLazy() bool { return !reflect.DeepEqual(svc.Status.LazyEnabled, svc.Spec.LazyEnabled) || !reflect.DeepEqual(svc.Status.LazySelector, svc.Spec.LazySelector) } // FinishReconcileLazy update lazy status using spec info
svc.Status.LazySelector = svc.Spec.LazySelector } // UpdateClusterService update the service of one cluster func (svc *Service) UpdateClusterService(clusterName string, service *corev1.Service) { svc.mu.Lock() defer svc.mu.Unlock() cs := &clusterServiceStatus{ ClusterIP: service.Spec.ClusterIP, HTTPPorts: make(map[string]struct{}), TCPPorts: make(map[string]struct{}), LazyEnabled: utils.IsLazyEnabled(service.Annotations), Selector: service.Spec.Selector, } // https://github.com/aeraki-mesh/aeraki/issues/83 // if a service without selector, the lazy loading is disabled // we always disable lazy loading feature on a service with ExternalName if service.Spec.Type == corev1.ServiceTypeExternalName { cs.Selector = nil } for _, servicePort := range service.Spec.Ports { if utils.IsHTTP(servicePort) { cs.HTTPPorts[fmt.Sprint(servicePort.Port)] = struct{}{} } else { cs.TCPPorts[fmt.Sprint(servicePort.Port)] = struct{}{} } } svc.Distribution[clusterName] = cs svc.updateSpec() } // UpdateNSLazy update the enabled status of the namespace // If the ns lazy status changed, we need update service spec func (svc *Service) UpdateNSLazy(status NSLazyStatus) { svc.mu.Lock() defer svc.mu.Unlock() if svc.NSLazy != status { svc.NSLazy = status svc.updateSpec() } } // DeleteFromCluster delete the service of one cluster func (svc *Service) DeleteFromCluster(clusterName string) { svc.mu.Lock() defer svc.mu.Unlock() delete(svc.Distribution, clusterName) svc.updateSpec() } func (svc *Service) updateSpec() { spec := serviceStatus{ HTTPPorts: make(map[string]struct{}), TCPPorts: make(map[string]struct{}), LazySelector: make(map[string]string), } ports := make(map[string]bool) clusterIPSet := make(map[string]bool) svcMustDisableLazy := false for _, cs := range svc.Distribution { for p := range cs.HTTPPorts { if old, ok := ports[p]; ok { ports[p] = old // only if the port of all clusters are http, it's http } else { ports[p] = true } } for p := range cs.TCPPorts { ports[p] = false } if 
len(cs.Selector) == 0 { svcMustDisableLazy = true } if svcMustDisableLazy { spec.LazyEnabled = false } else { if svc.NSLazy == NSLazyStatusDisabled { spec.LazyEnabled = false } else if svc.NSLazy == NSLazyStatusEnabled { spec.LazyEnabled = true } spec.LazyEnabled = spec.LazyEnabled || cs.LazyEnabled } // if cs.LazyEnabled { spec.LazySelector = cs.Selector // random now, need doc this // } ip := cs.ClusterIP if clusterIPSet[ip] { continue } clusterIPSet[ip] = true spec.ClusterIPs = append(spec.ClusterIPs, ip) } sort.Slice(spec.ClusterIPs, func(i, j int) bool { return spec.ClusterIPs[i] > spec.ClusterIPs[j] }) for p, isHTTP := range ports { if isHTTP { spec.HTTPPorts[p] = struct{}{} } else { spec.TCPPorts[p] = struct{}{} } } svc.Spec = spec } // DomainListOfPort return the whole list of domains related with this port func (svc *Service) DomainListOfPort(num, sourceNS string) []string { fqdn := svc.ID() list := []string{ fqdn, fmt.Sprintf("%s:%s", fqdn, num), fmt.Sprintf("%s.%s.%s", svc.Name, svc.Namespace, "svc.cluster"), fmt.Sprintf("%s.%s.%s:%s", svc.Name, svc.Namespace, "svc.cluster", num), fmt.Sprintf("%s.%s.%s", svc.Name, svc.Namespace, "svc"), fmt.Sprintf("%s.%s.%s:%s", svc.Name, svc.Namespace, "svc", num), fmt.Sprintf("%s.%s", svc.Name, svc.Namespace), fmt.Sprintf("%s.%s:%s", svc.Name, svc.Namespace, num), } if svc.Namespace == sourceNS { // in case 2 services with same name are in 2 different ns list = append(list, svc.Name) list = append(list, fmt.Sprintf("%s:%s", svc.Name, num)) } for _, ip := range svc.Spec.ClusterIPs { if ip == "None" { // todo: for headless service, but the sub domain is miss in outbound of egreee, need fix l := len(list) for i := 0; i < l; i++ { list = append(list, fmt.Sprintf("*.%s", list[i])) } } else if ip != "" { list = append(list, ip) list = append(list, fmt.Sprintf("%s:%s", ip, num)) } } return list }
func (svc *Service) FinishReconcileLazy() { svc.Status.LazyEnabled = svc.Spec.LazyEnabled
tests.py
from django.contrib.auth import get_user_model from django.test import TestCase
class UserManagersTests(TestCase): def test_create_user(self): User = get_user_model() user = User.objects.create_user( email="[email protected]", password="testing@123") self.assertEqual(user.email, '[email protected]') self.assertTrue(user.is_active) self.assertFalse(user.is_staff) self.assertFalse(user.is_superuser) try: self.assertIsNotNone(user.username) self.assertIsNotNone(user.email) except AttributeError: pass with self.assertRaises(TypeError): User.objects.create_user() with self.assertRaises(TypeError): User.objects.create_user(email='') with self.assertRaises(ValueError): User.objects.create_user(email='', password="testing@123") def test_create_superuser(self): User = get_user_model() admin = User.objects.create_superuser( email="[email protected]", password="testing@123") self.assertEqual(admin.email, '[email protected]') self.assertTrue(admin.is_active) self.assertTrue(admin.is_staff) self.assertTrue(admin.is_superuser) try: self.assertIsNotNone(admin.username) self.assertIsNotNone(admin.email) except AttributeError: pass with self.assertRaises(ValueError): User.objects.create_user( email='', password="testing@123", is_superuser=False)
# Create your tests here.
X509Req.d.ts
/*************************************************************************** * * * This file was automatically generated with idlc.js * * build info: * * - fibjs : 0.25.0 * * - date : Jun 12 2018 07:22:40 * * * ***************************************************************************/ /** * @author Richard <[email protected]> * */ /// <reference path="object.d.ts" /> /** module Or Internal Object */ /** * @brief x509 证书请求对象 * @detail X509Req 对象属于 crypto 模块,创建:,```JavaScript,var k = new crypto.X509Req();,``` */ declare class Class_X509Req extends Class__obj
/** * class prop * * * @brief 获取证书的主题可分辨名称 * * @readonly * @type String */ subject: string /** * class prop * * * @brief 获取证书的公钥 * * @readonly * @type PKey */ publicKey: Class_PKey /** * * @brief X509Req 构造函数 * * */ constructor(); /** * * @brief X509Req 构造函数,根据给定的信息创建一个证书请求 * * @param subject 证书的主题可分辨名称 * @param key 证书的公钥 * @param hash 证书摘要算法,缺省为 hash.SHA1 * * * */ constructor(subject: string, key: Class_PKey, hash?: number/** = undefined*/); /** * * @brief 加载一个 DER 格式的证书请求 * @param derReq DER 格式的证书请求 * * * */ load(derReq: Class_Buffer): void; /** * * @brief 加载一个 PEM 格式的证书请求 * @param pemReq PEM 格式的证书请求 * * * */ load(pemReq: string): void; /** * * @brief 加载一个 PEM/DER 格式的证书请求,可多次调用 * @param filename 证书请求文件名 * * * */ loadFile(filename: string): void; /** * * @brief 返回当前证书请求的 PEM 格式编码 * @return 当前证书请求的 PEM 格式编码 * * * */ exportPem(): string; /** * * @brief 返回当前证书请求的 DER 格式编码 * @return 当前证书请求的 DER 格式编码 * * * */ exportDer(): Class_Buffer; /** * * @brief 签名当前证书请求为正式证书 * * opts 接收的字段如下: * ```JavaScript * { * ca: false, // 证书为 ca,缺省为 false * pathlen: -1, // 证书深度,缺省为 -1 * notBefore: "", // 证书生效时间,缺省为当前时间 * notAfter: "", // 证书失效时间,缺省为 notBefore 后一年 * usage: "", // 证书使用范围,接收:digitalSignature, nonRepudiation, keyEncipherment, dataEncipherment, keyAgreement, keyCertSign, cRLSign * type: "" // 证书 Netscape 证书类型,接收:client, server, email, objsign, reserved, sslCA, emailCA, objCA * } * ``` * @param issuer 签名机构的可分辨名称 * @param key 签名机构的私钥 * @param opts 其他可选参数 * @return 返回签名后的正式证书 * * * @async */ sign(issuer: string, key: Class_PKey, opts?: object/** = v8::Object::New(isolate)*/): Class_X509Cert; } /** endof class */ /** endof `module Or Internal Object` */
ect {
model.py
from __future__ import annotations from collections.abc import Iterable from reprlib import Repr from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, Type from uuid import UUID, uuid4 from restio.event import EventListener from restio.fields.base import Field, T_co from restio.shared import ( CURRENT_SESSION, MODEL_INSTANTIATED_EVENT, MODEL_PRE_UPDATE_EVENT, MODEL_TYPE_REGISTRY, MODEL_UPDATE_EVENT, ) from restio.state import ModelState if TYPE_CHECKING: from restio.session import Session def _check_model_type(obj: Optional[BaseModel]): if not isinstance(obj, BaseModel): raise TypeError("The provided object is not of type BaseModel.") class ModelMeta: __slots__ = ("init", "init_ignore_extra", "repr", "fields", "primary_keys", "alias") init: bool init_ignore_extra: bool repr: bool fields: Dict[str, Field] primary_keys: Dict[str, Field] alias: Optional[str] def __init__(self): self.init = True self.init_ignore_extra = True self.repr = True self.fields = dict() self.primary_keys = dict() self.alias = None # Meta attributes that don't get inherited from parent classes __MODEL_META_NOT_INHERITED__ = ("alias",) # Read-only meta attributes, can't be modified by model class __MODEL_META_READONLY__ = ("fields", "primary_keys") class BaseModelMeta(type): __slots__ = () """ BaseModel metaclass. Responsible to internally cache the data schema in a BaseModel subclass by identifying fields and primary keys. """ def __new__(cls, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]): # internal fields not initialized in BaseModel dct["_internal_id"] = None dct["_hash"] = None dct["_listener"] = None dct["_persistent_values"] = None # prepares metadata for the model type meta = ModelMeta() dct["_meta"] = meta def _update_meta( _meta: Optional[ModelMeta], extend: bool, not_inherited: Tuple[str, ...] 
= tuple(), ): if not _meta: return propagate_meta = ( set(meta.__slots__) - set(__MODEL_META_READONLY__) - set(not_inherited) ) for meta_attribute in propagate_meta: if not hasattr(_meta, meta_attribute): continue setattr(meta, meta_attribute, getattr(_meta, meta_attribute)) # excluded meta, needs to be propagated manually if extend: meta.fields.update(_meta.fields) meta.primary_keys.update(_meta.primary_keys) base: Type[BaseModel] for base in bases: if not hasattr(base, "_meta"): continue _update_meta(base._meta, True, __MODEL_META_NOT_INHERITED__) _update_meta(dct.get("Meta", None), False) # process class fields for field_name, field_value in dct.items(): if not isinstance(field_value, Field): continue meta.fields[field_name] = field_value if field_value.pk: meta.primary_keys[field_name] = field_value # set alias name to class name when None name_alias = meta.alias or name # validate if the alias is not duplicate # the caveat here is that two classes with the same name in two # different files will have a name collision and fail initializing if name_alias in MODEL_TYPE_REGISTRY: raise ValueError( f"Model alias `{name_alias}` is already used by another class." 
) cls_object = super().__new__(cls, name, bases, dct) # set the model alias to the model type if name_alias != "BaseModel": MODEL_TYPE_REGISTRY[name_alias] = cls_object return cls_object def __call__(self, *args, **kwargs): instance: BaseModel = super().__call__(*args, **kwargs) # stores the default after the constructor, if nothing has been set yet # this is implemented here so that this is always called, regardless of the # models with custom constructors calling or not super().__init__() for field in instance._meta.fields.values(): field._store_default(instance, force=False) instance._internal_id = uuid4() instance._hash = hash((instance.__class__, str(instance._internal_id))) instance._persistent_values = {} instance._listener = EventListener() instance._initialized = True session = CURRENT_SESSION.get() if session: session._listener.dispatch(MODEL_INSTANTIATED_EVENT, instance) return instance _repr_obj: Repr = Repr() _repr_obj.maxother = 200 class BaseModel(metaclass=BaseModelMeta): """ A representation of a remote object model. BaseModel is an abstract class that should be extended to represent models incoming from or outgoing to a remote REST API. Models can exist independently from Sessions but contain an internal state that indicates the status of the model within the current context. The Sessions are responsible to control this state. Also, each model contains a set of control attributes that indicate which fields are watched by restio internals. By default, all Field descriptors in the model will become field attributes. Fields declared with pk=True will be used by restio to optimize the caching of the models in a Session. Models that change over time will contain an internal dictionary with the latest know persistent value of each field. This is done to guarantee fast rollback of the values when the Session is invalid, and to also indicate which values might have changed within the session scope. 
If a field is modified directly, the model will intercept the change and save the older value into the persistent dictionary until `_persist` is called. During a `_rollback` call, however, the stored values are re-assigned to their original attributes. Each attribute change will also dispatch an update event so that the session is aware of changes and manages the model's internal state accordingly. The persistent dictionary (through the helper method `is_field_modified`) can also be used by DAO's to verify which values where updated prior to sending a request through the REST API, thus allowing for proper optimization and minimizing chances of conflicting changes on the remote object. All models automatically generate a random internal UUID when created. This UUID is used internally for comparison purposes, and externally as an identity. Although this attribute is not explicitly set as private, it should never be modified. """ # these are all initialized by the metaclass _meta: ModelMeta __state: ModelState = ModelState.UNBOUND __primary_keys: Optional[Dict[str, Any]] = None _initialized: bool = False _internal_id: UUID _hash: int _persistent_values: Dict[str, Any] _listener: EventListener def __init__(self, **kwargs: T_co): """ Instantiates the model by matching `kwargs` parameters to field names. Behavior is disabled when init=False in the model Meta class. :param kwargs: The dictionary of keyword arguments matching the field names of the model class. :raises ValueError: When invalid arguments are provided. 
""" meta = self._meta if not meta.init: return for arg_name, value in kwargs.items(): field_object = meta.fields.get(arg_name, None) if not field_object: if not meta.init_ignore_extra: raise ValueError( "Invalid argument provided to constructor of" f" `{self.__class__.__name__}`: {arg_name}" ) continue # pragma: no cover if not field_object.init: if not meta.init_ignore_extra: raise ValueError(f"Attribute `{arg_name}` cannot be initialized.") continue # pragma: no cover field_object.__set__(self, value) @property def _state(self) -> ModelState: """ Returns the state of the current model. :return: The ModelState representation. """ return self.__state @_state.setter def _state(self, state: ModelState): self.__state = state @property def primary_keys(self) -> Dict[str, T_co]: """ Returns a dictionary containing all primary keys. The keys will be ordered in the same order as they are declared in the model type, also following the order in which they appear in class inheritance. This property is optimized to minimize the number of iterations done in the model instance by internalizing a cache with the latest retrieved primary keys. This cache is reset for every modification of a primary key and recovered during the next call to the property. :return: The ordered tuple of values. """ if self.__primary_keys is None: self.__primary_keys = self._load_primary_keys() return self.__primary_keys def _load_primary_keys(self) -> Dict[str, T_co]: """ Returns a dictionary containing the primary key fields (keys) and their current values in the model (values). This operation will inspect the instance and collect all current values on-spot. :return: Dictionary of primary keys values. """ return {key: getattr(self, key) for key in self._meta.primary_keys} def _reset_primary_keys(self): """ Resets the internal cache of primary keys for the instance. 
""" self.__primary_keys = None def get_children( self, recursive: bool = False, children: Optional[Set[BaseModel]] = None, top_level: Optional[BaseModel] = None, ) -> Set[BaseModel]: """ Returns the list of all children of the current model. This algorithm checks in runtime for all objects refered by the instance and that are part of fields marked with depends_on=True. When `recursive` is True, then the algorithm will recursively search through all children. `children` and `top_level` are control variables that indicate which models have already been inspected by this function, in order to avoid infinite recursion if any circular dependency exists. In most cases, they should be left empty. :param recursive: If True, recursively searches for children. Returns only first degree relationships otherwise. Defaults to False. :param children: List of existing models already inspected. :param top_level: The top-level model from where inspection started. :return: The list of children. """ if children is None: children = set() if top_level: if self == top_level: return children children.add(self) else: top_level = self for value in self.dependency_fields.values(): def check(child: Optional[BaseModel]): # this can happen when the field allows none if not child or child in children: # type: ignore return if recursive: child.get_children(recursive, children, top_level) else: children.add(child) # iterables are only supported if the values are not iterables - there is # no recursiveness if isinstance(value, Iterable): value: Iterable[Any] for item in value: check(item) else: check(value) return children @property def fields(self) -> Dict[str, Any]: """ Returns the values of each field in the model instance. :return: A dict with keys containing the string names of the fields, and values containing the value of the corresponding field. 
""" return {k: getattr(self, k) for k in self._filter_fields(lambda v: True)} @property def dependency_fields(self) -> Dict[str, Any]: """ Returns the values of each field that have relationship with other models. :return: The dictionary of fields and their values """ return { k: getattr(self, k) for k in self._filter_fields(lambda v: v.depends_on) } def is_field_modified(self, field_name: str) -> bool: """ Indicates of field with name `field_name` has been modified. :param field_name: The name of the field. :raises ValueError: When the field name does not exist. :return: True if field is modified, False otherwise. """ if field_name not in self._meta.fields: raise ValueError( f"Field `{field_name}` does not exist in model" " `{self.__class__.__name__}`." ) return field_name in self._persistent_values def _filter_fields(self, filt: Callable[[Field], bool]): return {k: v for k, v in self._meta.fields.items() if filt(v)} def _rollback(self): """ Restore the persistent values in the model to their original attributes. """ for attr, value in list(self._persistent_values.items()): setattr(self, attr, value) self._persist() def _persist(self): """ Persists the current attribute values by emptying the internal persistent dictionary. Once this is called, it is not possible to rollback to the old values anymore. It is recommended that this method should only be called by the party that persisted the values on the remote server. 
""" self._persistent_values = {} def _pre_update(self, field: Field[T_co], value: T_co): self._listener.dispatch(MODEL_PRE_UPDATE_EVENT, self, field, value) def _update(self, field: Field[T_co], value: T_co): if field.pk: self._reset_primary_keys() self._listener.dispatch(MODEL_UPDATE_EVENT, self, field, value) def _update_persistent_values(self, field: Field[T_co], value: T_co): name: str = field.name if name in self._persistent_values: if value == self._persistent_values[name]: del self._persistent_values[name] else: mutable_fields = self.fields if value != mutable_fields[name]: self._persistent_values[name] = mutable_fields[name] def __eq__(self, other: BaseModel) -> bool: return isinstance(other, self.__class__) and self._hash == other._hash def __repr__(self) -> str: if not self._meta.repr: return super().__repr__() def
(field: str): value = getattr(self, field) return f"{field}={_repr_obj.repr(value)}" repr_args: List[str] = [ get_field_repr(n) for n in self._filter_fields(lambda x: x.repr) ] return f"{self.__class__.__name__}({', '.join(repr_args)})" def __hash__(self) -> int: return self._hash
get_field_repr
flags.py
# Copyright 2013 # Author: Christopher Van Arsdale # # See common/third_party/google/gflags_python/gflags for info # # Examlpe: # import common.base.flags # import sys # # FLAGS = flags.FLAGS # flags.d.DEFINE_bool('my_bool', false, 'My description') # # def main(argv): # flags.Parse(argv) # ... use FLAGS.my_bool as boolean ... # # if __name__ == "__main__": # main(sys.argv) import common.third_party.google.gflags_python.gflags as gflags # Interface simplification d = gflags FLAGS = gflags.FLAGS def
(argv): try: argv = FLAGS(argv) except gflags.FlagsError, e: print '%s\\nUsage: %s ARGS\\n%s' % (e, sys.argv[0], FLAGS) sys.exit(1)
Parse
test_rmg_simulation.py
import testing from testing import divert_nexus,restore_nexus,clear_all_sims from testing import failed,FailedTest from testing import value_eq,object_eq,text_eq def
(): from rmg import Rmg,generate_rmg #end def test_import def test_minimal_init(): from machines import job from rmg import Rmg,generate_rmg sim = generate_rmg( job = job(machine='ws1',cores=1), ) assert(isinstance(sim,Rmg)) clear_all_sims() #end def test_minimal_init
test_import
marvinInit.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. ''' @Desc: Initializes the marvin and does required prerequisites for starting it. 1. Parses the configuration file passed to marvin and creates a parsed config 2. Initializes the logging required for marvin.All logs are now made available under a single timestamped folder. 3. 
Deploys the Data Center based upon input ''' from marvin import configGenerator from marvin import cloudstackException from marvin.marvinLog import MarvinLog from marvin.deployDataCenter import deployDataCenters from marvin.codes import( YES, NO, SUCCESS, FAILED ) import sys import time import os import logging import string import random class MarvinInit: def __init__(self, config_file, load_flag, log_folder_path=None): self.__configFile = config_file self.__loadFlag = load_flag self.__parsedConfig = None self.__logFolderPath = log_folder_path self.__tcRunLogger = None self.__testClient = None self.__tcRunDebugFile = None def __parseConfig(self): ''' @Desc : Parses the configuration file passed and assigns the parsed configuration ''' try: self.__parsedConfig = configGenerator.\ getSetupConfig(self.__configFile) return SUCCESS except Exception, e: print "\n Exception Occurred Under __parseConfig : %s" % str(e) return None def getParsedConfig(self): return self.__parsedConfig def
(self): return self.__logFolderPath def getTestClient(self): return self.__testClient def getLogger(self): return self.__tcRunLogger def getDebugFile(self): return self.__tcRunDebugFile def init(self): ''' @Desc :Initializes the marvin by 1. Parsing the configuration and creating a parsed config structure 2. Creates a timestamped log folder and provides all logs to be dumped there 3. Creates the DataCenter based upon configuration provided ''' try: if ((self.__parseConfig() is not None) and (self.__initLogging() is not None) and (self.__deployDC() is not None)): return SUCCESS else: return FAILED except Exception, e: print "\n Exception Occurred Under init %s" % str(e) return FAILED def __initLogging(self): try: ''' @Desc : 1. Initializes the logging for marvin and so provides various log features for automation run. 2. Initializes all logs to be available under given Folder Path,where all test run logs are available for a given run. 3. All logging like exception log,results, run info etc for a given test run are available under a given timestamped folder ''' temp_path = "".join(str(time.time()).split(".")) if self.__logFolderPath is None: log_config = self.__parsedConfig.logger if log_config is not None: if log_config.LogFolderPath is not None: self.logFolderPath = log_config.LogFolderPath + '/' \ + temp_path else: self.logFolderPath = temp_path else: self.logFolderPath = temp_path else: self.logFolderPath = self.__logFolderPath + '/' + temp_path if os.path.exists(self.logFolderPath): self.logFolderPath += ''.join(random.choice( string.ascii_uppercase + string.digits for x in range(3))) os.makedirs(self.logFolderPath) ''' Log File Paths ''' tc_failed_exceptionlog = self.logFolderPath + "/failed_" \ "plus_" \ "exceptions.txt" tc_run_log = self.logFolderPath + "/runinfo.txt" self.__tcRunDebugFile = open(self.logFolderPath + "/results.txt", "w") log_obj = MarvinLog("CSLog") self.__tcRunLogger = log_obj.setLogHandler(tc_run_log) 
log_obj.setLogHandler(tc_failed_exceptionlog, log_level=logging.FATAL) return SUCCESS except Exception, e: print "\n Exception Occurred Under __initLogging :%s" % str(e) return None def __deployDC(self): try: ''' Deploy the DataCenter and retrieves test client. ''' deploy_obj = deployDataCenters(self.__parsedConfig, self.__tcRunLogger) if self.__loadFlag: deploy_obj.loadCfg() else: deploy_obj.deploy() self.__testClient = deploy_obj.testClient return SUCCESS except Exception, e: print "\n Exception Occurred Under __deployDC : %s" % str(e) return None
getLogFolderPath
index.ts
/** * The MIT License * * Copyright (c) 2011 Heather Arthur <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a copy of this * software and associated documentation files (the "Software"), to deal in the Software * without restriction, including without limitation the rights to use, copy, modify, merge, * publish, distribute, sublicense, and/or sell copies of the Software, and to permit * persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies * or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, * INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR * PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE * OR OTHER DEALINGS IN THE SOFTWARE. 
*/ import ColorNames from '../color-names'; const reverseNames = {}; // create a list of reverse color names for (const name in ColorNames) { if (ColorNames.hasOwnProperty(name)) { reverseNames[ColorNames[name]] = name; } } export type ColorValue = [number, number, number, number]; export interface ColorSpec { model: string; value: ColorValue; } export const get = (string): ColorSpec => { const prefix = string.substring(0, 3).toLowerCase(); let val; let model; switch (prefix) { case 'hsl': val = getHsl(string); model = 'hsl'; break; case 'hwb': val = getHwb(string); model = 'hwb'; break; default: val = getRgb(string); model = 'rgb'; break; } if (!val) { return null; } return { model, value: val, }; }; const getRgb = (string): ColorValue => { if (!string) { return null; } const abbr = /^#([a-f0-9]{3,4})$/i; const hex = /^#([a-f0-9]{6})([a-f0-9]{2})?$/i; const rgba = /^rgba?\(\s*([+-]?\d+)\s*,\s*([+-]?\d+)\s*,\s*([+-]?\d+)\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/; const per = /^rgba?\(\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/; const keyword = /(\D+)/; let rgb = [0, 0, 0, 1]; let match; let i; let hexAlpha; const hexMatch = string.match(hex); const abbrMatch = string.match(abbr); const rgbaMatch = string.match(rgba); const perMatch = string.match(per); const keywordMatch = string.match(keyword); if (hexMatch) { match = hexMatch; hexAlpha = match[2]; match = match[1]; for (i = 0; i < 3; i++) { // https://jsperf.com/slice-vs-substr-vs-substring-methods-long-string/19 const i2 = i * 2; rgb[i] = parseInt(match.slice(i2, i2 + 2), 16); } if (hexAlpha) { rgb[3] = Math.round(parseInt(hexAlpha, 16) / 255 * 100) / 100; } } else if (abbrMatch) {
for (i = 0; i < 3; i++) { rgb[i] = parseInt(match[i] + match[i], 16); } if (hexAlpha) { rgb[3] = Math.round(parseInt(hexAlpha + hexAlpha, 16) / 255 * 100) / 100; } } else if (rgbaMatch) { match = rgbaMatch; for (i = 0; i < 3; i++) { rgb[i] = parseInt(match[i + 1], 0); } if (match[4]) { rgb[3] = parseFloat(match[4]); } } else if (perMatch) { match = perMatch; for (i = 0; i < 3; i++) { rgb[i] = Math.round(parseFloat(match[i + 1]) * 2.55); } if (match[4]) { rgb[3] = parseFloat(match[4]); } } else if (keywordMatch) { match = keywordMatch; if (match[1] === 'transparent') { return [0, 0, 0, 0]; } rgb = ColorNames[match[1]]; if (!rgb) { return null; } rgb[3] = 1; return rgb as ColorValue; } else { return null; } for (i = 0; i < 3; i++) { rgb[i] = clamp(rgb[i], 0, 255); } rgb[3] = clamp(rgb[3], 0, 1); return rgb as ColorValue; }; const getHsl = (string): ColorValue => { if (!string) { return null; } const hsl = /^hsla?\(\s*([+-]?\d*[.]?\d+)(?:deg)?\s*,\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/; const match = string.match(hsl); if (match) { const alpha = parseFloat(match[4]); const h = (parseFloat(match[1]) % 360 + 360) % 360; const s = clamp(parseFloat(match[2]), 0, 100); const l = clamp(parseFloat(match[3]), 0, 100); const a = clamp(isNaN(alpha) ? 1 : alpha, 0, 1); return [h, s, l, a]; } return null; }; const getHwb = (string): ColorValue => { if (!string) { return null; } const hwb = /^hwb\(\s*([+-]?\d*[.]?\d+)(?:deg)?\s*,\s*([+-]?[\d.]+)%\s*,\s*([+-]?[\d.]+)%\s*(?:,\s*([+-]?[\d.]+)\s*)?\)$/; const match = string.match(hwb); if (match) { const alpha = parseFloat(match[4]); const h = (parseFloat(match[1]) % 360 + 360) % 360; const w = clamp(parseFloat(match[2]), 0, 100); const b = clamp(parseFloat(match[3]), 0, 100); const a = clamp(isNaN(alpha) ? 
1 : alpha, 0, 1); return [h, w, b, a]; } return null; }; export const to = (model: string, rgba: ColorValue): string => { switch (model) { case 'hex': return ( '#' + hexDouble(rgba[0]) + hexDouble(rgba[1]) + hexDouble(rgba[2]) + (rgba[3] < 1 ? hexDouble(Math.round(rgba[3] * 255)) : '') ); case 'rgb': return rgba.length < 4 || rgba[3] === 1 ? 'rgb(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ')' : 'rgba(' + Math.round(rgba[0]) + ', ' + Math.round(rgba[1]) + ', ' + Math.round(rgba[2]) + ', ' + rgba[3] + ')'; case 'hsl': return rgba.length < 4 || rgba[3] === 1 ? 'hsl(' + rgba[0] + ', ' + rgba[1] + '%, ' + rgba[2] + '%)' : 'hsla(' + rgba[0] + ', ' + rgba[1] + '%, ' + rgba[2] + '%, ' + rgba[3] + ')'; case 'hwb': // hwb is a bit different than rgb(a) & hsl(a) since there is no alpha specific syntax // (hwb have alpha optional & 1 is default value) let a = ''; if (rgba.length >= 4 && rgba[3] !== 1) { a = ', ' + rgba[3]; } return 'hwb(' + rgba[0] + ', ' + rgba[1] + '%, ' + rgba[2] + '%' + a + ')'; } }; const clamp = (num, min, max) => Math.min(Math.max(min, num), max); const hexDouble = (num) => { const str = num.toString(16).toUpperCase(); return str.length < 2 ? '0' + str : str; };
match = abbrMatch; match = match[1]; hexAlpha = match[3];
error.go
package gojq import ( "math/big" "reflect" "strconv" "strings" ) // ValueError is an interface for errors with a value for internal function. // Return an error implementing this interface when you want to catch error // values (not error messages) by try-catch, just like built-in error function. // Refer to WithFunction to add a custom internal function. type ValueError interface { error Value() interface{} } type expectedObjectError struct { v interface{} } func (err *expectedObjectError) Error() string { return "expected an object but got: " + typeErrorPreview(err.v) } type expectedArrayError struct { v interface{} } func (err *expectedArrayError) Error() string { return "expected an array but got: " + typeErrorPreview(err.v) } type iteratorError struct { v interface{} } func (err *iteratorError) Error() string { return "cannot iterate over: " + typeErrorPreview(err.v) } type arrayIndexTooLargeError struct { v interface{} } func (err *arrayIndexTooLargeError) Error() string { return "array index too large: " + previewValue(err.v) } type objectKeyNotStringError struct { v interface{} } func (err *objectKeyNotStringError) Error() string { return "expected a string for object key but got: " + typeErrorPreview(err.v) } type arrayIndexNotNumberError struct { v interface{} } func (err *arrayIndexNotNumberError) Error() string { return "expected a number for indexing an array but got: " + typeErrorPreview(err.v) } type expectedStartEndError struct { v interface{} } func (err *expectedStartEndError) Error() string { return `expected "start" and "end" for slicing but got: ` + typeErrorPreview(err.v) } type inputNotAllowedError struct{} func (*inputNotAllowedError) Error() string { return "input(s)/0 is not allowed" } type funcNotFoundError struct { f *Func } func (err *funcNotFoundError) Error() string { return "function not defined: " + err.f.Name + "/" + strconv.Itoa(len(err.f.Args)) } type funcTypeError struct { name string v interface{} } func (err *funcTypeError) 
Error() string { return err.name + " cannot be applied to: " + typeErrorPreview(err.v) } type exitCodeError struct { value interface{} code int halt bool } func (err *exitCodeError) Error() string { if s, ok := err.value.(string); ok { return "error: " + s } return "error: " + jsonMarshal(err.value) } func (err *exitCodeError) IsEmptyError() bool { return err.value == nil } func (err *exitCodeError) ExitCode() int { return err.code } func (err *exitCodeError) Value() interface{} { return err.value } type funcContainsError struct { l, r interface{} } func (err *funcContainsError) Error() string { return "cannot check contains(" + previewValue(err.r) + "): " + typeErrorPreview(err.l) } type hasKeyTypeError struct { l, r interface{} } func (err *hasKeyTypeError) Error() string { return "cannot check whether " + typeErrorPreview(err.l) + " has a key: " + typeErrorPreview(err.r) } type unaryTypeError struct { name string v interface{} } func (err *unaryTypeError) Error() string { return "cannot " + err.name + ": " + typeErrorPreview(err.v) } type binopTypeError struct { name string l, r interface{} } func (err *binopTypeError) Error() string { return "cannot " + err.name + ": " + typeErrorPreview(err.l) + " and " + typeErrorPreview(err.r) } type zeroDivisionError struct { l, r interface{} } func (err *zeroDivisionError) Error() string { return "cannot divide " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r) } type zeroModuloError struct { l, r interface{} } func (err *zeroModuloError) Error() string { return "cannot modulo " + typeErrorPreview(err.l) + " by: " + typeErrorPreview(err.r) + "" } type formatNotFoundError struct { n string } func (err *formatNotFoundError) Error() string { return "format not defined: " + err.n } type formatCsvTsvRowError struct { typ string v interface{} } func (err *formatCsvTsvRowError) Error() string { return "invalid " + err.typ + " row: " + typeErrorPreview(err.v) } type formatShError struct { v interface{} } func (err 
*formatShError) Error() string { return "cannot escape for shell: " + typeErrorPreview(err.v) } type tooManyVariableValuesError struct{} func (err *tooManyVariableValuesError) Error() string { return "too many variable values provided" } type expectedVariableError struct { n string } func (err *expectedVariableError) Error() string { return "variable defined but not bound: " + err.n } type variableNotFoundError struct { n string } func (err *variableNotFoundError) Error() string { return "variable not defined: " + err.n } type variableNameError struct { n string } func (err *variableNameError) Error() string { return "invalid variable name: " + err.n } type breakError struct { n string } func (err *breakError) Error() string { return "label not defined: " + err.n } func (err *breakError) ExitCode() int { return 3 } type tryEndError struct { err error } func (err *tryEndError) Error() string { return err.err.Error() } type invalidPathError struct { v interface{} } func (err *invalidPathError) Error() string { return "invalid path against: " + typeErrorPreview(err.v) } type invalidPathIterError struct { v interface{} } func (err *invalidPathIterError) Error() string { return "invalid path on iterating against: " + typeErrorPreview(err.v) } type getpathError struct { v, path interface{} } func (err *getpathError) Error() string { return "cannot getpath with " + previewValue(err.path) + " against: " + typeErrorPreview(err.v) + "" } type queryParseError struct { typ, fname, contents string err error } func (err *queryParseError) QueryParseError() (string, string, string, error) { return err.typ, err.fname, err.contents, err.err } func (err *queryParseError) Error() string { return "invalid " + err.typ + ": " + err.fname + ": " + err.err.Error() } type jsonParseError struct { fname, contents string err error } func (err *jsonParseError) JSONParseError() (string, string, error) { return err.fname, err.contents, err.err } func (err *jsonParseError) Error() string { return 
"invalid json: " + err.fname + ": " + err.err.Error() } func typeErrorPreview(v interface{}) string { if _, ok := v.(Iter); ok { return "gojq.Iter" } p := preview(v) if p != "" { p = " (" + p + ")" } return typeof(v) + p } func typeof(v interface{}) (s string) { if v == nil { return "null" } k := reflect.TypeOf(v).Kind() switch k { case reflect.Array, reflect.Slice: return "array" case reflect.Map: return "object" case reflect.Bool: return "boolean" case reflect.Int, reflect.Uint, reflect.Float64: return "number" case reflect.String: return "string" case reflect.Ptr: if _, ok := v.(*big.Int); ok { return "number" } return "ptr" default: return k.String() } } func preview(v interface{}) string { if v == nil { return "" } s := jsonMarshal(v) if l := 30; len(s) > l { var trailing string switch v.(type) { case string: trailing = ` ..."` case []interface{}: trailing = " ...]" case map[string]interface{}: trailing = " ...}" default: trailing = " ..." } var sb strings.Builder sb.Grow(l + 5) for _, c := range s { sb.WriteRune(c) if sb.Len() >= l-len(trailing) { sb.WriteString(trailing) break } } s = sb.String() } return s } func previewValue(v interface{}) string { if v == nil
return preview(v) }
{ return "null" }
mod.rs
//! This is a prototype for [PR #21], the RFC introducing the `Kerl` and `CurlP` hash functions //! implemented in terms of a common `Sponge` trait. //! //! The main focus of this prototype are the [`Sponge`] trait, and the [`CurlP`], and [`Kerl`] //! types. These are cryptographic hash functions that are sponge constructions implemented in //! terms of the trait. //! //! [PR #21]: https://github.com/iotaledger/bee-rfcs/pull/21 use std::convert::TryFrom; use std::default::Default; /// The length of a hash as returned by the hash functions implemented in this RFC (in /// units of binary-coded, balanced trits). const HASH_LEN: usize = 243; /// The length internal state of the `CurlP` sponge construction (in units of binary-coded, /// balanced trits). const CURLP_STATE_LEN: usize = HASH_LEN * 3; const CURLP_HALF_STATE_LEN: usize = CURLP_STATE_LEN / 2; const TRUTH_TABLE: [i8; 11] = [1, 0, -1, 2, 1, -1, 0, 2, -1, 1, 0]; /// An owned, mutable #[derive(Clone, Debug)] pub struct TritsBuf(Vec<i8>); pub enum ValidTrits { MinusOne, PlusOne, Zero, } impl From<ValidTrits> for i8 { fn from(v: ValidTrits) -> Self { use ValidTrits::*; match v { MinusOne => -1, PlusOne => 1, Zero => 0, } } } impl TritsBuf { /// Create a new `TritsBuf` with a number of `capacity` elements, all /// initialized to 0; pub fn with_capacity(capacity: usize) -> Self { Self(vec![0; capacity]) } /// Return a read-only view of the buffer in form of a `Trits`. pub fn as_trits(&self) -> Trits<'_> { Trits(&self.0) } /// Return a read-write view of the buffer in form of a `TritsMut`. pub fn as_trits_mut(&mut self) -> TritsMut<'_> { TritsMut(&mut self.0) } pub fn fill(&mut self, v: ValidTrits) { let v = v.into(); self.0.iter_mut().for_each(|x| *x = v); } /// Create a `Trits` from a `&[i8]` slice without verifying that its bytes are /// correctly binary-coded balanced trits (-1, 0, and +1). 
/// /// This function is intended to be used in hot loops and relies on the user making sure that /// the bytes are set correctly. /// /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits /// correctly before creating `Trits`. /// /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the /// usage of `Trits` might lead to unexpected behaviour. pub fn from_i8_unchecked(v: &[i8]) -> Self { Self(v.to_owned()) } /// Create a `Trits` from a `&[u8]` slice without verifying that its bytes are /// correctly binary-coded balanced trits (-1, 0, and +1 transmuted to unsigned bytes). /// /// This function is intended to be used in hot loops and relies on the user making sure that /// the bytes are set correctly. /// /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits /// correctly before creating `Trits`. /// /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the /// usage of `Trits` might lead to unexpected behaviour. pub fn from_u8_unchecked(v: &[u8]) -> Self { Self::from_i8_unchecked(unsafe { &*(v as *const _ as *const [i8]) }) } } pub struct Trits<'a>(&'a [i8]); pub struct TritsMut<'a>(&'a mut [i8]); pub struct FromU8Error; pub struct FromI8Error; /// Similar impls for `TritsMut` and `TritsBuf` impl<'a> Trits<'a> { pub fn len(&self) -> usize { self.0.len() } /// Create a `Trits` from a `&[i8]` slice without verifying that its bytes are /// correctly binary-coded balanced trits (-1, 0, and +1). /// /// This function is intended to be used in hot loops and relies on the user making sure that /// the bytes are set correctly. /// /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits /// correctly before creating `Trits`. /// /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the /// usage of `Trits` might lead to unexpected behaviour. 
pub fn from_i8_unchecked(v: &'a [i8]) -> Self { Self(v) } /// Create a `Trits` from a `&[u8]` slice without verifying that its bytes are /// correctly binary-coded balanced trits (-1, 0, and +1 transmuted to unsigned bytes). /// /// This function is intended to be used in hot loops and relies on the user making sure that /// the bytes are set correctly. /// /// **NOTE:** Use the `TryFrom` trait if you want to check that the slice encodes trits /// correctly before creating `Trits`. /// /// **WARNING:** If used incorrectly (that is, if the bytes are not correctly encoding trits), the /// usage of `Trits` might lead to unexpected behaviour. pub fn from_u8_unchecked(v: &[u8]) -> Self { Self::from_i8_unchecked(unsafe { &*(v as *const _ as *const [i8]) }) } } impl<'a> TryFrom<&'a [u8]> for Trits<'a> { type Error = FromU8Error; fn try_from(v: &[u8]) -> Result<Self, Self::Error> { for byte in v { match byte { 0b0000_0000 | 0b1111_1111 | 0b0000_0001 => {} _ => Err(FromU8Error)?, } } Ok(Self::from_u8_unchecked(v)) } } impl<'a> TryFrom<&'a [i8]> for Trits<'a> { type Error = FromI8Error; fn try_from(v: &'a [i8]) -> Result<Self, Self::Error> { for byte in v { match byte { 0 | -1 | 1 => {} _ => Err(FromI8Error)?, } } Ok(Self::from_i8_unchecked(v)) } } impl<'a> TritsMut<'a> { pub fn len(&self) -> usize { self.0.len() } pub fn from_i8_unchecked(v: &'a mut [i8]) -> Self { Self(v) } pub fn from_u8_unchecked(v: &mut [u8]) -> Self { Self::from_i8_unchecked(unsafe { &mut *(v as *mut _ as *mut [i8]) }) } } impl<'a> TryFrom<&'a mut [i8]> for TritsMut<'a> { type Error = FromI8Error; fn try_from(v: &'a mut [i8]) -> Result<Self, Self::Error> { for byte in v.iter() { match byte { 0 | -1 | 1 => {} _ => Err(FromI8Error)?, } } Ok(Self::from_i8_unchecked(v)) } } impl<'a> TryFrom<&'a mut [u8]> for TritsMut<'a> { type Error = FromU8Error; fn try_from(v: &mut [u8]) -> Result<Self, Self::Error> { for byte in v.iter() { match byte { 0b0000_0000 | 0b1111_1111 | 0b0000_0001 => {} _ => 
Err(FromU8Error)?, } } Ok(Self::from_u8_unchecked(v)) } } /// The common interface of cryptographic hash functions that follow the sponge construction, /// and that absorb and return binary-coded, balanced ternary. trait Sponge { const HASH_LEN: usize; /// Absorb `input` into the sponge. fn absorb(&mut self, input: &Trits); /// Reset the inner state of the sponge. fn reset(&mut self); /// Squeeze the sponge into a buffer fn squeeze_into(&mut self, buf: &mut TritsMut); /// Convenience function using `Sponge::squeeze_into` to to return an owned /// version of the hash. fn squeeze(&mut self) -> TritsBuf { let mut output = TritsBuf::with_capacity(Self::HASH_LEN); self.squeeze_into(&mut output.as_trits_mut()); output } /// Convenience function to absorb `input`, squeeze the sponge into a /// buffer, and reset the sponge in one go. fn digest_into(&mut self, input: &Trits, buf: &mut TritsMut) { self.absorb(input); self.squeeze_into(buf); self.reset(); } /// Convenience function to absorb `input`, squeeze the sponge, and reset the sponge in one go. /// Returns an owned versin of the hash. fn digest(&mut self, input: &Trits) -> TritsBuf { self.absorb(input); let output = self.squeeze(); self.reset(); output } } pub struct CurlP { /// The number of rounds of hashing to apply before a hash is squeezed. rounds: usize, /// The internal state. state: TritsBuf, /// Workspace for performing transformations work_state: TritsBuf, } impl CurlP { /// Create a new `CurlP` sponge with `rounds` of iterations. pub fn new(rounds: usize) -> Self { Self {
work_state: TritsBuf::with_capacity(CURLP_STATE_LEN), } } /// Return the number of rounds used in this `CurlP` instacnce. pub fn rounds(&self) -> usize { self.rounds } /// Transforms the internal state of the `CurlP` sponge after the input was copied /// into the internal state. /// /// The essence of this transformation is the application of a so-called substitution box to /// the internal state, which happens `round` number of times. fn transform(&mut self) { fn apply_substitution_box(input: &[i8], output: &mut [i8]) { assert!(input.len() <= CURLP_STATE_LEN); assert!(output.len() <= CURLP_STATE_LEN); output[0] = TRUTH_TABLE[(input[0] + (input[364] << 2) + 5) as usize]; for state_index in 0..CURLP_HALF_STATE_LEN { let rhs_index_a = CURLP_HALF_STATE_LEN - state_index; let rhs_index_b = CURLP_STATE_LEN - state_index - 1; output[2 * state_index + 1] = TRUTH_TABLE[{ (input[rhs_index_a] + input[rhs_index_b] << 2) + 5 } as usize]; let rhs_index_a = 364 - state_index - 1; output[2 * state_index + 2] = TRUTH_TABLE[{ (input[rhs_index_b] + input[rhs_index_a] << 2) + 5 } as usize]; } } let (mut lhs, mut rhs) = (&mut self.state.0, &mut self.work_state.0); for _ in 0..self.rounds { apply_substitution_box(lhs, rhs); std::mem::swap(&mut lhs, &mut rhs); } } } impl Sponge for CurlP { const HASH_LEN: usize = HASH_LEN; /// Absorb `input` into the sponge by copying `HASH_LEN` chunks of it into its internal /// state and transforming the state before moving on to the next chunk. /// /// If `input` is not a multiple of `HASH_LEN` with the last chunk having `n < HASH_LEN` trits, /// the last chunk will be copied to the first `n` slots of the internal state. The remaining /// data in the internal state is then just the result of the last transformation before the /// data was copied, and will be reused for the next transformation. 
fn absorb(&mut self, input: &Trits) { for chunk in input.0.chunks(Self::HASH_LEN) { self.state.0[0..chunk.len()].copy_from_slice(chunk); self.transform(); } } /// Reset the internal state by overwriting it with zeros. fn reset(&mut self) { self.state.fill(ValidTrits::Zero); } /// Squeeze the sponge by copying the calculated hash into the provided `buf`. This will fill /// the buffer in chunks of `HASH_LEN` at a time. /// /// If the last chunk is smaller than `HASH_LEN`, then only the fraction that fits is written /// into it. fn squeeze_into(&mut self, buf: &mut TritsMut) { for chunk in buf.0.chunks_mut(Self::HASH_LEN) { chunk.copy_from_slice(&self.state.0[0..chunk.len()]); self.transform() } } } /// `CurlP` with a fixed number of 27 rounds. pub struct CurlP27(CurlP); impl CurlP27 { pub fn new() -> Self { Self(CurlP::new(27)) } } impl Default for CurlP27 { fn default() -> Self { CurlP27::new() } } /// `CurlP` with a fixed number of 81 rounds. pub struct CurlP81(CurlP); impl CurlP81 { pub fn new() -> Self { Self(CurlP::new(81)) } } impl Default for CurlP81 { fn default() -> Self { CurlP81::new() } } macro_rules! forward_sponge_impl { ($($t:ty),+) => { $( impl $t { /// Return the number of rounds used in this `CurlP` instacnce. pub fn rounds(&self) -> usize { self.0.rounds } } impl Sponge for $t { const HASH_LEN: usize = 243; fn absorb(&mut self, input: &Trits) { self.0.absorb(input) } fn reset(&mut self) { self.0.reset() } fn squeeze_into(&mut self, buf: &mut TritsMut) { self.0.squeeze_into(buf); } } )+ } } forward_sponge_impl!(CurlP27, CurlP81);
rounds, state: TritsBuf::with_capacity(CURLP_STATE_LEN),
StreamsActions.js
import axios from 'axios'; export const STREAMS_REQUEST = 'STREAMS_REQUEST'; export const STREAMS_SUCCESS = 'STREAMS_SUCCESS'; export const STREAMS_FAILURE = 'STREAMS_FAILURE'; const request = () => { return { type: STREAMS_REQUEST } } const receive = (type, data) => { return { type, data } } export const fetchStreamsData = (url) => { return (dispatch) => {
dispatch(request()); return axios.get(url) .then(res => dispatch(receive(STREAMS_SUCCESS, res.data))) .catch(res => dispatch(receive(STREAMS_FAILURE))) } }
run.go
package cmd import ( "fmt" "time" log "github.com/sirupsen/logrus"
"github.com/donbattery/test-r/model" "github.com/spf13/cobra" ) var timeout int64 // runCmd represents the run command var runCmd = &cobra.Command{ Use: "run", Short: "run runs the test ofc", Run: func(cmd *cobra.Command, args []string) { runTest() }, } func init() { RootCmd.AddCommand(runCmd) runCmd.Flags().Int64VarP(&timeout, "timeout", "T", 10, "Timeout of the test in seconds") } func runTest() { pool := model.NewPool() for i := 0; i < 10; i++ { pool.AddJob(model.NewTask()) } pool.InitJobs() pool.WaitAll() log.Info("All task finished gg bb") rep := model.Report{ Apps: []model.App{ model.App{ Name: "App 1", Duration: time.Second * 111, Builds: []*model.Build{ &model.Build{ Name: "Build 1", Duration: time.Second * 15, Result: 1, }, &model.Build{ Name: "Build 2", Duration: time.Second * 13, Result: 0, }, }, }, }, } fmt.Printf("%#v", rep) // model.JUnitReportXML(&rep, false, os.Stdout) }
main.go
//1.结构体类型的定义 // type Circle struct { // x int // y int // Radius int // } //2.结构体变量的创建 // package main // import ( // "fmt" // "unsafe" // ) // type Circle struct { // x int // y int // Radius int // } // func main() { // var c Circle = Circle{ // x: 100, // y: 100, // Radius: 50, // 注意这里的逗号不能少 // } // fmt.Printf("%+v\n", c) // } // func main() { // var c1 Circle = Circle{ // Radius: 50, // } // var c2 Circle = Circle{} // fmt.Printf("%+v\n", c1) // fmt.Printf("%+v\n", c2) // } // func main() { // var c *Circle = &Circle{100, 100, 50} // fmt.Printf("%+v\n", c) // } // func main() { // var c *Circle = new(Circle) // fmt.Printf("%+v\n", c) // } //最后我们再将三种零值初始化形式放到一起对比观察一下 // var c1 Circle = Circle{} // var c2 Circle // var c3 *Circle = new(Circle) //3.零值结构体和 nil 结构体 // func main() { // var c *Circle = nil // // fmt.Printf("%+v\n", len(c)) // fmt.Println(unsafe.Sizeof(c)) // } //4.结构体的内存大小 // func main() { // var c Circle = Circle{} //= Circle{Radius: 50} // fmt.Println(unsafe.Sizeof(c)) // } //5.结构体的拷贝 // func main() { // var c Circle = Circle{} //= Circle{Radius: 50} // fmt.Println(unsafe.Sizeof(c)) // var c1 Circle = Circle{Radius: 50} // var c2 Circle = c1 // fmt.Printf("%+v\n", c1) // fmt.Printf("%+v\n", c2) // c1.Radius = 100 // fmt.Printf("%+v\n", c1) // fmt.Printf("%+v\n", c2) // var c3 *Circle = &Circle{Radius: 50} // var c4 *Circle = c3 // fmt.Printf("%+v\n", c3) // fmt.Printf("%+v\n", c4) // c3.Radius = 100 // fmt.Printf("%+v\n", c3) // fmt.Printf("%+v\n", c4) // fmt.Println(c1 == c2) // fmt.Println(c3 == c4) // } //6.无处不在的结构体 // 切片头的结构体形式如下,它在 64 位机器上将会占用 24 个字节 // type slice struct { // array unsafe.Pointer // 底层数组的地址 // len int // 长度 // cap int // 容量 // } // 字符串头的结构体形式,它在 64 位机器上将会占用 16 个字节 // type string struct { // array unsafe.Pointer // 底层数组的地址 // len int // } // 字典头的结构体形式 // type hmap struct { // count int // ... // buckets unsafe.Pointer // hash桶地址 // ... 
// } //7.结构体中的数组和切片 // type ArrayStruct struct { // value [11]int // } // type SliceStruct struct { // value []int // } // func main() { // // var as = ArrayStruct{[...]int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1}} // // var ss = SliceStruct{[]int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 11}} // fmt.Println(unsafe.Sizeof(1), unsafe.Sizeof(1)) // } // package main // import "fmt" // import "unsafe"
// type SliceStruct struct { // value []int // } // func main() { // var as = ArrayStruct{[...]int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 1}} // var ss = SliceStruct{[]int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9}} // fmt.Println(unsafe.Sizeof(as), unsafe.Sizeof(ss)) // } //8.结构体的参数传递 // package main // import "fmt" // type Circle struct { // x int // y int // Radius int // } // func expandByValue(c Circle) { // c.Radius *= 2 // } // func expandByPointer(c *Circle) { // c.Radius *= 2 // } // func main() { // var c = Circle{Radius: 50} // expandByValue(c) // fmt.Println(c) // expandByPointer(&c) // fmt.Println(c) // } //9.结构体方法 //10. //11
// type ArrayStruct struct { // value [11]int // }
vault.go
// Copyright © 2017 NAME HERE <EMAIL ADDRESS> // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package cmd import ( "github.com/spf13/cobra" ) var VaultAddr string // vaultCmd represents the vault command var vaultCmd = &cobra.Command{ Use: "vault", Short: "Set of commands to manage vault", Long: `Hashicorp Vault is used for storing sensitive information account details, configurations etc. This set of commands allows to initialize new vault, change and monitor values in vault`, Run: func(cmd *cobra.Command, args []string ) { }, } func init() { RootCmd.AddCommand(vaultCmd) vaultCmd.PersistentFlags().StringVarP(&VaultAddr,"vaultAddr" ,"", "http://127.0.0.1", "address of vault server")
}
file_series.py
# # Copyright (c) 2020, Neptune Labs Sp. z o.o. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import imghdr import os import pathlib from typing import List, Optional, Iterable from neptune.new.internal.utils import base64_encode from neptune.new.exceptions import FileNotFound, OperationNotSupported from neptune.new.types import File from neptune.new.types.series.file_series import FileSeries as FileSeriesVal from neptune.new.internal.operation import ( ImageValue, LogImages, ClearImageLog, Operation, ) from neptune.new.attributes.series.series import Series from neptune.utils import split_to_chunks Val = FileSeriesVal Data = File class FileSeries(Series[Val, Data]): def _get_log_operations_from_value( self, value: Val, step: Optional[float], timestamp: float ) -> List[Operation]: values = [ LogImages.ValueType( ImageValue( data=self._get_base64_image_content(val), name=value.name, description=value.description, ), step=step, ts=timestamp, ) for val in value.values ] return [LogImages(self._path, chunk) for chunk in split_to_chunks(values, 1)] def _get_clear_operation(self) -> Operation: return ClearImageLog(self._path) def _data_to_value(self, values: Iterable, **kwargs) -> Val: return FileSeriesVal(values, **kwargs) def _is_value_type(self, value) -> bool: return isinstance(value, FileSeriesVal) @staticmethod def _get_base64_image_content(file: File) -> str:
    def download(self, destination: Optional[str]):
        """Download every file of the series into `destination`.

        Queries the backend for the total item count, then fetches each item
        by index into the resolved target directory.
        """
        target_dir = self._get_destination(destination)
        # A 0..1 query is issued only to read totalItemCount cheaply.
        item_count = self._backend.get_image_series_values(
            self._container_id, self._container_type, self._path, 0, 1
        ).totalItemCount
        for i in range(0, item_count):
            self._backend.download_file_series_by_index(
                self._container_id, self._container_type, self._path, i, target_dir
            )

    def download_last(self, destination: Optional[str]):
        """Download only the most recent file of the series into `destination`.

        Raises:
            ValueError: if the series contains no items.
        """
        target_dir = self._get_destination(destination)
        item_count = self._backend.get_image_series_values(
            self._container_id, self._container_type, self._path, 0, 1
        ).totalItemCount
        if item_count > 0:
            self._backend.download_file_series_by_index(
                self._container_id,
                self._container_type,
                self._path,
                item_count - 1,
                target_dir,
            )
        else:
            raise ValueError("Unable to download last file - series is empty")

    def _get_destination(self, destination: Optional[str]):
        """Resolve and create the download directory.

        Falls back to ``neptune/<last path segment>`` when `destination` is
        None; the directory (and parents) is created if missing.
        """
        target_dir = destination
        if destination is None:
            target_dir = os.path.join("neptune", self._path[-1])
        pathlib.Path(os.path.abspath(target_dir)).mkdir(parents=True, exist_ok=True)
        return target_dir
if file.path is not None: if not os.path.exists(file.path): raise FileNotFound(file.path) with open(file.path, "rb") as image_file: file = File.from_stream(image_file) ext = imghdr.what("", h=file.content) if not ext: raise OperationNotSupported( "FileSeries supports only image files for now. " "Other file types will be implemented in future." ) return base64_encode(file.content)
timing.rs
//! Utilities for working with time. use std::time::{Duration, Instant}; /// Frame timing values. #[derive(Clone, Copy, Debug, PartialEq)] pub struct Time { /// Time elapsed since the last frame in seconds. delta_seconds: f32, /// Time elapsed since the last frame. delta_time: Duration, /// Time elapsed since the last frame in seconds ignoring the time speed multiplier. delta_real_seconds: f32, /// Time elapsed since the last frame ignoring the time speed multiplier. delta_real_time: Duration, /// Rate at which `State::fixed_update` is called in seconds. fixed_seconds: f32, /// Rate at which `State::fixed_update` is called. fixed_time: Duration, /// The total number of frames that have been played in this session. frame_number: u64, ///Time elapsed since game start, ignoring the speed multipler. absolute_real_time: Duration, ///Time elapsed since game start, taking the speed multiplier into account. absolute_time: Duration, ///Time multiplier. Affects returned delta_seconds, delta_time and absolute_time. time_scale: f32, /// Fixed timestep accumulator. fixed_time_accumulator: f32, /// Fixed update interpolation alpha interpolation_alpha: f32, } impl Time { /// Gets the time difference between frames in seconds. /// /// This function should not be used during `fixed_update`s, use `fixed_seconds` instead. pub fn delta_seconds(&self) -> f32 { self.delta_seconds } /// Gets the time difference between frames. /// /// This function should not be used during `fixed_update`s, use `fixed_time` instead. pub fn delta_time(&self) -> Duration { self.delta_time } /// Gets the time difference between frames in seconds ignoring the time speed multiplier. /// /// This function should not be used during `fixed_update`s. pub fn delta_real_seconds(&self) -> f32 { self.delta_real_seconds } /// Gets the time difference between frames ignoring the time speed multiplier. pub fn delta_real_time(&self) -> Duration { self.delta_real_time } /// Gets the fixed time step in seconds. 
pub fn fixed_seconds(&self) -> f32 { self.fixed_seconds } /// Gets the fixed time step. pub fn fixed_time(&self) -> Duration { self.fixed_time } /// Gets the current frame number. This increments by 1 every frame. There is no frame 0. pub fn frame_number(&self) -> u64 { self.frame_number } /// Gets the time since the start of the game, taking into account the speed multiplier. pub fn absolute_time(&self) -> Duration { self.absolute_time } /// Gets the time since the start of the game as seconds, taking into account the speed multiplier. pub fn absolute_time_seconds(&self) -> f64 { duration_to_secs_f64(self.absolute_time) } /// Gets the time since the start of the game, ignoring the speed multiplier. pub fn absolute_real_time(&self) -> Duration { self.absolute_real_time } /// Gets the time since the start of the game as seconds, ignoring the speed multiplier. pub fn absolute_real_time_seconds(&self) -> f64 { duration_to_secs_f64(self.absolute_real_time) } /// Gets the current time speed multiplier.
pub fn time_scale(&self) -> f32 { self.time_scale } /// Gets the current interpolation alpha factor. pub fn interpolation_alpha(&self) -> f32 { self.interpolation_alpha } /// Sets both `delta_seconds` and `delta_time` based on the seconds given. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn set_delta_seconds(&mut self, secs: f32) { self.delta_seconds = secs * self.time_scale; self.delta_time = secs_to_duration(secs * self.time_scale); self.delta_real_seconds = secs; self.delta_real_time = secs_to_duration(secs); self.absolute_time += self.delta_time; self.absolute_real_time += self.delta_real_time; } /// Sets both `delta_time` and `delta_seconds` based on the duration given. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn set_delta_time(&mut self, time: Duration) { self.delta_seconds = duration_to_secs(time) * self.time_scale; self.delta_time = secs_to_duration(duration_to_secs(time) * self.time_scale); self.delta_real_seconds = duration_to_secs(time); self.delta_real_time = time; self.absolute_time += self.delta_time; self.absolute_real_time += self.delta_real_time; } /// Sets both `fixed_seconds` and `fixed_time` based on the seconds given. pub fn set_fixed_seconds(&mut self, secs: f32) { self.fixed_seconds = secs; self.fixed_time = secs_to_duration(secs); } /// Sets both `fixed_time` and `fixed_seconds` based on the duration given. pub fn set_fixed_time(&mut self, time: Duration) { self.fixed_seconds = duration_to_secs(time); self.fixed_time = time; } /// Increments the current frame number by 1. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn increment_frame_number(&mut self) { self.frame_number += 1; } /// Sets the time multiplier that affects how time values are computed, /// effectively slowing or speeding up your game. 
/// /// ## Panics /// This will panic if multiplier is NaN, Infinity, or less than 0. pub fn set_time_scale(&mut self, multiplier: f32) { use std::f32::INFINITY; assert!(multiplier >= 0.0); assert!(multiplier != INFINITY); self.time_scale = multiplier; } /// Restarts the internal fixed update accumulator to the desired fixed update delta time. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn start_fixed_update(&mut self) { self.fixed_time_accumulator += self.delta_real_seconds; } /// Checks to see if we should perform another fixed update iteration, and if so, returns true /// and reduces the accumulator. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn step_fixed_update(&mut self) -> bool { if self.fixed_time_accumulator >= self.fixed_seconds { self.fixed_time_accumulator -= self.fixed_seconds; true } else { false } } /// Updates the interpolation alpha factor given the current fixed update rate and accumulator. /// /// This should only be called by the engine. Bad things might happen if you call this in /// your game. pub fn finish_fixed_update(&mut self) { self.interpolation_alpha = self.fixed_time_accumulator / self.fixed_seconds; } } impl Default for Time { fn default() -> Time { Time { delta_seconds: 0.0, delta_time: Duration::from_secs(0), delta_real_seconds: 0.0, delta_real_time: Duration::from_secs(0), fixed_seconds: duration_to_secs(Duration::new(0, 16_666_666)), fixed_time: Duration::new(0, 16_666_666), fixed_time_accumulator: 0.0, frame_number: 0, interpolation_alpha: 0.0, absolute_real_time: Duration::default(), absolute_time: Duration::default(), time_scale: 1.0, } } } /// A stopwatch which accurately measures elapsed time. #[derive(Clone, Debug, Eq, PartialEq)] pub enum Stopwatch { /// Initial state with an elapsed time value of 0 seconds. 
Waiting, /// Stopwatch has started counting the elapsed time since this `Instant` /// and accumuluated time from previous start/stop cycles `Duration`. Started(Duration, Instant), /// Stopwatch has been stopped and reports the elapsed time `Duration`. Ended(Duration), } impl Default for Stopwatch { fn default() -> Stopwatch { Stopwatch::Waiting } } impl Stopwatch { /// Creates a new stopwatch. pub fn new() -> Stopwatch { Default::default() } /// Retrieves the elapsed time. pub fn elapsed(&self) -> Duration { match *self { Stopwatch::Waiting => Duration::new(0, 0), Stopwatch::Started(dur, start) => dur + start.elapsed(), Stopwatch::Ended(dur) => dur, } } /// Stops, resets, and starts the stopwatch again. pub fn restart(&mut self) { *self = Stopwatch::Started(Duration::new(0, 0), Instant::now()); } /// Starts, or resumes, measuring elapsed time. If the stopwatch has been /// started and stopped before, the new results are compounded onto the /// existing elapsed time value. /// /// Note: Starting an already running stopwatch will do nothing. pub fn start(&mut self) { match *self { Stopwatch::Waiting => self.restart(), Stopwatch::Ended(dur) => { *self = Stopwatch::Started(dur, Instant::now()); } _ => {} } } /// Stops measuring elapsed time. /// /// Note: Stopping a stopwatch that isn't running will do nothing. pub fn stop(&mut self) { if let Stopwatch::Started(dur, start) = *self { *self = Stopwatch::Ended(dur + start.elapsed()); } } /// Clears the current elapsed time value. pub fn reset(&mut self) { *self = Stopwatch::Waiting; } } // Unit tests #[cfg(test)] mod tests { use std::{thread, time::Duration}; use super::Stopwatch; // Timing varies more on macOS CI fn get_uncertainty() -> u32 { 15 } #[test] #[cfg(not(target_os = "macos"))] fn elapsed() { const DURATION: u64 = 1; // in seconds. 
let mut watch = Stopwatch::new(); let uncertainty = get_uncertainty(); watch.start(); thread::sleep(Duration::from_secs(DURATION)); watch.stop(); // check that elapsed time was DURATION sec +/- UNCERTAINTY% let elapsed = watch.elapsed(); let duration = Duration::new(DURATION, 0); let lower = duration / 100 * (100 - uncertainty); let upper = duration / 100 * (100 + uncertainty); assert!( elapsed < upper && elapsed > lower, "expected {} +- {}% seconds, got {:?}", DURATION, uncertainty, elapsed ); } #[test] #[cfg(not(target_os = "macos"))] fn reset() { let mut watch = Stopwatch::new(); watch.start(); thread::sleep(Duration::from_millis(30)); watch.stop(); watch.reset(); assert_eq!(0, watch.elapsed().subsec_nanos()); } #[test] #[cfg(not(target_os = "macos"))] fn restart() { const DURATION0: u64 = 1000; // in milliseconds. const DURATION: u64 = 500; // in milliseconds. let uncertainty = get_uncertainty(); // in percents. let mut watch = Stopwatch::new(); watch.start(); thread::sleep(Duration::from_millis(DURATION0)); watch.stop(); watch.restart(); thread::sleep(Duration::from_millis(DURATION)); watch.stop(); // check that elapsed time was DURATION sec +/- UNCERTAINTY% let elapsed = watch.elapsed(); let duration = Duration::from_millis(DURATION); let lower = duration / 100 * (100 - uncertainty); let upper = duration / 100 * (100 + uncertainty); assert!( elapsed < upper && elapsed > lower, "expected {} +- {}% seconds, got {:?}", DURATION, uncertainty, elapsed ); } // test that multiple start-stop cycles are cumulative #[test] #[cfg(not(target_os = "macos"))] fn stop_start() { let uncertainty = get_uncertainty(); // in percents. 
let mut watch = Stopwatch::new(); for _ in 0..3 { watch.start(); thread::sleep(Duration::from_millis(200)); watch.stop(); } // check that elapsed time was DURATION sec +/- UNCERTAINTY% let elapsed = watch.elapsed(); let duration = Duration::from_millis(600); let lower = duration / 100 * (100 - uncertainty); let upper = duration / 100 * (100 + uncertainty); assert!( elapsed < upper && elapsed > lower, "expected {} +-{}% milliseconds, got {:?}", 600, uncertainty, elapsed ); } // Test that fixed_update methods accumulate and return correctly // Test confirms that with a fixed update of 120fps, we run fixed update twice with the timer // Runs at 10 times game speed, which shouldn't affect fixed updates #[test] fn fixed_update_120fps() { use super::Time; let mut time = Time::default(); time.set_fixed_seconds(1.0 / 120.0); time.set_time_scale(10.0); let step = 1.0 / 60.0; let mut fixed_count = 0; for _ in 0..60 { time.set_delta_seconds(step); time.start_fixed_update(); while time.step_fixed_update() { fixed_count += 1; } time.finish_fixed_update(); } assert_eq!(fixed_count, 120); } // Test that fixed_update methods accumulate and return correctly // Test confirms that with a fixed update every 1 second, it runs every 1 second only #[test] fn fixed_update_1sec() { use super::Time; let mut time = Time::default(); time.set_fixed_seconds(1.0); let step = 1.0 / 60.0; let mut fixed_count = 0; for _ in 0..130 { // Run two seconds time.set_delta_seconds(step); time.start_fixed_update(); while time.step_fixed_update() { fixed_count += 1; } time.finish_fixed_update(); } assert_eq!(fixed_count, 2); } } /// Converts a Duration to the time in seconds. pub fn duration_to_secs(duration: Duration) -> f32 { duration.as_secs() as f32 + (duration.subsec_nanos() as f32 / 1.0e9) } /// Converts a Duration to the time in seconds in an f64. 
/// Converts a `Duration` into fractional seconds as an `f64`.
pub fn duration_to_secs_f64(duration: Duration) -> f64 {
    let whole = duration.as_secs() as f64;
    let frac = f64::from(duration.subsec_nanos()) / 1.0e9;
    whole + frac
}

/// Converts a time in seconds to a duration
pub fn secs_to_duration(secs: f32) -> Duration {
    let whole_secs = secs as u64;
    let nanos = ((secs % 1.0) * 1.0e9) as u32;
    Duration::new(whole_secs, nanos)
}

/// Converts a Duration to nanoseconds
pub fn duration_to_nanos(duration: Duration) -> u64 {
    let secs_as_nanos = duration.as_secs() * 1_000_000_000;
    let sub = u64::from(duration.subsec_nanos());
    secs_as_nanos + sub
}

/// Converts nanoseconds to a Duration
pub fn nanos_to_duration(nanos: u64) -> Duration {
    let whole_secs = nanos / 1_000_000_000;
    let remainder = (nanos % 1_000_000_000) as u32;
    Duration::new(whole_secs, remainder)
}
serializers.py
from rest_framework import serializers

from orchestra.api import router
from orchestra.contrib.accounts.models import Account
from orchestra.contrib.accounts.serializers import AccountSerializerMixin

from .models import Bill, BillLine, BillContact


class BillLineSerializer(serializers.HyperlinkedModelSerializer):
    """Hyperlinked REST serializer for individual bill lines."""

    class Meta:
        model = BillLine


class BillSerializer(AccountSerializerMixin, serializers.HyperlinkedModelSerializer):
    """Hyperlinked REST serializer for bills, scoped to the owning account."""

    # NOTE(review): nested lines are disabled; re-enable once BillLineSerializer
    # exposes the fields the API should publish — confirm intent.
    # lines = BillLineSerializer(source='lines')

    class Meta:
        model = Bill
        fields = (
            'url', 'id', 'number', 'type', 'total', 'is_sent', 'created_on', 'due_on',
            'comments', # 'lines'
        )


class BillContactSerializer(AccountSerializerMixin, serializers.ModelSerializer):
    """Serializer for an account's billing contact details."""

    class Meta:
router.insert(Account, 'billcontact', BillContactSerializer, required=False)
model = BillContact fields = ('name', 'address', 'city', 'zipcode', 'country', 'vat')
tsserverlibrary.d.ts
/*! ***************************************************************************** Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE, MERCHANTABLITY OR NON-INFRINGEMENT. See the Apache Version 2.0 License for specific language governing permissions and limitations under the License. ***************************************************************************** */ declare namespace ts { const versionMajorMinor = "4.1"; /** The version of the TypeScript compiler release */ const version: string; /** * Type of objects whose values are all of the same type. * The `in` and `for-in` operators can *not* be safely used, * since `Object.prototype` may be modified by outside code. */ interface MapLike<T> { [index: string]: T; } interface SortedReadonlyArray<T> extends ReadonlyArray<T> { " __sortedArrayBrand": any; } interface SortedArray<T> extends Array<T> { " __sortedArrayBrand": any; } /** Common read methods for ES6 Map/Set. */ interface ReadonlyCollection<K> { readonly size: number; has(key: K): boolean; keys(): Iterator<K>; } /** Common write methods for ES6 Map/Set. */ interface Collection<K> extends ReadonlyCollection<K> { delete(key: K): boolean; clear(): void; } /** ES6 Map interface, only read methods included. */ interface ReadonlyESMap<K, V> extends ReadonlyCollection<K> { get(key: K): V | undefined; values(): Iterator<V>; entries(): Iterator<[K, V]>; forEach(action: (value: V, key: K) => void): void; } /** * ES6 Map interface, only read methods included. 
*/ interface ReadonlyMap<T> extends ReadonlyESMap<string, T> { } /** ES6 Map interface. */ interface ESMap<K, V> extends ReadonlyESMap<K, V>, Collection<K> { set(key: K, value: V): this; } /** * ES6 Map interface. */ interface Map<T> extends ESMap<string, T> { } /** ES6 Set interface, only read methods included. */ interface ReadonlySet<T> extends ReadonlyCollection<T> { has(value: T): boolean; values(): Iterator<T>; entries(): Iterator<[T, T]>; forEach(action: (value: T, key: T) => void): void; } /** ES6 Set interface. */ interface Set<T> extends ReadonlySet<T>, Collection<T> { add(value: T): this; delete(value: T): boolean; } /** ES6 Iterator type. */ interface Iterator<T> { next(): { value: T; done?: false; } | { value: never; done: true; }; } /** Array that is only intended to be pushed to, never read. */ interface Push<T> { push(...values: T[]): void; } } declare namespace ts { export type Path = string & { __pathBrand: any; }; export interface TextRange { pos: number; end: number; } export interface ReadonlyTextRange { readonly pos: number; readonly end: number; } export enum SyntaxKind { Unknown = 0, EndOfFileToken = 1, SingleLineCommentTrivia = 2, MultiLineCommentTrivia = 3, NewLineTrivia = 4, WhitespaceTrivia = 5, ShebangTrivia = 6, ConflictMarkerTrivia = 7, NumericLiteral = 8, BigIntLiteral = 9, StringLiteral = 10, JsxText = 11, JsxTextAllWhiteSpaces = 12, RegularExpressionLiteral = 13, NoSubstitutionTemplateLiteral = 14, TemplateHead = 15, TemplateMiddle = 16, TemplateTail = 17, OpenBraceToken = 18, CloseBraceToken = 19, OpenParenToken = 20, CloseParenToken = 21, OpenBracketToken = 22, CloseBracketToken = 23, DotToken = 24, DotDotDotToken = 25, SemicolonToken = 26, CommaToken = 27, QuestionDotToken = 28, LessThanToken = 29, LessThanSlashToken = 30, GreaterThanToken = 31, LessThanEqualsToken = 32, GreaterThanEqualsToken = 33, EqualsEqualsToken = 34, ExclamationEqualsToken = 35, EqualsEqualsEqualsToken = 36, ExclamationEqualsEqualsToken = 37, 
EqualsGreaterThanToken = 38, PlusToken = 39, MinusToken = 40, AsteriskToken = 41, AsteriskAsteriskToken = 42, SlashToken = 43, PercentToken = 44, PlusPlusToken = 45, MinusMinusToken = 46, LessThanLessThanToken = 47, GreaterThanGreaterThanToken = 48, GreaterThanGreaterThanGreaterThanToken = 49, AmpersandToken = 50, BarToken = 51, CaretToken = 52, ExclamationToken = 53, TildeToken = 54, AmpersandAmpersandToken = 55, BarBarToken = 56, QuestionToken = 57, ColonToken = 58, AtToken = 59, QuestionQuestionToken = 60, /** Only the JSDoc scanner produces BacktickToken. The normal scanner produces NoSubstitutionTemplateLiteral and related kinds. */ BacktickToken = 61, EqualsToken = 62, PlusEqualsToken = 63, MinusEqualsToken = 64, AsteriskEqualsToken = 65, AsteriskAsteriskEqualsToken = 66, SlashEqualsToken = 67, PercentEqualsToken = 68, LessThanLessThanEqualsToken = 69, GreaterThanGreaterThanEqualsToken = 70, GreaterThanGreaterThanGreaterThanEqualsToken = 71, AmpersandEqualsToken = 72, BarEqualsToken = 73, BarBarEqualsToken = 74, AmpersandAmpersandEqualsToken = 75, QuestionQuestionEqualsToken = 76, CaretEqualsToken = 77, Identifier = 78, PrivateIdentifier = 79, BreakKeyword = 80, CaseKeyword = 81, CatchKeyword = 82, ClassKeyword = 83, ConstKeyword = 84, ContinueKeyword = 85, DebuggerKeyword = 86, DefaultKeyword = 87, DeleteKeyword = 88, DoKeyword = 89, ElseKeyword = 90, EnumKeyword = 91, ExportKeyword = 92, ExtendsKeyword = 93, FalseKeyword = 94, FinallyKeyword = 95, ForKeyword = 96, FunctionKeyword = 97, IfKeyword = 98, ImportKeyword = 99, InKeyword = 100, InstanceOfKeyword = 101, NewKeyword = 102, NullKeyword = 103, ReturnKeyword = 104, SuperKeyword = 105, SwitchKeyword = 106, ThisKeyword = 107, ThrowKeyword = 108, TrueKeyword = 109, TryKeyword = 110, TypeOfKeyword = 111, VarKeyword = 112, VoidKeyword = 113, WhileKeyword = 114, WithKeyword = 115, ImplementsKeyword = 116, InterfaceKeyword = 117, LetKeyword = 118, PackageKeyword = 119, PrivateKeyword = 120, ProtectedKeyword = 
121, PublicKeyword = 122, StaticKeyword = 123, YieldKeyword = 124, AbstractKeyword = 125, AsKeyword = 126, AssertsKeyword = 127, AnyKeyword = 128, AsyncKeyword = 129, AwaitKeyword = 130, BooleanKeyword = 131, ConstructorKeyword = 132, DeclareKeyword = 133, GetKeyword = 134, InferKeyword = 135, IntrinsicKeyword = 136, IsKeyword = 137, KeyOfKeyword = 138, ModuleKeyword = 139, NamespaceKeyword = 140, NeverKeyword = 141, ReadonlyKeyword = 142, RequireKeyword = 143, NumberKeyword = 144, ObjectKeyword = 145, SetKeyword = 146, StringKeyword = 147, SymbolKeyword = 148, TypeKeyword = 149, UndefinedKeyword = 150, UniqueKeyword = 151, UnknownKeyword = 152, FromKeyword = 153, GlobalKeyword = 154, BigIntKeyword = 155, OfKeyword = 156, QualifiedName = 157, ComputedPropertyName = 158, TypeParameter = 159, Parameter = 160, Decorator = 161, PropertySignature = 162, PropertyDeclaration = 163, MethodSignature = 164, MethodDeclaration = 165, Constructor = 166, GetAccessor = 167, SetAccessor = 168, CallSignature = 169, ConstructSignature = 170, IndexSignature = 171, TypePredicate = 172, TypeReference = 173, FunctionType = 174, ConstructorType = 175, TypeQuery = 176, TypeLiteral = 177, ArrayType = 178, TupleType = 179, OptionalType = 180, RestType = 181, UnionType = 182, IntersectionType = 183, ConditionalType = 184, InferType = 185, ParenthesizedType = 186, ThisType = 187, TypeOperator = 188, IndexedAccessType = 189, MappedType = 190, LiteralType = 191, NamedTupleMember = 192, TemplateLiteralType = 193, TemplateLiteralTypeSpan = 194, ImportType = 195, ObjectBindingPattern = 196, ArrayBindingPattern = 197, BindingElement = 198, ArrayLiteralExpression = 199, ObjectLiteralExpression = 200, PropertyAccessExpression = 201, ElementAccessExpression = 202, CallExpression = 203, NewExpression = 204, TaggedTemplateExpression = 205, TypeAssertionExpression = 206, ParenthesizedExpression = 207, FunctionExpression = 208, ArrowFunction = 209, DeleteExpression = 210, TypeOfExpression = 211, 
VoidExpression = 212, AwaitExpression = 213, PrefixUnaryExpression = 214, PostfixUnaryExpression = 215, BinaryExpression = 216, ConditionalExpression = 217, TemplateExpression = 218, YieldExpression = 219, SpreadElement = 220, ClassExpression = 221, OmittedExpression = 222, ExpressionWithTypeArguments = 223, AsExpression = 224, NonNullExpression = 225, MetaProperty = 226, SyntheticExpression = 227, TemplateSpan = 228, SemicolonClassElement = 229, Block = 230, EmptyStatement = 231, VariableStatement = 232, ExpressionStatement = 233, IfStatement = 234, DoStatement = 235, WhileStatement = 236, ForStatement = 237, ForInStatement = 238, ForOfStatement = 239, ContinueStatement = 240, BreakStatement = 241, ReturnStatement = 242, WithStatement = 243, SwitchStatement = 244, LabeledStatement = 245, ThrowStatement = 246, TryStatement = 247, DebuggerStatement = 248, VariableDeclaration = 249, VariableDeclarationList = 250, FunctionDeclaration = 251, ClassDeclaration = 252, InterfaceDeclaration = 253, TypeAliasDeclaration = 254, EnumDeclaration = 255, ModuleDeclaration = 256, ModuleBlock = 257, CaseBlock = 258, NamespaceExportDeclaration = 259, ImportEqualsDeclaration = 260, ImportDeclaration = 261, ImportClause = 262, NamespaceImport = 263, NamedImports = 264, ImportSpecifier = 265, ExportAssignment = 266, ExportDeclaration = 267, NamedExports = 268, NamespaceExport = 269, ExportSpecifier = 270, MissingDeclaration = 271, ExternalModuleReference = 272, JsxElement = 273, JsxSelfClosingElement = 274, JsxOpeningElement = 275, JsxClosingElement = 276, JsxFragment = 277, JsxOpeningFragment = 278, JsxClosingFragment = 279, JsxAttribute = 280, JsxAttributes = 281, JsxSpreadAttribute = 282, JsxExpression = 283, CaseClause = 284, DefaultClause = 285, HeritageClause = 286, CatchClause = 287, PropertyAssignment = 288, ShorthandPropertyAssignment = 289, SpreadAssignment = 290, EnumMember = 291, UnparsedPrologue = 292, UnparsedPrepend = 293, UnparsedText = 294, UnparsedInternalText = 295, 
UnparsedSyntheticReference = 296, SourceFile = 297, Bundle = 298, UnparsedSource = 299, InputFiles = 300, JSDocTypeExpression = 301, JSDocNameReference = 302, JSDocAllType = 303, JSDocUnknownType = 304, JSDocNullableType = 305, JSDocNonNullableType = 306, JSDocOptionalType = 307, JSDocFunctionType = 308, JSDocVariadicType = 309, JSDocNamepathType = 310, JSDocComment = 311, JSDocTypeLiteral = 312, JSDocSignature = 313, JSDocTag = 314, JSDocAugmentsTag = 315, JSDocImplementsTag = 316, JSDocAuthorTag = 317, JSDocDeprecatedTag = 318, JSDocClassTag = 319, JSDocPublicTag = 320, JSDocPrivateTag = 321, JSDocProtectedTag = 322, JSDocReadonlyTag = 323, JSDocCallbackTag = 324, JSDocEnumTag = 325, JSDocParameterTag = 326, JSDocReturnTag = 327, JSDocThisTag = 328, JSDocTypeTag = 329, JSDocTemplateTag = 330, JSDocTypedefTag = 331, JSDocSeeTag = 332, JSDocPropertyTag = 333, SyntaxList = 334, NotEmittedStatement = 335, PartiallyEmittedExpression = 336, CommaListExpression = 337, MergeDeclarationMarker = 338, EndOfDeclarationMarker = 339, SyntheticReferenceExpression = 340, Count = 341, FirstAssignment = 62, LastAssignment = 77, FirstCompoundAssignment = 63, LastCompoundAssignment = 77, FirstReservedWord = 80, LastReservedWord = 115, FirstKeyword = 80, LastKeyword = 156, FirstFutureReservedWord = 116, LastFutureReservedWord = 124, FirstTypeNode = 172, LastTypeNode = 195, FirstPunctuation = 18, LastPunctuation = 77, FirstToken = 0,
FirstLiteralToken = 8, LastLiteralToken = 14, FirstTemplateToken = 14, LastTemplateToken = 17, FirstBinaryOperator = 29, LastBinaryOperator = 77, FirstStatement = 232, LastStatement = 248, FirstNode = 157, FirstJSDocNode = 301, LastJSDocNode = 333, FirstJSDocTagNode = 314, LastJSDocTagNode = 333, } export type TriviaSyntaxKind = SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia | SyntaxKind.NewLineTrivia | SyntaxKind.WhitespaceTrivia | SyntaxKind.ShebangTrivia | SyntaxKind.ConflictMarkerTrivia; export type LiteralSyntaxKind = SyntaxKind.NumericLiteral | SyntaxKind.BigIntLiteral | SyntaxKind.StringLiteral | SyntaxKind.JsxText | SyntaxKind.JsxTextAllWhiteSpaces | SyntaxKind.RegularExpressionLiteral | SyntaxKind.NoSubstitutionTemplateLiteral; export type PseudoLiteralSyntaxKind = SyntaxKind.TemplateHead | SyntaxKind.TemplateMiddle | SyntaxKind.TemplateTail; export type PunctuationSyntaxKind = SyntaxKind.OpenBraceToken | SyntaxKind.CloseBraceToken | SyntaxKind.OpenParenToken | SyntaxKind.CloseParenToken | SyntaxKind.OpenBracketToken | SyntaxKind.CloseBracketToken | SyntaxKind.DotToken | SyntaxKind.DotDotDotToken | SyntaxKind.SemicolonToken | SyntaxKind.CommaToken | SyntaxKind.QuestionDotToken | SyntaxKind.LessThanToken | SyntaxKind.LessThanSlashToken | SyntaxKind.GreaterThanToken | SyntaxKind.LessThanEqualsToken | SyntaxKind.GreaterThanEqualsToken | SyntaxKind.EqualsEqualsToken | SyntaxKind.ExclamationEqualsToken | SyntaxKind.EqualsEqualsEqualsToken | SyntaxKind.ExclamationEqualsEqualsToken | SyntaxKind.EqualsGreaterThanToken | SyntaxKind.PlusToken | SyntaxKind.MinusToken | SyntaxKind.AsteriskToken | SyntaxKind.AsteriskAsteriskToken | SyntaxKind.SlashToken | SyntaxKind.PercentToken | SyntaxKind.PlusPlusToken | SyntaxKind.MinusMinusToken | SyntaxKind.LessThanLessThanToken | SyntaxKind.GreaterThanGreaterThanToken | SyntaxKind.GreaterThanGreaterThanGreaterThanToken | SyntaxKind.AmpersandToken | SyntaxKind.BarToken | SyntaxKind.CaretToken | 
SyntaxKind.ExclamationToken | SyntaxKind.TildeToken | SyntaxKind.AmpersandAmpersandToken | SyntaxKind.BarBarToken | SyntaxKind.QuestionQuestionToken | SyntaxKind.QuestionToken | SyntaxKind.ColonToken | SyntaxKind.AtToken | SyntaxKind.BacktickToken | SyntaxKind.EqualsToken | SyntaxKind.PlusEqualsToken | SyntaxKind.MinusEqualsToken | SyntaxKind.AsteriskEqualsToken | SyntaxKind.AsteriskAsteriskEqualsToken | SyntaxKind.SlashEqualsToken | SyntaxKind.PercentEqualsToken | SyntaxKind.LessThanLessThanEqualsToken | SyntaxKind.GreaterThanGreaterThanEqualsToken | SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken | SyntaxKind.AmpersandEqualsToken | SyntaxKind.BarEqualsToken | SyntaxKind.CaretEqualsToken; export type KeywordSyntaxKind = SyntaxKind.AbstractKeyword | SyntaxKind.AnyKeyword | SyntaxKind.AsKeyword | SyntaxKind.AssertsKeyword | SyntaxKind.AsyncKeyword | SyntaxKind.AwaitKeyword | SyntaxKind.BigIntKeyword | SyntaxKind.BooleanKeyword | SyntaxKind.BreakKeyword | SyntaxKind.CaseKeyword | SyntaxKind.CatchKeyword | SyntaxKind.ClassKeyword | SyntaxKind.ConstKeyword | SyntaxKind.ConstructorKeyword | SyntaxKind.ContinueKeyword | SyntaxKind.DebuggerKeyword | SyntaxKind.DeclareKeyword | SyntaxKind.DefaultKeyword | SyntaxKind.DeleteKeyword | SyntaxKind.DoKeyword | SyntaxKind.ElseKeyword | SyntaxKind.EnumKeyword | SyntaxKind.ExportKeyword | SyntaxKind.ExtendsKeyword | SyntaxKind.FalseKeyword | SyntaxKind.FinallyKeyword | SyntaxKind.ForKeyword | SyntaxKind.FromKeyword | SyntaxKind.FunctionKeyword | SyntaxKind.GetKeyword | SyntaxKind.GlobalKeyword | SyntaxKind.IfKeyword | SyntaxKind.ImplementsKeyword | SyntaxKind.ImportKeyword | SyntaxKind.InferKeyword | SyntaxKind.InKeyword | SyntaxKind.InstanceOfKeyword | SyntaxKind.InterfaceKeyword | SyntaxKind.IntrinsicKeyword | SyntaxKind.IsKeyword | SyntaxKind.KeyOfKeyword | SyntaxKind.LetKeyword | SyntaxKind.ModuleKeyword | SyntaxKind.NamespaceKeyword | SyntaxKind.NeverKeyword | SyntaxKind.NewKeyword | SyntaxKind.NullKeyword | 
SyntaxKind.NumberKeyword | SyntaxKind.ObjectKeyword | SyntaxKind.OfKeyword | SyntaxKind.PackageKeyword | SyntaxKind.PrivateKeyword | SyntaxKind.ProtectedKeyword | SyntaxKind.PublicKeyword | SyntaxKind.ReadonlyKeyword | SyntaxKind.RequireKeyword | SyntaxKind.ReturnKeyword | SyntaxKind.SetKeyword | SyntaxKind.StaticKeyword | SyntaxKind.StringKeyword | SyntaxKind.SuperKeyword | SyntaxKind.SwitchKeyword | SyntaxKind.SymbolKeyword | SyntaxKind.ThisKeyword | SyntaxKind.ThrowKeyword | SyntaxKind.TrueKeyword | SyntaxKind.TryKeyword | SyntaxKind.TypeKeyword | SyntaxKind.TypeOfKeyword | SyntaxKind.UndefinedKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.UnknownKeyword | SyntaxKind.VarKeyword | SyntaxKind.VoidKeyword | SyntaxKind.WhileKeyword | SyntaxKind.WithKeyword | SyntaxKind.YieldKeyword; export type ModifierSyntaxKind = SyntaxKind.AbstractKeyword | SyntaxKind.AsyncKeyword | SyntaxKind.ConstKeyword | SyntaxKind.DeclareKeyword | SyntaxKind.DefaultKeyword | SyntaxKind.ExportKeyword | SyntaxKind.PrivateKeyword | SyntaxKind.ProtectedKeyword | SyntaxKind.PublicKeyword | SyntaxKind.ReadonlyKeyword | SyntaxKind.StaticKeyword; export type KeywordTypeSyntaxKind = SyntaxKind.AnyKeyword | SyntaxKind.BigIntKeyword | SyntaxKind.BooleanKeyword | SyntaxKind.IntrinsicKeyword | SyntaxKind.NeverKeyword | SyntaxKind.NumberKeyword | SyntaxKind.ObjectKeyword | SyntaxKind.StringKeyword | SyntaxKind.SymbolKeyword | SyntaxKind.UndefinedKeyword | SyntaxKind.UnknownKeyword | SyntaxKind.VoidKeyword; export type TokenSyntaxKind = SyntaxKind.Unknown | SyntaxKind.EndOfFileToken | TriviaSyntaxKind | LiteralSyntaxKind | PseudoLiteralSyntaxKind | PunctuationSyntaxKind | SyntaxKind.Identifier | KeywordSyntaxKind; export type JsxTokenSyntaxKind = SyntaxKind.LessThanSlashToken | SyntaxKind.EndOfFileToken | SyntaxKind.ConflictMarkerTrivia | SyntaxKind.JsxText | SyntaxKind.JsxTextAllWhiteSpaces | SyntaxKind.OpenBraceToken | SyntaxKind.LessThanToken; export type JSDocSyntaxKind = SyntaxKind.EndOfFileToken | 
SyntaxKind.WhitespaceTrivia | SyntaxKind.AtToken | SyntaxKind.NewLineTrivia | SyntaxKind.AsteriskToken | SyntaxKind.OpenBraceToken | SyntaxKind.CloseBraceToken | SyntaxKind.LessThanToken | SyntaxKind.GreaterThanToken | SyntaxKind.OpenBracketToken | SyntaxKind.CloseBracketToken | SyntaxKind.EqualsToken | SyntaxKind.CommaToken | SyntaxKind.DotToken | SyntaxKind.Identifier | SyntaxKind.BacktickToken | SyntaxKind.Unknown | KeywordSyntaxKind; export enum NodeFlags { None = 0, Let = 1, Const = 2, NestedNamespace = 4, Synthesized = 8, Namespace = 16, OptionalChain = 32, ExportContext = 64, ContainsThis = 128, HasImplicitReturn = 256, HasExplicitReturn = 512, GlobalAugmentation = 1024, HasAsyncFunctions = 2048, DisallowInContext = 4096, YieldContext = 8192, DecoratorContext = 16384, AwaitContext = 32768, ThisNodeHasError = 65536, JavaScriptFile = 131072, ThisNodeOrAnySubNodesHasError = 262144, HasAggregatedChildData = 524288, JSDoc = 4194304, JsonFile = 33554432, BlockScoped = 3, ReachabilityCheckFlags = 768, ReachabilityAndEmitFlags = 2816, ContextFlags = 25358336, TypeExcludesFlags = 40960, } export enum ModifierFlags { None = 0, Export = 1, Ambient = 2, Public = 4, Private = 8, Protected = 16, Static = 32, Readonly = 64, Abstract = 128, Async = 256, Default = 512, Const = 2048, HasComputedJSDocModifiers = 4096, Deprecated = 8192, HasComputedFlags = 536870912, AccessibilityModifier = 28, ParameterPropertyModifier = 92, NonPublicAccessibilityModifier = 24, TypeScriptModifier = 2270, ExportDefault = 513, All = 11263 } export enum JsxFlags { None = 0, /** An element from a named property of the JSX.IntrinsicElements interface */ IntrinsicNamedElement = 1, /** An element inferred from the string index signature of the JSX.IntrinsicElements interface */ IntrinsicIndexedElement = 2, IntrinsicElement = 3 } export interface Node extends ReadonlyTextRange { readonly kind: SyntaxKind; readonly flags: NodeFlags; readonly decorators?: NodeArray<Decorator>; readonly modifiers?: 
ModifiersArray; readonly parent: Node; } export interface JSDocContainer { } export type HasJSDoc = ParameterDeclaration | CallSignatureDeclaration | ConstructSignatureDeclaration | MethodSignature | PropertySignature | ArrowFunction | ParenthesizedExpression | SpreadAssignment | ShorthandPropertyAssignment | PropertyAssignment | FunctionExpression | LabeledStatement | ExpressionStatement | VariableStatement | FunctionDeclaration | ConstructorDeclaration | MethodDeclaration | PropertyDeclaration | AccessorDeclaration | ClassLikeDeclaration | InterfaceDeclaration | TypeAliasDeclaration | EnumMember | EnumDeclaration | ModuleDeclaration | ImportEqualsDeclaration | ImportDeclaration | NamespaceExportDeclaration | ExportAssignment | IndexSignatureDeclaration | FunctionTypeNode | ConstructorTypeNode | JSDocFunctionType | ExportDeclaration | NamedTupleMember | EndOfFileToken; export type HasType = SignatureDeclaration | VariableDeclaration | ParameterDeclaration | PropertySignature | PropertyDeclaration | TypePredicateNode | ParenthesizedTypeNode | TypeOperatorNode | MappedTypeNode | AssertionExpression | TypeAliasDeclaration | JSDocTypeExpression | JSDocNonNullableType | JSDocNullableType | JSDocOptionalType | JSDocVariadicType; export type HasTypeArguments = CallExpression | NewExpression | TaggedTemplateExpression | JsxOpeningElement | JsxSelfClosingElement; export type HasInitializer = HasExpressionInitializer | ForStatement | ForInStatement | ForOfStatement | JsxAttribute; export type HasExpressionInitializer = VariableDeclaration | ParameterDeclaration | BindingElement | PropertySignature | PropertyDeclaration | PropertyAssignment | EnumMember; export interface NodeArray<T extends Node> extends ReadonlyArray<T>, ReadonlyTextRange { hasTrailingComma?: boolean; } export interface Token<TKind extends SyntaxKind> extends Node { readonly kind: TKind; } export type EndOfFileToken = Token<SyntaxKind.EndOfFileToken> & JSDocContainer; export interface PunctuationToken<TKind 
extends PunctuationSyntaxKind> extends Token<TKind> { } export type DotToken = PunctuationToken<SyntaxKind.DotToken>; export type DotDotDotToken = PunctuationToken<SyntaxKind.DotDotDotToken>; export type QuestionToken = PunctuationToken<SyntaxKind.QuestionToken>; export type ExclamationToken = PunctuationToken<SyntaxKind.ExclamationToken>; export type ColonToken = PunctuationToken<SyntaxKind.ColonToken>; export type EqualsToken = PunctuationToken<SyntaxKind.EqualsToken>; export type AsteriskToken = PunctuationToken<SyntaxKind.AsteriskToken>; export type EqualsGreaterThanToken = PunctuationToken<SyntaxKind.EqualsGreaterThanToken>; export type PlusToken = PunctuationToken<SyntaxKind.PlusToken>; export type MinusToken = PunctuationToken<SyntaxKind.MinusToken>; export type QuestionDotToken = PunctuationToken<SyntaxKind.QuestionDotToken>; export interface KeywordToken<TKind extends KeywordSyntaxKind> extends Token<TKind> { } export type AssertsKeyword = KeywordToken<SyntaxKind.AssertsKeyword>; export type AwaitKeyword = KeywordToken<SyntaxKind.AwaitKeyword>; /** @deprecated Use `AwaitKeyword` instead. */ export type AwaitKeywordToken = AwaitKeyword; /** @deprecated Use `AssertsKeyword` instead. 
*/ export type AssertsToken = AssertsKeyword; export interface ModifierToken<TKind extends ModifierSyntaxKind> extends KeywordToken<TKind> { } export type AbstractKeyword = ModifierToken<SyntaxKind.AbstractKeyword>; export type AsyncKeyword = ModifierToken<SyntaxKind.AsyncKeyword>; export type ConstKeyword = ModifierToken<SyntaxKind.ConstKeyword>; export type DeclareKeyword = ModifierToken<SyntaxKind.DeclareKeyword>; export type DefaultKeyword = ModifierToken<SyntaxKind.DefaultKeyword>; export type ExportKeyword = ModifierToken<SyntaxKind.ExportKeyword>; export type PrivateKeyword = ModifierToken<SyntaxKind.PrivateKeyword>; export type ProtectedKeyword = ModifierToken<SyntaxKind.ProtectedKeyword>; export type PublicKeyword = ModifierToken<SyntaxKind.PublicKeyword>; export type ReadonlyKeyword = ModifierToken<SyntaxKind.ReadonlyKeyword>; export type StaticKeyword = ModifierToken<SyntaxKind.StaticKeyword>; /** @deprecated Use `ReadonlyKeyword` instead. */ export type ReadonlyToken = ReadonlyKeyword; export type Modifier = AbstractKeyword | AsyncKeyword | ConstKeyword | DeclareKeyword | DefaultKeyword | ExportKeyword | PrivateKeyword | ProtectedKeyword | PublicKeyword | ReadonlyKeyword | StaticKeyword; export type AccessibilityModifier = PublicKeyword | PrivateKeyword | ProtectedKeyword; export type ParameterPropertyModifier = AccessibilityModifier | ReadonlyKeyword; export type ClassMemberModifier = AccessibilityModifier | ReadonlyKeyword | StaticKeyword; export type ModifiersArray = NodeArray<Modifier>; export enum GeneratedIdentifierFlags { None = 0, ReservedInNestedScopes = 8, Optimistic = 16, FileLevel = 32, AllowNameSubstitution = 64 } export interface Identifier extends PrimaryExpression, Declaration { readonly kind: SyntaxKind.Identifier; /** * Prefer to use `id.unescapedText`. (Note: This is available only in services, not internally to the TypeScript compiler.) 
* Text of identifier, but if the identifier begins with two underscores, this will begin with three. */ readonly escapedText: __String; readonly originalKeywordKind?: SyntaxKind; isInJSDocNamespace?: boolean; } export interface TransientIdentifier extends Identifier { resolvedSymbol: Symbol; } export interface QualifiedName extends Node { readonly kind: SyntaxKind.QualifiedName; readonly left: EntityName; readonly right: Identifier; } export type EntityName = Identifier | QualifiedName; export type PropertyName = Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier; export type DeclarationName = Identifier | PrivateIdentifier | StringLiteralLike | NumericLiteral | ComputedPropertyName | ElementAccessExpression | BindingPattern | EntityNameExpression; export interface Declaration extends Node { _declarationBrand: any; } export interface NamedDeclaration extends Declaration { readonly name?: DeclarationName; } export interface DeclarationStatement extends NamedDeclaration, Statement { readonly name?: Identifier | StringLiteral | NumericLiteral; } export interface ComputedPropertyName extends Node { readonly kind: SyntaxKind.ComputedPropertyName; readonly parent: Declaration; readonly expression: Expression; } export interface PrivateIdentifier extends Node { readonly kind: SyntaxKind.PrivateIdentifier; readonly escapedText: __String; } export interface Decorator extends Node { readonly kind: SyntaxKind.Decorator; readonly parent: NamedDeclaration; readonly expression: LeftHandSideExpression; } export interface TypeParameterDeclaration extends NamedDeclaration { readonly kind: SyntaxKind.TypeParameter; readonly parent: DeclarationWithTypeParameterChildren | InferTypeNode; readonly name: Identifier; /** Note: Consider calling `getEffectiveConstraintOfTypeParameter` */ readonly constraint?: TypeNode; readonly default?: TypeNode; expression?: Expression; } export interface SignatureDeclarationBase extends NamedDeclaration, JSDocContainer 
{ readonly kind: SignatureDeclaration["kind"]; readonly name?: PropertyName; readonly typeParameters?: NodeArray<TypeParameterDeclaration>; readonly parameters: NodeArray<ParameterDeclaration>; readonly type?: TypeNode; } export type SignatureDeclaration = CallSignatureDeclaration | ConstructSignatureDeclaration | MethodSignature | IndexSignatureDeclaration | FunctionTypeNode | ConstructorTypeNode | JSDocFunctionType | FunctionDeclaration | MethodDeclaration | ConstructorDeclaration | AccessorDeclaration | FunctionExpression | ArrowFunction; export interface CallSignatureDeclaration extends SignatureDeclarationBase, TypeElement { readonly kind: SyntaxKind.CallSignature; } export interface ConstructSignatureDeclaration extends SignatureDeclarationBase, TypeElement { readonly kind: SyntaxKind.ConstructSignature; } export type BindingName = Identifier | BindingPattern; export interface VariableDeclaration extends NamedDeclaration { readonly kind: SyntaxKind.VariableDeclaration; readonly parent: VariableDeclarationList | CatchClause; readonly name: BindingName; readonly exclamationToken?: ExclamationToken; readonly type?: TypeNode; readonly initializer?: Expression; } export interface VariableDeclarationList extends Node { readonly kind: SyntaxKind.VariableDeclarationList; readonly parent: VariableStatement | ForStatement | ForOfStatement | ForInStatement; readonly declarations: NodeArray<VariableDeclaration>; } export interface ParameterDeclaration extends NamedDeclaration, JSDocContainer { readonly kind: SyntaxKind.Parameter; readonly parent: SignatureDeclaration; readonly dotDotDotToken?: DotDotDotToken; readonly name: BindingName; readonly questionToken?: QuestionToken; readonly type?: TypeNode; readonly initializer?: Expression; } export interface BindingElement extends NamedDeclaration { readonly kind: SyntaxKind.BindingElement; readonly parent: BindingPattern; readonly propertyName?: PropertyName; readonly dotDotDotToken?: DotDotDotToken; readonly name: 
BindingName; readonly initializer?: Expression; } export interface PropertySignature extends TypeElement, JSDocContainer { readonly kind: SyntaxKind.PropertySignature; readonly name: PropertyName; readonly questionToken?: QuestionToken; readonly type?: TypeNode; initializer?: Expression; } export interface PropertyDeclaration extends ClassElement, JSDocContainer { readonly kind: SyntaxKind.PropertyDeclaration; readonly parent: ClassLikeDeclaration; readonly name: PropertyName; readonly questionToken?: QuestionToken; readonly exclamationToken?: ExclamationToken; readonly type?: TypeNode; readonly initializer?: Expression; } export interface ObjectLiteralElement extends NamedDeclaration { _objectLiteralBrand: any; readonly name?: PropertyName; } /** Unlike ObjectLiteralElement, excludes JSXAttribute and JSXSpreadAttribute. */ export type ObjectLiteralElementLike = PropertyAssignment | ShorthandPropertyAssignment | SpreadAssignment | MethodDeclaration | AccessorDeclaration; export interface PropertyAssignment extends ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.PropertyAssignment; readonly parent: ObjectLiteralExpression; readonly name: PropertyName; readonly questionToken?: QuestionToken; readonly exclamationToken?: ExclamationToken; readonly initializer: Expression; } export interface ShorthandPropertyAssignment extends ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.ShorthandPropertyAssignment; readonly parent: ObjectLiteralExpression; readonly name: Identifier; readonly questionToken?: QuestionToken; readonly exclamationToken?: ExclamationToken; readonly equalsToken?: EqualsToken; readonly objectAssignmentInitializer?: Expression; } export interface SpreadAssignment extends ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.SpreadAssignment; readonly parent: ObjectLiteralExpression; readonly expression: Expression; } export type VariableLikeDeclaration = VariableDeclaration | ParameterDeclaration | BindingElement 
| PropertyDeclaration | PropertyAssignment | PropertySignature | JsxAttribute | ShorthandPropertyAssignment | EnumMember | JSDocPropertyTag | JSDocParameterTag; export interface PropertyLikeDeclaration extends NamedDeclaration { readonly name: PropertyName; } export interface ObjectBindingPattern extends Node { readonly kind: SyntaxKind.ObjectBindingPattern; readonly parent: VariableDeclaration | ParameterDeclaration | BindingElement; readonly elements: NodeArray<BindingElement>; } export interface ArrayBindingPattern extends Node { readonly kind: SyntaxKind.ArrayBindingPattern; readonly parent: VariableDeclaration | ParameterDeclaration | BindingElement; readonly elements: NodeArray<ArrayBindingElement>; } export type BindingPattern = ObjectBindingPattern | ArrayBindingPattern; export type ArrayBindingElement = BindingElement | OmittedExpression; /** * Several node kinds share function-like features such as a signature, * a name, and a body. These nodes should extend FunctionLikeDeclarationBase. 
* Examples: * - FunctionDeclaration * - MethodDeclaration * - AccessorDeclaration */ export interface FunctionLikeDeclarationBase extends SignatureDeclarationBase { _functionLikeDeclarationBrand: any; readonly asteriskToken?: AsteriskToken; readonly questionToken?: QuestionToken; readonly exclamationToken?: ExclamationToken; readonly body?: Block | Expression; } export type FunctionLikeDeclaration = FunctionDeclaration | MethodDeclaration | GetAccessorDeclaration | SetAccessorDeclaration | ConstructorDeclaration | FunctionExpression | ArrowFunction; /** @deprecated Use SignatureDeclaration */ export type FunctionLike = SignatureDeclaration; export interface FunctionDeclaration extends FunctionLikeDeclarationBase, DeclarationStatement { readonly kind: SyntaxKind.FunctionDeclaration; readonly name?: Identifier; readonly body?: FunctionBody; } export interface MethodSignature extends SignatureDeclarationBase, TypeElement { readonly kind: SyntaxKind.MethodSignature; readonly parent: ObjectTypeDeclaration; readonly name: PropertyName; } export interface MethodDeclaration extends FunctionLikeDeclarationBase, ClassElement, ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.MethodDeclaration; readonly parent: ClassLikeDeclaration | ObjectLiteralExpression; readonly name: PropertyName; readonly body?: FunctionBody; } export interface ConstructorDeclaration extends FunctionLikeDeclarationBase, ClassElement, JSDocContainer { readonly kind: SyntaxKind.Constructor; readonly parent: ClassLikeDeclaration; readonly body?: FunctionBody; } /** For when we encounter a semicolon in a class declaration. ES6 allows these as class elements. 
*/ export interface SemicolonClassElement extends ClassElement { readonly kind: SyntaxKind.SemicolonClassElement; readonly parent: ClassLikeDeclaration; } export interface GetAccessorDeclaration extends FunctionLikeDeclarationBase, ClassElement, ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.GetAccessor; readonly parent: ClassLikeDeclaration | ObjectLiteralExpression; readonly name: PropertyName; readonly body?: FunctionBody; } export interface SetAccessorDeclaration extends FunctionLikeDeclarationBase, ClassElement, ObjectLiteralElement, JSDocContainer { readonly kind: SyntaxKind.SetAccessor; readonly parent: ClassLikeDeclaration | ObjectLiteralExpression; readonly name: PropertyName; readonly body?: FunctionBody; } export type AccessorDeclaration = GetAccessorDeclaration | SetAccessorDeclaration; export interface IndexSignatureDeclaration extends SignatureDeclarationBase, ClassElement, TypeElement { readonly kind: SyntaxKind.IndexSignature; readonly parent: ObjectTypeDeclaration; readonly type: TypeNode; } export interface TypeNode extends Node { _typeNodeBrand: any; } export interface KeywordTypeNode<TKind extends KeywordTypeSyntaxKind = KeywordTypeSyntaxKind> extends KeywordToken<TKind>, TypeNode { readonly kind: TKind; } export interface ImportTypeNode extends NodeWithTypeArguments { readonly kind: SyntaxKind.ImportType; readonly isTypeOf: boolean; readonly argument: TypeNode; readonly qualifier?: EntityName; } export interface ThisTypeNode extends TypeNode { readonly kind: SyntaxKind.ThisType; } export type FunctionOrConstructorTypeNode = FunctionTypeNode | ConstructorTypeNode; export interface FunctionOrConstructorTypeNodeBase extends TypeNode, SignatureDeclarationBase { readonly kind: SyntaxKind.FunctionType | SyntaxKind.ConstructorType; readonly type: TypeNode; } export interface FunctionTypeNode extends FunctionOrConstructorTypeNodeBase { readonly kind: SyntaxKind.FunctionType; } export interface ConstructorTypeNode extends 
FunctionOrConstructorTypeNodeBase { readonly kind: SyntaxKind.ConstructorType; } export interface NodeWithTypeArguments extends TypeNode { readonly typeArguments?: NodeArray<TypeNode>; } export type TypeReferenceType = TypeReferenceNode | ExpressionWithTypeArguments; export interface TypeReferenceNode extends NodeWithTypeArguments { readonly kind: SyntaxKind.TypeReference; readonly typeName: EntityName; } export interface TypePredicateNode extends TypeNode { readonly kind: SyntaxKind.TypePredicate; readonly parent: SignatureDeclaration | JSDocTypeExpression; readonly assertsModifier?: AssertsToken; readonly parameterName: Identifier | ThisTypeNode; readonly type?: TypeNode; } export interface TypeQueryNode extends TypeNode { readonly kind: SyntaxKind.TypeQuery; readonly exprName: EntityName; } export interface TypeLiteralNode extends TypeNode, Declaration { readonly kind: SyntaxKind.TypeLiteral; readonly members: NodeArray<TypeElement>; } export interface ArrayTypeNode extends TypeNode { readonly kind: SyntaxKind.ArrayType; readonly elementType: TypeNode; } export interface TupleTypeNode extends TypeNode { readonly kind: SyntaxKind.TupleType; readonly elements: NodeArray<TypeNode | NamedTupleMember>; } export interface NamedTupleMember extends TypeNode, JSDocContainer, Declaration { readonly kind: SyntaxKind.NamedTupleMember; readonly dotDotDotToken?: Token<SyntaxKind.DotDotDotToken>; readonly name: Identifier; readonly questionToken?: Token<SyntaxKind.QuestionToken>; readonly type: TypeNode; } export interface OptionalTypeNode extends TypeNode { readonly kind: SyntaxKind.OptionalType; readonly type: TypeNode; } export interface RestTypeNode extends TypeNode { readonly kind: SyntaxKind.RestType; readonly type: TypeNode; } export type UnionOrIntersectionTypeNode = UnionTypeNode | IntersectionTypeNode; export interface UnionTypeNode extends TypeNode { readonly kind: SyntaxKind.UnionType; readonly types: NodeArray<TypeNode>; } export interface IntersectionTypeNode 
extends TypeNode { readonly kind: SyntaxKind.IntersectionType; readonly types: NodeArray<TypeNode>; } export interface ConditionalTypeNode extends TypeNode { readonly kind: SyntaxKind.ConditionalType; readonly checkType: TypeNode; readonly extendsType: TypeNode; readonly trueType: TypeNode; readonly falseType: TypeNode; } export interface InferTypeNode extends TypeNode { readonly kind: SyntaxKind.InferType; readonly typeParameter: TypeParameterDeclaration; } export interface ParenthesizedTypeNode extends TypeNode { readonly kind: SyntaxKind.ParenthesizedType; readonly type: TypeNode; } export interface TypeOperatorNode extends TypeNode { readonly kind: SyntaxKind.TypeOperator; readonly operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword; readonly type: TypeNode; } export interface IndexedAccessTypeNode extends TypeNode { readonly kind: SyntaxKind.IndexedAccessType; readonly objectType: TypeNode; readonly indexType: TypeNode; } export interface MappedTypeNode extends TypeNode, Declaration { readonly kind: SyntaxKind.MappedType; readonly readonlyToken?: ReadonlyToken | PlusToken | MinusToken; readonly typeParameter: TypeParameterDeclaration; readonly nameType?: TypeNode; readonly questionToken?: QuestionToken | PlusToken | MinusToken; readonly type?: TypeNode; } export interface LiteralTypeNode extends TypeNode { readonly kind: SyntaxKind.LiteralType; readonly literal: NullLiteral | BooleanLiteral | LiteralExpression | PrefixUnaryExpression; } export interface StringLiteral extends LiteralExpression, Declaration { readonly kind: SyntaxKind.StringLiteral; } export type StringLiteralLike = StringLiteral | NoSubstitutionTemplateLiteral; export type PropertyNameLiteral = Identifier | StringLiteralLike | NumericLiteral; export interface TemplateLiteralTypeNode extends TypeNode { kind: SyntaxKind.TemplateLiteralType; readonly head: TemplateHead; readonly templateSpans: NodeArray<TemplateLiteralTypeSpan>; } export interface 
TemplateLiteralTypeSpan extends TypeNode { readonly kind: SyntaxKind.TemplateLiteralTypeSpan; readonly parent: TemplateLiteralTypeNode; readonly type: TypeNode; readonly literal: TemplateMiddle | TemplateTail; } export interface Expression extends Node { _expressionBrand: any; } export interface OmittedExpression extends Expression { readonly kind: SyntaxKind.OmittedExpression; } export interface PartiallyEmittedExpression extends LeftHandSideExpression { readonly kind: SyntaxKind.PartiallyEmittedExpression; readonly expression: Expression; } export interface UnaryExpression extends Expression { _unaryExpressionBrand: any; } /** Deprecated, please use UpdateExpression */ export type IncrementExpression = UpdateExpression; export interface UpdateExpression extends UnaryExpression { _updateExpressionBrand: any; } export type PrefixUnaryOperator = SyntaxKind.PlusPlusToken | SyntaxKind.MinusMinusToken | SyntaxKind.PlusToken | SyntaxKind.MinusToken | SyntaxKind.TildeToken | SyntaxKind.ExclamationToken; export interface PrefixUnaryExpression extends UpdateExpression { readonly kind: SyntaxKind.PrefixUnaryExpression; readonly operator: PrefixUnaryOperator; readonly operand: UnaryExpression; } export type PostfixUnaryOperator = SyntaxKind.PlusPlusToken | SyntaxKind.MinusMinusToken; export interface PostfixUnaryExpression extends UpdateExpression { readonly kind: SyntaxKind.PostfixUnaryExpression; readonly operand: LeftHandSideExpression; readonly operator: PostfixUnaryOperator; } export interface LeftHandSideExpression extends UpdateExpression { _leftHandSideExpressionBrand: any; } export interface MemberExpression extends LeftHandSideExpression { _memberExpressionBrand: any; } export interface PrimaryExpression extends MemberExpression { _primaryExpressionBrand: any; } export interface NullLiteral extends PrimaryExpression { readonly kind: SyntaxKind.NullKeyword; } export interface TrueLiteral extends PrimaryExpression { readonly kind: SyntaxKind.TrueKeyword; } export 
interface FalseLiteral extends PrimaryExpression { readonly kind: SyntaxKind.FalseKeyword; } export type BooleanLiteral = TrueLiteral | FalseLiteral; export interface ThisExpression extends PrimaryExpression { readonly kind: SyntaxKind.ThisKeyword; } export interface SuperExpression extends PrimaryExpression { readonly kind: SyntaxKind.SuperKeyword; } export interface ImportExpression extends PrimaryExpression { readonly kind: SyntaxKind.ImportKeyword; } export interface DeleteExpression extends UnaryExpression { readonly kind: SyntaxKind.DeleteExpression; readonly expression: UnaryExpression; } export interface TypeOfExpression extends UnaryExpression { readonly kind: SyntaxKind.TypeOfExpression; readonly expression: UnaryExpression; } export interface VoidExpression extends UnaryExpression { readonly kind: SyntaxKind.VoidExpression; readonly expression: UnaryExpression; } export interface AwaitExpression extends UnaryExpression { readonly kind: SyntaxKind.AwaitExpression; readonly expression: UnaryExpression; } export interface YieldExpression extends Expression { readonly kind: SyntaxKind.YieldExpression; readonly asteriskToken?: AsteriskToken; readonly expression?: Expression; } export interface SyntheticExpression extends Expression { readonly kind: SyntaxKind.SyntheticExpression; readonly isSpread: boolean; readonly type: Type; readonly tupleNameSource?: ParameterDeclaration | NamedTupleMember; } export type ExponentiationOperator = SyntaxKind.AsteriskAsteriskToken; export type MultiplicativeOperator = SyntaxKind.AsteriskToken | SyntaxKind.SlashToken | SyntaxKind.PercentToken; export type MultiplicativeOperatorOrHigher = ExponentiationOperator | MultiplicativeOperator; export type AdditiveOperator = SyntaxKind.PlusToken | SyntaxKind.MinusToken; export type AdditiveOperatorOrHigher = MultiplicativeOperatorOrHigher | AdditiveOperator; export type ShiftOperator = SyntaxKind.LessThanLessThanToken | SyntaxKind.GreaterThanGreaterThanToken | 
SyntaxKind.GreaterThanGreaterThanGreaterThanToken; export type ShiftOperatorOrHigher = AdditiveOperatorOrHigher | ShiftOperator; export type RelationalOperator = SyntaxKind.LessThanToken | SyntaxKind.LessThanEqualsToken | SyntaxKind.GreaterThanToken | SyntaxKind.GreaterThanEqualsToken | SyntaxKind.InstanceOfKeyword | SyntaxKind.InKeyword; export type RelationalOperatorOrHigher = ShiftOperatorOrHigher | RelationalOperator; export type EqualityOperator = SyntaxKind.EqualsEqualsToken | SyntaxKind.EqualsEqualsEqualsToken | SyntaxKind.ExclamationEqualsEqualsToken | SyntaxKind.ExclamationEqualsToken; export type EqualityOperatorOrHigher = RelationalOperatorOrHigher | EqualityOperator; export type BitwiseOperator = SyntaxKind.AmpersandToken | SyntaxKind.BarToken | SyntaxKind.CaretToken; export type BitwiseOperatorOrHigher = EqualityOperatorOrHigher | BitwiseOperator; export type LogicalOperator = SyntaxKind.AmpersandAmpersandToken | SyntaxKind.BarBarToken; export type LogicalOperatorOrHigher = BitwiseOperatorOrHigher | LogicalOperator; export type CompoundAssignmentOperator = SyntaxKind.PlusEqualsToken | SyntaxKind.MinusEqualsToken | SyntaxKind.AsteriskAsteriskEqualsToken | SyntaxKind.AsteriskEqualsToken | SyntaxKind.SlashEqualsToken | SyntaxKind.PercentEqualsToken | SyntaxKind.AmpersandEqualsToken | SyntaxKind.BarEqualsToken | SyntaxKind.CaretEqualsToken | SyntaxKind.LessThanLessThanEqualsToken | SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken | SyntaxKind.GreaterThanGreaterThanEqualsToken | SyntaxKind.BarBarEqualsToken | SyntaxKind.AmpersandAmpersandEqualsToken | SyntaxKind.QuestionQuestionEqualsToken; export type AssignmentOperator = SyntaxKind.EqualsToken | CompoundAssignmentOperator; export type AssignmentOperatorOrHigher = SyntaxKind.QuestionQuestionToken | LogicalOperatorOrHigher | AssignmentOperator; export type BinaryOperator = AssignmentOperatorOrHigher | SyntaxKind.CommaToken; export type LogicalOrCoalescingAssignmentOperator = 
SyntaxKind.AmpersandAmpersandEqualsToken | SyntaxKind.BarBarEqualsToken | SyntaxKind.QuestionQuestionEqualsToken; export type BinaryOperatorToken = Token<BinaryOperator>; export interface BinaryExpression extends Expression, Declaration { readonly kind: SyntaxKind.BinaryExpression; readonly left: Expression; readonly operatorToken: BinaryOperatorToken; readonly right: Expression; } export type AssignmentOperatorToken = Token<AssignmentOperator>; export interface AssignmentExpression<TOperator extends AssignmentOperatorToken> extends BinaryExpression { readonly left: LeftHandSideExpression; readonly operatorToken: TOperator; } export interface ObjectDestructuringAssignment extends AssignmentExpression<EqualsToken> { readonly left: ObjectLiteralExpression; } export interface ArrayDestructuringAssignment extends AssignmentExpression<EqualsToken> { readonly left: ArrayLiteralExpression; } export type DestructuringAssignment = ObjectDestructuringAssignment | ArrayDestructuringAssignment; export type BindingOrAssignmentElement = VariableDeclaration | ParameterDeclaration | ObjectBindingOrAssignmentElement | ArrayBindingOrAssignmentElement; export type ObjectBindingOrAssignmentElement = BindingElement | PropertyAssignment | ShorthandPropertyAssignment | SpreadAssignment; export type ArrayBindingOrAssignmentElement = BindingElement | OmittedExpression | SpreadElement | ArrayLiteralExpression | ObjectLiteralExpression | AssignmentExpression<EqualsToken> | Identifier | PropertyAccessExpression | ElementAccessExpression; export type BindingOrAssignmentElementRestIndicator = DotDotDotToken | SpreadElement | SpreadAssignment; export type BindingOrAssignmentElementTarget = BindingOrAssignmentPattern | Identifier | PropertyAccessExpression | ElementAccessExpression | OmittedExpression; export type ObjectBindingOrAssignmentPattern = ObjectBindingPattern | ObjectLiteralExpression; export type ArrayBindingOrAssignmentPattern = ArrayBindingPattern | ArrayLiteralExpression; export 
type AssignmentPattern = ObjectLiteralExpression | ArrayLiteralExpression; export type BindingOrAssignmentPattern = ObjectBindingOrAssignmentPattern | ArrayBindingOrAssignmentPattern; export interface ConditionalExpression extends Expression { readonly kind: SyntaxKind.ConditionalExpression; readonly condition: Expression; readonly questionToken: QuestionToken; readonly whenTrue: Expression; readonly colonToken: ColonToken; readonly whenFalse: Expression; } export type FunctionBody = Block; export type ConciseBody = FunctionBody | Expression; export interface FunctionExpression extends PrimaryExpression, FunctionLikeDeclarationBase, JSDocContainer { readonly kind: SyntaxKind.FunctionExpression; readonly name?: Identifier; readonly body: FunctionBody; } export interface ArrowFunction extends Expression, FunctionLikeDeclarationBase, JSDocContainer { readonly kind: SyntaxKind.ArrowFunction; readonly equalsGreaterThanToken: EqualsGreaterThanToken; readonly body: ConciseBody; readonly name: never; } export interface LiteralLikeNode extends Node { text: string; isUnterminated?: boolean; hasExtendedUnicodeEscape?: boolean; } export interface TemplateLiteralLikeNode extends LiteralLikeNode { rawText?: string; } export interface LiteralExpression extends LiteralLikeNode, PrimaryExpression { _literalExpressionBrand: any; } export interface RegularExpressionLiteral extends LiteralExpression { readonly kind: SyntaxKind.RegularExpressionLiteral; } export interface NoSubstitutionTemplateLiteral extends LiteralExpression, TemplateLiteralLikeNode, Declaration { readonly kind: SyntaxKind.NoSubstitutionTemplateLiteral; } export enum TokenFlags { None = 0, Scientific = 16, Octal = 32, HexSpecifier = 64, BinarySpecifier = 128, OctalSpecifier = 256, } export interface NumericLiteral extends LiteralExpression, Declaration { readonly kind: SyntaxKind.NumericLiteral; } export interface BigIntLiteral extends LiteralExpression { readonly kind: SyntaxKind.BigIntLiteral; } export type 
LiteralToken = NumericLiteral | BigIntLiteral | StringLiteral | JsxText | RegularExpressionLiteral | NoSubstitutionTemplateLiteral; export interface TemplateHead extends TemplateLiteralLikeNode { readonly kind: SyntaxKind.TemplateHead; readonly parent: TemplateExpression | TemplateLiteralTypeNode; } export interface TemplateMiddle extends TemplateLiteralLikeNode { readonly kind: SyntaxKind.TemplateMiddle; readonly parent: TemplateSpan | TemplateLiteralTypeSpan; } export interface TemplateTail extends TemplateLiteralLikeNode { readonly kind: SyntaxKind.TemplateTail; readonly parent: TemplateSpan | TemplateLiteralTypeSpan; } export type PseudoLiteralToken = TemplateHead | TemplateMiddle | TemplateTail; export type TemplateLiteralToken = NoSubstitutionTemplateLiteral | PseudoLiteralToken; export interface TemplateExpression extends PrimaryExpression { readonly kind: SyntaxKind.TemplateExpression; readonly head: TemplateHead; readonly templateSpans: NodeArray<TemplateSpan>; } export type TemplateLiteral = TemplateExpression | NoSubstitutionTemplateLiteral; export interface TemplateSpan extends Node { readonly kind: SyntaxKind.TemplateSpan; readonly parent: TemplateExpression; readonly expression: Expression; readonly literal: TemplateMiddle | TemplateTail; } export interface ParenthesizedExpression extends PrimaryExpression, JSDocContainer { readonly kind: SyntaxKind.ParenthesizedExpression; readonly expression: Expression; } export interface ArrayLiteralExpression extends PrimaryExpression { readonly kind: SyntaxKind.ArrayLiteralExpression; readonly elements: NodeArray<Expression>; } export interface SpreadElement extends Expression { readonly kind: SyntaxKind.SpreadElement; readonly parent: ArrayLiteralExpression | CallExpression | NewExpression; readonly expression: Expression; } /** * This interface is a base interface for ObjectLiteralExpression and JSXAttributes to extend from. 
JSXAttributes is similar to * ObjectLiteralExpression in that it contains array of properties; however, JSXAttributes' properties can only be * JSXAttribute or JSXSpreadAttribute. ObjectLiteralExpression, on the other hand, can only have properties of type * ObjectLiteralElement (e.g. PropertyAssignment, ShorthandPropertyAssignment etc.) */ export interface ObjectLiteralExpressionBase<T extends ObjectLiteralElement> extends PrimaryExpression, Declaration { readonly properties: NodeArray<T>; } export interface ObjectLiteralExpression extends ObjectLiteralExpressionBase<ObjectLiteralElementLike> { readonly kind: SyntaxKind.ObjectLiteralExpression; } export type EntityNameExpression = Identifier | PropertyAccessEntityNameExpression; export type EntityNameOrEntityNameExpression = EntityName | EntityNameExpression; export type AccessExpression = PropertyAccessExpression | ElementAccessExpression; export interface PropertyAccessExpression extends MemberExpression, NamedDeclaration { readonly kind: SyntaxKind.PropertyAccessExpression; readonly expression: LeftHandSideExpression; readonly questionDotToken?: QuestionDotToken; readonly name: Identifier | PrivateIdentifier; } export interface PropertyAccessChain extends PropertyAccessExpression { _optionalChainBrand: any; readonly name: Identifier | PrivateIdentifier; } export interface SuperPropertyAccessExpression extends PropertyAccessExpression { readonly expression: SuperExpression; } /** Brand for a PropertyAccessExpression which, like a QualifiedName, consists of a sequence of identifiers separated by dots. 
*/ export interface PropertyAccessEntityNameExpression extends PropertyAccessExpression { _propertyAccessExpressionLikeQualifiedNameBrand?: any; readonly expression: EntityNameExpression; readonly name: Identifier; } export interface ElementAccessExpression extends MemberExpression { readonly kind: SyntaxKind.ElementAccessExpression; readonly expression: LeftHandSideExpression; readonly questionDotToken?: QuestionDotToken; readonly argumentExpression: Expression; } export interface ElementAccessChain extends ElementAccessExpression { _optionalChainBrand: any; } export interface SuperElementAccessExpression extends ElementAccessExpression { readonly expression: SuperExpression; } export type SuperProperty = SuperPropertyAccessExpression | SuperElementAccessExpression; export interface CallExpression extends LeftHandSideExpression, Declaration { readonly kind: SyntaxKind.CallExpression; readonly expression: LeftHandSideExpression; readonly questionDotToken?: QuestionDotToken; readonly typeArguments?: NodeArray<TypeNode>; readonly arguments: NodeArray<Expression>; } export interface CallChain extends CallExpression { _optionalChainBrand: any; } export type OptionalChain = PropertyAccessChain | ElementAccessChain | CallChain | NonNullChain; export interface SuperCall extends CallExpression { readonly expression: SuperExpression; } export interface ImportCall extends CallExpression { readonly expression: ImportExpression; } export interface ExpressionWithTypeArguments extends NodeWithTypeArguments { readonly kind: SyntaxKind.ExpressionWithTypeArguments; readonly parent: HeritageClause | JSDocAugmentsTag | JSDocImplementsTag; readonly expression: LeftHandSideExpression; } export interface NewExpression extends PrimaryExpression, Declaration { readonly kind: SyntaxKind.NewExpression; readonly expression: LeftHandSideExpression; readonly typeArguments?: NodeArray<TypeNode>; readonly arguments?: NodeArray<Expression>; } export interface TaggedTemplateExpression extends 
MemberExpression { readonly kind: SyntaxKind.TaggedTemplateExpression; readonly tag: LeftHandSideExpression; readonly typeArguments?: NodeArray<TypeNode>; readonly template: TemplateLiteral; } export type CallLikeExpression = CallExpression | NewExpression | TaggedTemplateExpression | Decorator | JsxOpeningLikeElement; export interface AsExpression extends Expression { readonly kind: SyntaxKind.AsExpression; readonly expression: Expression; readonly type: TypeNode; } export interface TypeAssertion extends UnaryExpression { readonly kind: SyntaxKind.TypeAssertionExpression; readonly type: TypeNode; readonly expression: UnaryExpression; } export type AssertionExpression = TypeAssertion | AsExpression; export interface NonNullExpression extends LeftHandSideExpression { readonly kind: SyntaxKind.NonNullExpression; readonly expression: Expression; } export interface NonNullChain extends NonNullExpression { _optionalChainBrand: any; } export interface MetaProperty extends PrimaryExpression { readonly kind: SyntaxKind.MetaProperty; readonly keywordToken: SyntaxKind.NewKeyword | SyntaxKind.ImportKeyword; readonly name: Identifier; } export interface JsxElement extends PrimaryExpression { readonly kind: SyntaxKind.JsxElement; readonly openingElement: JsxOpeningElement; readonly children: NodeArray<JsxChild>; readonly closingElement: JsxClosingElement; } export type JsxOpeningLikeElement = JsxSelfClosingElement | JsxOpeningElement; export type JsxAttributeLike = JsxAttribute | JsxSpreadAttribute; export type JsxTagNameExpression = Identifier | ThisExpression | JsxTagNamePropertyAccess; export interface JsxTagNamePropertyAccess extends PropertyAccessExpression { readonly expression: JsxTagNameExpression; } export interface JsxAttributes extends ObjectLiteralExpressionBase<JsxAttributeLike> { readonly kind: SyntaxKind.JsxAttributes; readonly parent: JsxOpeningLikeElement; } export interface JsxOpeningElement extends Expression { readonly kind: SyntaxKind.JsxOpeningElement; 
readonly parent: JsxElement; readonly tagName: JsxTagNameExpression; readonly typeArguments?: NodeArray<TypeNode>; readonly attributes: JsxAttributes; } export interface JsxSelfClosingElement extends PrimaryExpression { readonly kind: SyntaxKind.JsxSelfClosingElement; readonly tagName: JsxTagNameExpression; readonly typeArguments?: NodeArray<TypeNode>; readonly attributes: JsxAttributes; } export interface JsxFragment extends PrimaryExpression { readonly kind: SyntaxKind.JsxFragment; readonly openingFragment: JsxOpeningFragment; readonly children: NodeArray<JsxChild>; readonly closingFragment: JsxClosingFragment; } export interface JsxOpeningFragment extends Expression { readonly kind: SyntaxKind.JsxOpeningFragment; readonly parent: JsxFragment; } export interface JsxClosingFragment extends Expression { readonly kind: SyntaxKind.JsxClosingFragment; readonly parent: JsxFragment; } export interface JsxAttribute extends ObjectLiteralElement { readonly kind: SyntaxKind.JsxAttribute; readonly parent: JsxAttributes; readonly name: Identifier; readonly initializer?: StringLiteral | JsxExpression; } export interface JsxSpreadAttribute extends ObjectLiteralElement { readonly kind: SyntaxKind.JsxSpreadAttribute; readonly parent: JsxAttributes; readonly expression: Expression; } export interface JsxClosingElement extends Node { readonly kind: SyntaxKind.JsxClosingElement; readonly parent: JsxElement; readonly tagName: JsxTagNameExpression; } export interface JsxExpression extends Expression { readonly kind: SyntaxKind.JsxExpression; readonly parent: JsxElement | JsxAttributeLike; readonly dotDotDotToken?: Token<SyntaxKind.DotDotDotToken>; readonly expression?: Expression; } export interface JsxText extends LiteralLikeNode { readonly kind: SyntaxKind.JsxText; readonly parent: JsxElement; readonly containsOnlyTriviaWhiteSpaces: boolean; } export type JsxChild = JsxText | JsxExpression | JsxElement | JsxSelfClosingElement | JsxFragment; export interface Statement extends Node { 
_statementBrand: any; } export interface NotEmittedStatement extends Statement { readonly kind: SyntaxKind.NotEmittedStatement; } /** * A list of comma-separated expressions. This node is only created by transformations. */ export interface CommaListExpression extends Expression { readonly kind: SyntaxKind.CommaListExpression; readonly elements: NodeArray<Expression>; } export interface EmptyStatement extends Statement { readonly kind: SyntaxKind.EmptyStatement; } export interface DebuggerStatement extends Statement { readonly kind: SyntaxKind.DebuggerStatement; } export interface MissingDeclaration extends DeclarationStatement { readonly kind: SyntaxKind.MissingDeclaration; readonly name?: Identifier; } export type BlockLike = SourceFile | Block | ModuleBlock | CaseOrDefaultClause; export interface Block extends Statement { readonly kind: SyntaxKind.Block; readonly statements: NodeArray<Statement>; } export interface VariableStatement extends Statement, JSDocContainer { readonly kind: SyntaxKind.VariableStatement; readonly declarationList: VariableDeclarationList; } export interface ExpressionStatement extends Statement, JSDocContainer { readonly kind: SyntaxKind.ExpressionStatement; readonly expression: Expression; } export interface IfStatement extends Statement { readonly kind: SyntaxKind.IfStatement; readonly expression: Expression; readonly thenStatement: Statement; readonly elseStatement?: Statement; } export interface IterationStatement extends Statement { readonly statement: Statement; } export interface DoStatement extends IterationStatement { readonly kind: SyntaxKind.DoStatement; readonly expression: Expression; } export interface WhileStatement extends IterationStatement { readonly kind: SyntaxKind.WhileStatement; readonly expression: Expression; } export type ForInitializer = VariableDeclarationList | Expression; export interface ForStatement extends IterationStatement { readonly kind: SyntaxKind.ForStatement; readonly initializer?: ForInitializer; 
readonly condition?: Expression; readonly incrementor?: Expression; } export type ForInOrOfStatement = ForInStatement | ForOfStatement; export interface ForInStatement extends IterationStatement { readonly kind: SyntaxKind.ForInStatement; readonly initializer: ForInitializer; readonly expression: Expression; } export interface ForOfStatement extends IterationStatement { readonly kind: SyntaxKind.ForOfStatement; readonly awaitModifier?: AwaitKeywordToken; readonly initializer: ForInitializer; readonly expression: Expression; } export interface BreakStatement extends Statement { readonly kind: SyntaxKind.BreakStatement; readonly label?: Identifier; } export interface ContinueStatement extends Statement { readonly kind: SyntaxKind.ContinueStatement; readonly label?: Identifier; } export type BreakOrContinueStatement = BreakStatement | ContinueStatement; export interface ReturnStatement extends Statement { readonly kind: SyntaxKind.ReturnStatement; readonly expression?: Expression; } export interface WithStatement extends Statement { readonly kind: SyntaxKind.WithStatement; readonly expression: Expression; readonly statement: Statement; } export interface SwitchStatement extends Statement { readonly kind: SyntaxKind.SwitchStatement; readonly expression: Expression; readonly caseBlock: CaseBlock; possiblyExhaustive?: boolean; } export interface CaseBlock extends Node { readonly kind: SyntaxKind.CaseBlock; readonly parent: SwitchStatement; readonly clauses: NodeArray<CaseOrDefaultClause>; } export interface CaseClause extends Node { readonly kind: SyntaxKind.CaseClause; readonly parent: CaseBlock; readonly expression: Expression; readonly statements: NodeArray<Statement>; } export interface DefaultClause extends Node { readonly kind: SyntaxKind.DefaultClause; readonly parent: CaseBlock; readonly statements: NodeArray<Statement>; } export type CaseOrDefaultClause = CaseClause | DefaultClause; export interface LabeledStatement extends Statement, JSDocContainer { readonly 
kind: SyntaxKind.LabeledStatement; readonly label: Identifier; readonly statement: Statement; } export interface ThrowStatement extends Statement { readonly kind: SyntaxKind.ThrowStatement; readonly expression: Expression; } export interface TryStatement extends Statement { readonly kind: SyntaxKind.TryStatement; readonly tryBlock: Block; readonly catchClause?: CatchClause; readonly finallyBlock?: Block; } export interface CatchClause extends Node { readonly kind: SyntaxKind.CatchClause; readonly parent: TryStatement; readonly variableDeclaration?: VariableDeclaration; readonly block: Block; } export type ObjectTypeDeclaration = ClassLikeDeclaration | InterfaceDeclaration | TypeLiteralNode; export type DeclarationWithTypeParameters = DeclarationWithTypeParameterChildren | JSDocTypedefTag | JSDocCallbackTag | JSDocSignature; export type DeclarationWithTypeParameterChildren = SignatureDeclaration | ClassLikeDeclaration | InterfaceDeclaration | TypeAliasDeclaration | JSDocTemplateTag; export interface ClassLikeDeclarationBase extends NamedDeclaration, JSDocContainer { readonly kind: SyntaxKind.ClassDeclaration | SyntaxKind.ClassExpression; readonly name?: Identifier; readonly typeParameters?: NodeArray<TypeParameterDeclaration>; readonly heritageClauses?: NodeArray<HeritageClause>; readonly members: NodeArray<ClassElement>; } export interface ClassDeclaration extends ClassLikeDeclarationBase, DeclarationStatement { readonly kind: SyntaxKind.ClassDeclaration; /** May be undefined in `export default class { ... }`. 
*/ readonly name?: Identifier; } export interface ClassExpression extends ClassLikeDeclarationBase, PrimaryExpression { readonly kind: SyntaxKind.ClassExpression; } export type ClassLikeDeclaration = ClassDeclaration | ClassExpression; export interface ClassElement extends NamedDeclaration { _classElementBrand: any; readonly name?: PropertyName; } export interface TypeElement extends NamedDeclaration { _typeElementBrand: any; readonly name?: PropertyName; readonly questionToken?: QuestionToken; } export interface InterfaceDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.InterfaceDeclaration; readonly name: Identifier; readonly typeParameters?: NodeArray<TypeParameterDeclaration>; readonly heritageClauses?: NodeArray<HeritageClause>; readonly members: NodeArray<TypeElement>; } export interface HeritageClause extends Node { readonly kind: SyntaxKind.HeritageClause; readonly parent: InterfaceDeclaration | ClassLikeDeclaration; readonly token: SyntaxKind.ExtendsKeyword | SyntaxKind.ImplementsKeyword; readonly types: NodeArray<ExpressionWithTypeArguments>; } export interface TypeAliasDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.TypeAliasDeclaration; readonly name: Identifier; readonly typeParameters?: NodeArray<TypeParameterDeclaration>; readonly type: TypeNode; } export interface EnumMember extends NamedDeclaration, JSDocContainer { readonly kind: SyntaxKind.EnumMember; readonly parent: EnumDeclaration; readonly name: PropertyName; readonly initializer?: Expression; } export interface EnumDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.EnumDeclaration; readonly name: Identifier; readonly members: NodeArray<EnumMember>; } export type ModuleName = Identifier | StringLiteral; export type ModuleBody = NamespaceBody | JSDocNamespaceBody; export interface ModuleDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.ModuleDeclaration; 
readonly parent: ModuleBody | SourceFile; readonly name: ModuleName; readonly body?: ModuleBody | JSDocNamespaceDeclaration; } export type NamespaceBody = ModuleBlock | NamespaceDeclaration; export interface NamespaceDeclaration extends ModuleDeclaration { readonly name: Identifier; readonly body: NamespaceBody; } export type JSDocNamespaceBody = Identifier | JSDocNamespaceDeclaration; export interface JSDocNamespaceDeclaration extends ModuleDeclaration { readonly name: Identifier; readonly body?: JSDocNamespaceBody; } export interface ModuleBlock extends Node, Statement { readonly kind: SyntaxKind.ModuleBlock; readonly parent: ModuleDeclaration; readonly statements: NodeArray<Statement>; } export type ModuleReference = EntityName | ExternalModuleReference; /** * One of: * - import x = require("mod"); * - import x = M.x; */ export interface ImportEqualsDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.ImportEqualsDeclaration; readonly parent: SourceFile | ModuleBlock; readonly name: Identifier; readonly moduleReference: ModuleReference; } export interface ExternalModuleReference extends Node { readonly kind: SyntaxKind.ExternalModuleReference; readonly parent: ImportEqualsDeclaration; readonly expression: Expression; } export interface ImportDeclaration extends Statement, JSDocContainer { readonly kind: SyntaxKind.ImportDeclaration; readonly parent: SourceFile | ModuleBlock; readonly importClause?: ImportClause; /** If this is not a StringLiteral it will be a grammar error. 
*/ readonly moduleSpecifier: Expression; } export type NamedImportBindings = NamespaceImport | NamedImports; export type NamedExportBindings = NamespaceExport | NamedExports; export interface ImportClause extends NamedDeclaration { readonly kind: SyntaxKind.ImportClause; readonly parent: ImportDeclaration; readonly isTypeOnly: boolean; readonly name?: Identifier; readonly namedBindings?: NamedImportBindings; } export interface NamespaceImport extends NamedDeclaration { readonly kind: SyntaxKind.NamespaceImport; readonly parent: ImportClause; readonly name: Identifier; } export interface NamespaceExport extends NamedDeclaration { readonly kind: SyntaxKind.NamespaceExport; readonly parent: ExportDeclaration; readonly name: Identifier; } export interface NamespaceExportDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.NamespaceExportDeclaration; readonly name: Identifier; } export interface ExportDeclaration extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.ExportDeclaration; readonly parent: SourceFile | ModuleBlock; readonly isTypeOnly: boolean; /** Will not be assigned in the case of `export * from "foo";` */ readonly exportClause?: NamedExportBindings; /** If this is not a StringLiteral it will be a grammar error. 
*/ readonly moduleSpecifier?: Expression; } export interface NamedImports extends Node { readonly kind: SyntaxKind.NamedImports; readonly parent: ImportClause; readonly elements: NodeArray<ImportSpecifier>; } export interface NamedExports extends Node { readonly kind: SyntaxKind.NamedExports; readonly parent: ExportDeclaration; readonly elements: NodeArray<ExportSpecifier>; } export type NamedImportsOrExports = NamedImports | NamedExports; export interface ImportSpecifier extends NamedDeclaration { readonly kind: SyntaxKind.ImportSpecifier; readonly parent: NamedImports; readonly propertyName?: Identifier; readonly name: Identifier; } export interface ExportSpecifier extends NamedDeclaration { readonly kind: SyntaxKind.ExportSpecifier; readonly parent: NamedExports; readonly propertyName?: Identifier; readonly name: Identifier; } export type ImportOrExportSpecifier = ImportSpecifier | ExportSpecifier; export type TypeOnlyCompatibleAliasDeclaration = ImportClause | NamespaceImport | ImportOrExportSpecifier; /** * This is either an `export =` or an `export default` declaration. * Unless `isExportEquals` is set, this node was parsed as an `export default`. 
*/ export interface ExportAssignment extends DeclarationStatement, JSDocContainer { readonly kind: SyntaxKind.ExportAssignment; readonly parent: SourceFile; readonly isExportEquals?: boolean; readonly expression: Expression; } export interface FileReference extends TextRange { fileName: string; } export interface CheckJsDirective extends TextRange { enabled: boolean; } export type CommentKind = SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia; export interface CommentRange extends TextRange { hasTrailingNewLine?: boolean; kind: CommentKind; } export interface SynthesizedComment extends CommentRange { text: string; pos: -1; end: -1; hasLeadingNewline?: boolean; } export interface JSDocTypeExpression extends TypeNode { readonly kind: SyntaxKind.JSDocTypeExpression; readonly type: TypeNode; } export interface JSDocNameReference extends Node { readonly kind: SyntaxKind.JSDocNameReference; readonly name: EntityName; } export interface JSDocType extends TypeNode { _jsDocTypeBrand: any; } export interface JSDocAllType extends JSDocType { readonly kind: SyntaxKind.JSDocAllType; } export interface JSDocUnknownType extends JSDocType { readonly kind: SyntaxKind.JSDocUnknownType; } export interface JSDocNonNullableType extends JSDocType { readonly kind: SyntaxKind.JSDocNonNullableType; readonly type: TypeNode; } export interface JSDocNullableType extends JSDocType { readonly kind: SyntaxKind.JSDocNullableType; readonly type: TypeNode; } export interface JSDocOptionalType extends JSDocType { readonly kind: SyntaxKind.JSDocOptionalType; readonly type: TypeNode; } export interface JSDocFunctionType extends JSDocType, SignatureDeclarationBase { readonly kind: SyntaxKind.JSDocFunctionType; } export interface JSDocVariadicType extends JSDocType { readonly kind: SyntaxKind.JSDocVariadicType; readonly type: TypeNode; } export interface JSDocNamepathType extends JSDocType { readonly kind: SyntaxKind.JSDocNamepathType; readonly type: TypeNode; } export type 
JSDocTypeReferencingNode = JSDocVariadicType | JSDocOptionalType | JSDocNullableType | JSDocNonNullableType;
/** A parsed JSDoc comment attached to a node. */
export interface JSDoc extends Node {
    readonly kind: SyntaxKind.JSDocComment;
    readonly parent: HasJSDoc;
    readonly tags?: NodeArray<JSDocTag>;
    readonly comment?: string;
}
/** Base type for all JSDoc tags (`@param`, `@returns`, ...). */
export interface JSDocTag extends Node {
    readonly parent: JSDoc | JSDocTypeLiteral;
    readonly tagName: Identifier;
    readonly comment?: string;
}
/** A tag the parser has no dedicated node for. */
export interface JSDocUnknownTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocTag;
}
/**
 * Note that `@extends` is a synonym of `@augments`.
 * Both tags are represented by this interface.
 */
export interface JSDocAugmentsTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocAugmentsTag;
    readonly class: ExpressionWithTypeArguments & {
        readonly expression: Identifier | PropertyAccessEntityNameExpression;
    };
}
export interface JSDocImplementsTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocImplementsTag;
    readonly class: ExpressionWithTypeArguments & {
        readonly expression: Identifier | PropertyAccessEntityNameExpression;
    };
}
export interface JSDocAuthorTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocAuthorTag;
}
export interface JSDocDeprecatedTag extends JSDocTag {
    // `readonly` added for consistency: every sibling tag interface declares
    // its `kind` as readonly; AST nodes are not mutated after construction.
    readonly kind: SyntaxKind.JSDocDeprecatedTag;
}
export interface JSDocClassTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocClassTag;
}
export interface JSDocPublicTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocPublicTag;
}
export interface JSDocPrivateTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocPrivateTag;
}
export interface JSDocProtectedTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocProtectedTag;
}
export interface JSDocReadonlyTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocReadonlyTag;
}
export interface JSDocEnumTag extends JSDocTag, Declaration {
    readonly kind: SyntaxKind.JSDocEnumTag;
    readonly parent: JSDoc;
    readonly typeExpression: JSDocTypeExpression;
}
export interface JSDocThisTag extends JSDocTag {
    readonly kind: SyntaxKind.JSDocThisTag;
readonly typeExpression: JSDocTypeExpression; } export interface JSDocTemplateTag extends JSDocTag { readonly kind: SyntaxKind.JSDocTemplateTag; readonly constraint: JSDocTypeExpression | undefined; readonly typeParameters: NodeArray<TypeParameterDeclaration>; } export interface JSDocSeeTag extends JSDocTag { readonly kind: SyntaxKind.JSDocSeeTag; readonly name?: JSDocNameReference; } export interface JSDocReturnTag extends JSDocTag { readonly kind: SyntaxKind.JSDocReturnTag; readonly typeExpression?: JSDocTypeExpression; } export interface JSDocTypeTag extends JSDocTag { readonly kind: SyntaxKind.JSDocTypeTag; readonly typeExpression: JSDocTypeExpression; } export interface JSDocTypedefTag extends JSDocTag, NamedDeclaration { readonly kind: SyntaxKind.JSDocTypedefTag; readonly parent: JSDoc; readonly fullName?: JSDocNamespaceDeclaration | Identifier; readonly name?: Identifier; readonly typeExpression?: JSDocTypeExpression | JSDocTypeLiteral; } export interface JSDocCallbackTag extends JSDocTag, NamedDeclaration { readonly kind: SyntaxKind.JSDocCallbackTag; readonly parent: JSDoc; readonly fullName?: JSDocNamespaceDeclaration | Identifier; readonly name?: Identifier; readonly typeExpression: JSDocSignature; } export interface JSDocSignature extends JSDocType, Declaration { readonly kind: SyntaxKind.JSDocSignature; readonly typeParameters?: readonly JSDocTemplateTag[]; readonly parameters: readonly JSDocParameterTag[]; readonly type: JSDocReturnTag | undefined; } export interface JSDocPropertyLikeTag extends JSDocTag, Declaration { readonly parent: JSDoc; readonly name: EntityName; readonly typeExpression?: JSDocTypeExpression; /** Whether the property name came before the type -- non-standard for JSDoc, but Typescript-like */ readonly isNameFirst: boolean; readonly isBracketed: boolean; } export interface JSDocPropertyTag extends JSDocPropertyLikeTag { readonly kind: SyntaxKind.JSDocPropertyTag; } export interface JSDocParameterTag extends JSDocPropertyLikeTag { 
readonly kind: SyntaxKind.JSDocParameterTag; }
export interface JSDocTypeLiteral extends JSDocType {
    readonly kind: SyntaxKind.JSDocTypeLiteral;
    readonly jsDocPropertyTags?: readonly JSDocPropertyLikeTag[];
    /** If true, then this type literal represents an *array* of its type. */
    readonly isArrayType: boolean;
}
/** Bit flags describing a control-flow graph node. */
export enum FlowFlags {
    Unreachable = 1,
    Start = 2,
    BranchLabel = 4,
    LoopLabel = 8,
    Assignment = 16,
    TrueCondition = 32,
    FalseCondition = 64,
    SwitchClause = 128,
    ArrayMutation = 256,
    Call = 512,
    ReduceLabel = 1024,
    Referenced = 2048,
    Shared = 4096,
    // Combined masks, not distinct flags:
    Label = 12,     // BranchLabel | LoopLabel
    Condition = 96  // TrueCondition | FalseCondition
}
// FIX: `FlowCall` was listed twice in this union; duplicate union members are
// redundant (the compiler collapses them), so the second occurrence is removed.
export type FlowNode = FlowStart | FlowLabel | FlowAssignment | FlowCall | FlowCondition | FlowSwitchClause | FlowArrayMutation | FlowReduceLabel;
export interface FlowNodeBase {
    flags: FlowFlags;
    id?: number;
}
export interface FlowStart extends FlowNodeBase {
    node?: FunctionExpression | ArrowFunction | MethodDeclaration;
}
export interface FlowLabel extends FlowNodeBase {
    antecedents: FlowNode[] | undefined;
}
export interface FlowAssignment extends FlowNodeBase {
    node: Expression | VariableDeclaration | BindingElement;
    antecedent: FlowNode;
}
export interface FlowCall extends FlowNodeBase {
    node: CallExpression;
    antecedent: FlowNode;
}
export interface FlowCondition extends FlowNodeBase {
    node: Expression;
    antecedent: FlowNode;
}
export interface FlowSwitchClause extends FlowNodeBase {
    switchStatement: SwitchStatement;
    // NOTE(review): presumably the half-open range of matched clauses in the
    // switch's clause list — confirm against the binder before relying on it.
    clauseStart: number;
    clauseEnd: number;
    antecedent: FlowNode;
}
export interface FlowArrayMutation extends FlowNodeBase {
    node: CallExpression | BinaryExpression;
    antecedent: FlowNode;
}
export interface FlowReduceLabel extends FlowNodeBase {
    target: FlowLabel;
    antecedents: FlowNode[];
    antecedent: FlowNode;
}
export type FlowType = Type | IncompleteType;
export interface IncompleteType {
    flags: TypeFlags;
    type: Type;
}
export interface AmdDependency {
    path: string;
    name?: string;
}
export interface SourceFile extends Declaration { readonly kind:
SyntaxKind.SourceFile; readonly statements: NodeArray<Statement>; readonly endOfFileToken: Token<SyntaxKind.EndOfFileToken>; fileName: string; text: string; amdDependencies: readonly AmdDependency[]; moduleName?: string; referencedFiles: readonly FileReference[]; typeReferenceDirectives: readonly FileReference[]; libReferenceDirectives: readonly FileReference[]; languageVariant: LanguageVariant; isDeclarationFile: boolean; /** * lib.d.ts should have a reference comment like * * /// <reference no-default-lib="true"/> * * If any other file has this comment, it signals not to include lib.d.ts * because this containing file is intended to act as a default library. */ hasNoDefaultLib: boolean; languageVersion: ScriptTarget; } export interface Bundle extends Node { readonly kind: SyntaxKind.Bundle; readonly prepends: readonly (InputFiles | UnparsedSource)[]; readonly sourceFiles: readonly SourceFile[]; } export interface InputFiles extends Node { readonly kind: SyntaxKind.InputFiles; javascriptPath?: string; javascriptText: string; javascriptMapPath?: string; javascriptMapText?: string; declarationPath?: string; declarationText: string; declarationMapPath?: string; declarationMapText?: string; } export interface UnparsedSource extends Node { readonly kind: SyntaxKind.UnparsedSource; fileName: string; text: string; readonly prologues: readonly UnparsedPrologue[]; helpers: readonly UnscopedEmitHelper[] | undefined; referencedFiles: readonly FileReference[]; typeReferenceDirectives: readonly string[] | undefined; libReferenceDirectives: readonly FileReference[]; hasNoDefaultLib?: boolean; sourceMapPath?: string; sourceMapText?: string; readonly syntheticReferences?: readonly UnparsedSyntheticReference[]; readonly texts: readonly UnparsedSourceText[]; } export type UnparsedSourceText = UnparsedPrepend | UnparsedTextLike; export type UnparsedNode = UnparsedPrologue | UnparsedSourceText | UnparsedSyntheticReference; export interface UnparsedSection extends Node { readonly 
kind: SyntaxKind; readonly parent: UnparsedSource; readonly data?: string; } export interface UnparsedPrologue extends UnparsedSection { readonly kind: SyntaxKind.UnparsedPrologue; readonly parent: UnparsedSource; readonly data: string; } export interface UnparsedPrepend extends UnparsedSection { readonly kind: SyntaxKind.UnparsedPrepend; readonly parent: UnparsedSource; readonly data: string; readonly texts: readonly UnparsedTextLike[]; } export interface UnparsedTextLike extends UnparsedSection { readonly kind: SyntaxKind.UnparsedText | SyntaxKind.UnparsedInternalText; readonly parent: UnparsedSource; } export interface UnparsedSyntheticReference extends UnparsedSection { readonly kind: SyntaxKind.UnparsedSyntheticReference; readonly parent: UnparsedSource; } export interface JsonSourceFile extends SourceFile { readonly statements: NodeArray<JsonObjectExpressionStatement>; } export interface TsConfigSourceFile extends JsonSourceFile { extendedSourceFiles?: string[]; } export interface JsonMinusNumericLiteral extends PrefixUnaryExpression { readonly kind: SyntaxKind.PrefixUnaryExpression; readonly operator: SyntaxKind.MinusToken; readonly operand: NumericLiteral; } export type JsonObjectExpression = ObjectLiteralExpression | ArrayLiteralExpression | JsonMinusNumericLiteral | NumericLiteral | StringLiteral | BooleanLiteral | NullLiteral; export interface JsonObjectExpressionStatement extends ExpressionStatement { readonly expression: JsonObjectExpression; } export interface ScriptReferenceHost { getCompilerOptions(): CompilerOptions; getSourceFile(fileName: string): SourceFile | undefined; getSourceFileByPath(path: Path): SourceFile | undefined; getCurrentDirectory(): string; } export interface ParseConfigHost { useCaseSensitiveFileNames: boolean; readDirectory(rootDir: string, extensions: readonly string[], excludes: readonly string[] | undefined, includes: readonly string[], depth?: number): readonly string[]; /** * Gets a value indicating whether the specified 
path exists and is a file. * @param path The path to test. */ fileExists(path: string): boolean; readFile(path: string): string | undefined; trace?(s: string): void; } /** * Branded string for keeping track of when we've turned an ambiguous path * specified like "./blah" to an absolute path to an actual * tsconfig file, e.g. "/root/blah/tsconfig.json" */ export type ResolvedConfigFileName = string & { _isResolvedConfigFileName: never; }; export type WriteFileCallback = (fileName: string, data: string, writeByteOrderMark: boolean, onError?: (message: string) => void, sourceFiles?: readonly SourceFile[]) => void; export class OperationCanceledException { } export interface CancellationToken { isCancellationRequested(): boolean; /** @throws OperationCanceledException if isCancellationRequested is true */ throwIfCancellationRequested(): void; } export interface Program extends ScriptReferenceHost { getCurrentDirectory(): string; /** * Get a list of root file names that were passed to a 'createProgram' */ getRootFileNames(): readonly string[]; /** * Get a list of files in the program */ getSourceFiles(): readonly SourceFile[]; /** * Emits the JavaScript and declaration files. If targetSourceFile is not specified, then * the JavaScript and declaration files will be produced for all the files in this program. * If targetSourceFile is specified, then only the JavaScript and declaration for that * specific file will be generated. * * If writeFile is not specified then the writeFile callback from the compiler host will be * used for writing the JavaScript and declaration files. Otherwise, the writeFile parameter * will be invoked when writing the JavaScript and declaration files. 
*/ emit(targetSourceFile?: SourceFile, writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): EmitResult; getOptionsDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[]; getGlobalDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[]; getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly DiagnosticWithLocation[]; /** The first time this is called, it will return global diagnostics (no location). */ getSemanticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[]; getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly DiagnosticWithLocation[]; getConfigFileParsingDiagnostics(): readonly Diagnostic[]; /** * Gets a type checker that can be used to semantically analyze source files in the program. */ getTypeChecker(): TypeChecker; getTypeCatalog(): readonly Type[]; getNodeCount(): number; getIdentifierCount(): number; getSymbolCount(): number; getTypeCount(): number; getInstantiationCount(): number; getRelationCacheSizes(): { assignable: number; identity: number; subtype: number; strictSubtype: number; }; isSourceFileFromExternalLibrary(file: SourceFile): boolean; isSourceFileDefaultLibrary(file: SourceFile): boolean; getProjectReferences(): readonly ProjectReference[] | undefined; getResolvedProjectReferences(): readonly (ResolvedProjectReference | undefined)[] | undefined; } export interface ResolvedProjectReference { commandLine: ParsedCommandLine; sourceFile: SourceFile; references?: readonly (ResolvedProjectReference | undefined)[]; } export type CustomTransformerFactory = (context: TransformationContext) => CustomTransformer; export interface CustomTransformer { transformSourceFile(node: SourceFile): SourceFile; transformBundle(node: Bundle): Bundle; } export interface CustomTransformers { /** Custom 
transformers to evaluate before built-in .js transformations. */ before?: (TransformerFactory<SourceFile> | CustomTransformerFactory)[]; /** Custom transformers to evaluate after built-in .js transformations. */ after?: (TransformerFactory<SourceFile> | CustomTransformerFactory)[]; /** Custom transformers to evaluate after built-in .d.ts transformations. */ afterDeclarations?: (TransformerFactory<Bundle | SourceFile> | CustomTransformerFactory)[]; } export interface SourceMapSpan { /** Line number in the .js file. */ emittedLine: number; /** Column number in the .js file. */ emittedColumn: number; /** Line number in the .ts file. */ sourceLine: number; /** Column number in the .ts file. */ sourceColumn: number; /** Optional name (index into names array) associated with this span. */ nameIndex?: number; /** .ts file (index into sources array) associated with this span */ sourceIndex: number; } /** Return code used by getEmitOutput function to indicate status of the function */ export enum ExitStatus { Success = 0, DiagnosticsPresent_OutputsSkipped = 1, DiagnosticsPresent_OutputsGenerated = 2, InvalidProject_OutputsSkipped = 3, ProjectReferenceCycle_OutputsSkipped = 4, /** @deprecated Use ProjectReferenceCycle_OutputsSkipped instead. 
*/ ProjectReferenceCycle_OutputsSkupped = 4 } export interface EmitResult { emitSkipped: boolean; /** Contains declaration emit diagnostics */ diagnostics: readonly Diagnostic[]; emittedFiles?: string[]; } export interface TypeChecker { getTypeOfSymbolAtLocation(symbol: Symbol, node: Node): Type; getDeclaredTypeOfSymbol(symbol: Symbol): Type; getPropertiesOfType(type: Type): Symbol[]; getPropertyOfType(type: Type, propertyName: string): Symbol | undefined; getPrivateIdentifierPropertyOfType(leftType: Type, name: string, location: Node): Symbol | undefined; getIndexInfoOfType(type: Type, kind: IndexKind): IndexInfo | undefined; getSignaturesOfType(type: Type, kind: SignatureKind): readonly Signature[]; getIndexTypeOfType(type: Type, kind: IndexKind): Type | undefined; getBaseTypes(type: InterfaceType): BaseType[]; getBaseTypeOfLiteralType(type: Type): Type; getWidenedType(type: Type): Type; getReturnTypeOfSignature(signature: Signature): Type; getNullableType(type: Type, flags: TypeFlags): Type; getNonNullableType(type: Type): Type; getTypeArguments(type: TypeReference): readonly Type[]; /** Note that the resulting nodes cannot be checked. */ typeToTypeNode(type: Type, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): TypeNode | undefined; /** Note that the resulting nodes cannot be checked. */ signatureToSignatureDeclaration(signature: Signature, kind: SyntaxKind, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): SignatureDeclaration & { typeArguments?: NodeArray<TypeNode>; } | undefined; /** Note that the resulting nodes cannot be checked. */ indexInfoToIndexSignatureDeclaration(indexInfo: IndexInfo, kind: IndexKind, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): IndexSignatureDeclaration | undefined; /** Note that the resulting nodes cannot be checked. 
*/ symbolToEntityName(symbol: Symbol, meaning: SymbolFlags, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): EntityName | undefined; /** Note that the resulting nodes cannot be checked. */ symbolToExpression(symbol: Symbol, meaning: SymbolFlags, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): Expression | undefined; /** Note that the resulting nodes cannot be checked. */ symbolToTypeParameterDeclarations(symbol: Symbol, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): NodeArray<TypeParameterDeclaration> | undefined; /** Note that the resulting nodes cannot be checked. */ symbolToParameterDeclaration(symbol: Symbol, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): ParameterDeclaration | undefined; /** Note that the resulting nodes cannot be checked. */ typeParameterToDeclaration(parameter: TypeParameter, enclosingDeclaration: Node | undefined, flags: NodeBuilderFlags | undefined): TypeParameterDeclaration | undefined; getSymbolsInScope(location: Node, meaning: SymbolFlags): Symbol[]; getSymbolAtLocation(node: Node): Symbol | undefined; getSymbolsOfParameterPropertyDeclaration(parameter: ParameterDeclaration, parameterName: string): Symbol[]; /** * The function returns the value (local variable) symbol of an identifier in the short-hand property assignment. * This is necessary as an identifier in short-hand property assignment can contains two meaning: property name and property value. */ getShorthandAssignmentValueSymbol(location: Node): Symbol | undefined; getExportSpecifierLocalTargetSymbol(location: ExportSpecifier | Identifier): Symbol | undefined; /** * If a symbol is a local symbol with an associated exported symbol, returns the exported symbol. * Otherwise returns its input. * For example, at `export type T = number;`: * - `getSymbolAtLocation` at the location `T` will return the exported symbol for `T`. 
* - But the result of `getSymbolsInScope` will contain the *local* symbol for `T`, not the exported symbol. * - Calling `getExportSymbolOfSymbol` on that local symbol will return the exported symbol. */ getExportSymbolOfSymbol(symbol: Symbol): Symbol; getPropertySymbolOfDestructuringAssignment(location: Identifier): Symbol | undefined; getTypeOfAssignmentPattern(pattern: AssignmentPattern): Type; getTypeAtLocation(node: Node): Type; getTypeFromTypeNode(node: TypeNode): Type; signatureToString(signature: Signature, enclosingDeclaration?: Node, flags?: TypeFormatFlags, kind?: SignatureKind): string; typeToString(type: Type, enclosingDeclaration?: Node, flags?: TypeFormatFlags): string; symbolToString(symbol: Symbol, enclosingDeclaration?: Node, meaning?: SymbolFlags, flags?: SymbolFormatFlags): string; typePredicateToString(predicate: TypePredicate, enclosingDeclaration?: Node, flags?: TypeFormatFlags): string; getFullyQualifiedName(symbol: Symbol): string; getAugmentedPropertiesOfType(type: Type): Symbol[]; getRootSymbols(symbol: Symbol): readonly Symbol[]; getSymbolOfExpando(node: Node, allowDeclaration: boolean): Symbol | undefined; getContextualType(node: Expression): Type | undefined; /** * returns unknownSignature in the case of an error. * returns undefined if the node is not valid. * @param argumentCount Apparent number of arguments, passed in case of a possibly incomplete call. This should come from an ArgumentListInfo. See `signatureHelp.ts`. 
*/ getResolvedSignature(node: CallLikeExpression, candidatesOutArray?: Signature[], argumentCount?: number): Signature | undefined; getSignatureFromDeclaration(declaration: SignatureDeclaration): Signature | undefined; isImplementationOfOverload(node: SignatureDeclaration): boolean | undefined; isUndefinedSymbol(symbol: Symbol): boolean; isArgumentsSymbol(symbol: Symbol): boolean; isUnknownSymbol(symbol: Symbol): boolean; getConstantValue(node: EnumMember | PropertyAccessExpression | ElementAccessExpression): string | number | undefined; isValidPropertyAccess(node: PropertyAccessExpression | QualifiedName | ImportTypeNode, propertyName: string): boolean; /** Follow all aliases to get the original symbol. */ getAliasedSymbol(symbol: Symbol): Symbol; getExportsOfModule(moduleSymbol: Symbol): Symbol[]; getJsxIntrinsicTagNamesAt(location: Node): Symbol[]; isOptionalParameter(node: ParameterDeclaration): boolean; getAmbientModules(): Symbol[]; tryGetMemberInModuleExports(memberName: string, moduleSymbol: Symbol): Symbol | undefined; getApparentType(type: Type): Type; getBaseConstraintOfType(type: Type): Type | undefined; getDefaultFromTypeParameter(type: Type): Type | undefined; /** * Depending on the operation performed, it may be appropriate to throw away the checker * if the cancellation token is triggered. Typically, if it is used for error checking * and the operation is cancelled, then it should be discarded, otherwise it is safe to keep. 
*/ runWithCancellationToken<T>(token: CancellationToken, cb: (checker: TypeChecker) => T): T; } export enum NodeBuilderFlags { None = 0, NoTruncation = 1, WriteArrayAsGenericType = 2, GenerateNamesForShadowedTypeParams = 4, UseStructuralFallback = 8, ForbidIndexedAccessSymbolReferences = 16, WriteTypeArgumentsOfSignature = 32, UseFullyQualifiedType = 64, UseOnlyExternalAliasing = 128, SuppressAnyReturnType = 256, WriteTypeParametersInQualifiedName = 512, MultilineObjectLiterals = 1024, WriteClassExpressionAsTypeLiteral = 2048, UseTypeOfFunction = 4096, OmitParameterModifiers = 8192, UseAliasDefinedOutsideCurrentScope = 16384, UseSingleQuotesForStringLiteralType = 268435456, NoTypeReduction = 536870912, NoUndefinedOptionalParameterType = 1073741824, AllowThisInObjectLiteral = 32768, AllowQualifedNameInPlaceOfIdentifier = 65536, AllowAnonymousIdentifier = 131072, AllowEmptyUnionOrIntersection = 262144, AllowEmptyTuple = 524288, AllowUniqueESSymbolType = 1048576, AllowEmptyIndexInfoType = 2097152, AllowNodeModulesRelativePaths = 67108864, IgnoreErrors = 70221824, InObjectTypeLiteral = 4194304, InTypeAlias = 8388608, InInitialEntityName = 16777216, InReverseMappedType = 33554432 } export enum TypeFormatFlags { None = 0, NoTruncation = 1, WriteArrayAsGenericType = 2, UseStructuralFallback = 8, WriteTypeArgumentsOfSignature = 32, UseFullyQualifiedType = 64, SuppressAnyReturnType = 256, MultilineObjectLiterals = 1024, WriteClassExpressionAsTypeLiteral = 2048, UseTypeOfFunction = 4096, OmitParameterModifiers = 8192, UseAliasDefinedOutsideCurrentScope = 16384, UseSingleQuotesForStringLiteralType = 268435456, NoTypeReduction = 536870912, AllowUniqueESSymbolType = 1048576, AddUndefined = 131072, WriteArrowStyleSignature = 262144, InArrayType = 524288, InElementType = 2097152, InFirstTypeArgument = 4194304, InTypeAlias = 8388608, /** @deprecated */ WriteOwnNameForAnyLike = 0, NodeBuilderFlagsMask = 814775659 } export enum SymbolFormatFlags { None = 0, 
WriteTypeParametersOrArguments = 1, UseOnlyExternalAliasing = 2, AllowAnyNodeKind = 4, UseAliasDefinedOutsideCurrentScope = 8, } export enum TypePredicateKind { This = 0, Identifier = 1, AssertsThis = 2, AssertsIdentifier = 3 } export interface TypePredicateBase { kind: TypePredicateKind; type: Type | undefined; } export interface ThisTypePredicate extends TypePredicateBase { kind: TypePredicateKind.This; parameterName: undefined; parameterIndex: undefined; type: Type; } export interface IdentifierTypePredicate extends TypePredicateBase { kind: TypePredicateKind.Identifier; parameterName: string; parameterIndex: number; type: Type; } export interface AssertsThisTypePredicate extends TypePredicateBase { kind: TypePredicateKind.AssertsThis; parameterName: undefined; parameterIndex: undefined; type: Type | undefined; } export interface AssertsIdentifierTypePredicate extends TypePredicateBase { kind: TypePredicateKind.AssertsIdentifier; parameterName: string; parameterIndex: number; type: Type | undefined; } export type TypePredicate = ThisTypePredicate | IdentifierTypePredicate | AssertsThisTypePredicate | AssertsIdentifierTypePredicate; export enum SymbolFlags { None = 0, FunctionScopedVariable = 1, BlockScopedVariable = 2, Property = 4, EnumMember = 8, Function = 16, Class = 32, Interface = 64, ConstEnum = 128, RegularEnum = 256, ValueModule = 512, NamespaceModule = 1024, TypeLiteral = 2048, ObjectLiteral = 4096, Method = 8192, Constructor = 16384, GetAccessor = 32768, SetAccessor = 65536, Signature = 131072, TypeParameter = 262144, TypeAlias = 524288, ExportValue = 1048576, Alias = 2097152, Prototype = 4194304, ExportStar = 8388608, Optional = 16777216, Transient = 33554432, Assignment = 67108864, ModuleExports = 134217728, Enum = 384, Variable = 3, Value = 111551, Type = 788968, Namespace = 1920, Module = 1536, Accessor = 98304, FunctionScopedVariableExcludes = 111550, BlockScopedVariableExcludes = 111551, ParameterExcludes = 111551, PropertyExcludes = 0, 
EnumMemberExcludes = 900095, FunctionExcludes = 110991, ClassExcludes = 899503, InterfaceExcludes = 788872, RegularEnumExcludes = 899327, ConstEnumExcludes = 899967, ValueModuleExcludes = 110735, NamespaceModuleExcludes = 0, MethodExcludes = 103359, GetAccessorExcludes = 46015, SetAccessorExcludes = 78783, TypeParameterExcludes = 526824, TypeAliasExcludes = 788968, AliasExcludes = 2097152, ModuleMember = 2623475, ExportHasLocal = 944, BlockScoped = 418, PropertyOrAccessor = 98308, ClassMember = 106500, } export interface Symbol { flags: SymbolFlags; escapedName: __String; declarations: Declaration[]; valueDeclaration: Declaration; members?: SymbolTable; exports?: SymbolTable; globalExports?: SymbolTable; } export enum InternalSymbolName { Call = "__call", Constructor = "__constructor", New = "__new", Index = "__index", ExportStar = "__export", Global = "__global", Missing = "__missing", Type = "__type", Object = "__object", JSXAttributes = "__jsxAttributes", Class = "__class", Function = "__function", Computed = "__computed", Resolving = "__resolving__", ExportEquals = "export=", Default = "default", This = "this" } /** * This represents a string whose leading underscore have been escaped by adding extra leading underscores. * The shape of this brand is rather unique compared to others we've used. * Instead of just an intersection of a string and an object, it is that union-ed * with an intersection of void and an object. This makes it wholly incompatible * with a normal string (which is good, it cannot be misused on assignment or on usage), * while still being comparable with a normal string via === (also good) and castable from a string. */ export type __String = (string & { __escapedIdentifier: void; }) | (void & { __escapedIdentifier: void; }) | InternalSymbolName; /** ReadonlyMap where keys are `__String`s. */ export interface ReadonlyUnderscoreEscapedMap<T> extends ReadonlyESMap<__String, T> { } /** Map where keys are `__String`s. 
*/
export interface UnderscoreEscapedMap<T> extends ESMap<__String, T>, ReadonlyUnderscoreEscapedMap<T> {
}
/** SymbolTable based on ES6 Map interface. */
export type SymbolTable = UnderscoreEscapedMap<Symbol>;
/**
 * Bit flags classifying a type. Single-bit members (`Any` .. `StringMapping`)
 * identify one kind of type; the remaining members (`Literal`, `StringLike`,
 * `UnionOrIntersection`, `Narrowable`, ...) are masks combining several bits.
 */
export enum TypeFlags {
    Any = 1,
    Unknown = 2,
    String = 4,
    Number = 8,
    Boolean = 16,
    Enum = 32,
    BigInt = 64,
    StringLiteral = 128,
    NumberLiteral = 256,
    BooleanLiteral = 512,
    EnumLiteral = 1024,
    BigIntLiteral = 2048,
    ESSymbol = 4096,
    UniqueESSymbol = 8192,
    Void = 16384,
    Undefined = 32768,
    Null = 65536,
    Never = 131072,
    TypeParameter = 262144,
    Object = 524288,
    Union = 1048576,
    Intersection = 2097152,
    Index = 4194304,
    IndexedAccess = 8388608,
    Conditional = 16777216,
    Substitution = 33554432,
    NonPrimitive = 67108864,
    TemplateLiteral = 134217728,
    StringMapping = 268435456,
    Literal = 2944,
    Unit = 109440,
    StringOrNumberLiteral = 384,
    PossiblyFalsy = 117724,
    StringLike = 402653316,
    NumberLike = 296,
    BigIntLike = 2112,
    BooleanLike = 528,
    EnumLike = 1056,
    ESSymbolLike = 12288,
    VoidLike = 49152,
    UnionOrIntersection = 3145728,
    StructuredType = 3670016,
    TypeVariable = 8650752,
    InstantiableNonPrimitive = 58982400,
    InstantiablePrimitive = 406847488,
    Instantiable = 465829888,
    StructuredOrInstantiable = 469499904,
    Narrowable = 536624127,
}
/** A binding or literal pattern usable as the target of a destructuring. */
export type DestructuringPattern = BindingPattern | ObjectLiteralExpression | ArrayLiteralExpression;
/** Properties common to all types. */
export interface Type {
    flags: TypeFlags;
    symbol: Symbol;
    /** Destructuring pattern this type originated from, if any. */
    pattern?: DestructuringPattern;
    aliasSymbol?: Symbol;
    aliasTypeArguments?: readonly Type[];
}
/**
 * A string, number, or bigint literal type.
 * NOTE(review): `freshType`/`regularType` presumably link the fresh and
 * regular variants of the same literal type — confirm against the checker.
 */
export interface LiteralType extends Type {
    value: string | number | PseudoBigInt;
    freshType: LiteralType;
    regularType: LiteralType;
}
/** The type of a `unique symbol`. */
export interface UniqueESSymbolType extends Type {
    symbol: Symbol;
    escapedName: __String;
}
export interface StringLiteralType extends LiteralType {
    value: string;
}
export interface NumberLiteralType extends LiteralType {
    value: number;
}
export interface BigIntLiteralType extends LiteralType {
    value: PseudoBigInt;
}
/** Enum types (marker interface; no members beyond `Type`). */
export interface EnumType extends Type {
}
/**
 * Bit flags further classifying object types (`TypeFlags.Object`).
 * `ClassOrInterface` (3 = Class | Interface) is a combined mask.
 */
export enum ObjectFlags {
    Class = 1,
    Interface = 2,
    Reference = 4,
    Tuple = 8,
    Anonymous = 16,
    Mapped = 32,
    Instantiated = 64,
    ObjectLiteral = 128,
    EvolvingArray = 256,
    ObjectLiteralPatternWithComputedProperties = 512,
    ContainsSpread = 1024,
    ReverseMapped = 2048,
    JsxAttributes = 4096,
    MarkerType = 8192,
    JSLiteral = 16384,
    FreshLiteral = 32768,
    ArrayLiteral = 65536,
    ObjectRestType = 131072,
    ClassOrInterface = 3,
}
/** An object type; `objectFlags` refines the kind of object. */
export interface ObjectType extends Type {
    objectFlags: ObjectFlags;
}
/** Class and interface types (ObjectFlags.Class and ObjectFlags.Interface). */
export interface InterfaceType extends ObjectType {
    typeParameters: TypeParameter[] | undefined;
    outerTypeParameters: TypeParameter[] | undefined;
    localTypeParameters: TypeParameter[] | undefined;
    thisType: TypeParameter | undefined;
}
/** A type usable as a base type of a class or interface. */
export type BaseType = ObjectType | IntersectionType | TypeVariable;
/** An interface type whose declared members have been resolved. */
export interface InterfaceTypeWithDeclaredMembers extends InterfaceType {
    declaredProperties: Symbol[];
    declaredCallSignatures: Signature[];
    declaredConstructSignatures: Signature[];
    declaredStringIndexInfo?: IndexInfo;
    declaredNumberIndexInfo?: IndexInfo;
}
/**
 * Type references (ObjectFlags.Reference). When a class or interface has type parameters or
 * a "this" type, references to the class or interface are made using type references. The
 * typeArguments property specifies the types to substitute for the type parameters of the
 * class or interface and optionally includes an extra element that specifies the type to
 * substitute for "this" in the resulting instantiation. When no extra argument is present,
 * the type reference itself is substituted for "this". The typeArguments property is undefined
 * if the class or interface has no type parameters and the reference isn't specifying an
 * explicit "this" argument.
*/ export interface TypeReference extends ObjectType { target: GenericType; node?: TypeReferenceNode | ArrayTypeNode | TupleTypeNode; } export interface DeferredTypeReference extends TypeReference { } export interface GenericType extends InterfaceType, TypeReference { } export enum ElementFlags { Required = 1, Optional = 2, Rest = 4, Variadic = 8, Variable = 12 } export interface TupleType extends GenericType { elementFlags: readonly ElementFlags[]; minLength: number; fixedLength: number; hasRestElement: boolean; combinedFlags: ElementFlags; readonly: boolean; labeledElementDeclarations?: readonly (NamedTupleMember | ParameterDeclaration)[]; } export interface TupleTypeReference extends TypeReference { target: TupleType; } export interface UnionOrIntersectionType extends Type { types: Type[]; } export interface UnionType extends UnionOrIntersectionType { } export interface IntersectionType extends UnionOrIntersectionType { } export type StructuredType = ObjectType | UnionType | IntersectionType; export interface EvolvingArrayType extends ObjectType { elementType: Type; finalArrayType?: Type; } export interface InstantiableType extends Type { } export interface TypeParameter extends InstantiableType { } export interface IndexedAccessType extends InstantiableType { objectType: Type; indexType: Type; constraint?: Type; simplifiedForReading?: Type; simplifiedForWriting?: Type; } export type TypeVariable = TypeParameter | IndexedAccessType; export interface IndexType extends InstantiableType { type: InstantiableType | UnionOrIntersectionType; } export interface ConditionalRoot { node: ConditionalTypeNode; checkType: Type; extendsType: Type; isDistributive: boolean; inferTypeParameters?: TypeParameter[]; outerTypeParameters?: TypeParameter[]; instantiations?: Map<Type>; aliasSymbol?: Symbol; aliasTypeArguments?: Type[]; } export interface ConditionalType extends InstantiableType { root: ConditionalRoot; checkType: Type; extendsType: Type; resolvedTrueType: Type; 
resolvedFalseType: Type; } export interface TemplateLiteralType extends InstantiableType { texts: readonly string[]; types: readonly Type[]; } export interface StringMappingType extends InstantiableType { symbol: Symbol; type: Type; } export interface SubstitutionType extends InstantiableType { baseType: Type; substitute: Type; } export enum SignatureKind { Call = 0, Construct = 1 } export interface Signature { declaration?: SignatureDeclaration | JSDocSignature; typeParameters?: readonly TypeParameter[]; parameters: readonly Symbol[]; } export enum IndexKind { String = 0, Number = 1 } export interface IndexInfo { type: Type; isReadonly: boolean; declaration?: IndexSignatureDeclaration; } export enum InferencePriority { NakedTypeVariable = 1, SpeculativeTuple = 2, HomomorphicMappedType = 4, PartialHomomorphicMappedType = 8, MappedTypeConstraint = 16, ContravariantConditional = 32, ReturnType = 64, LiteralKeyof = 128, NoConstraints = 256, AlwaysStrict = 512, MaxValue = 1024, PriorityImpliesCombination = 208, Circularity = -1 } /** @deprecated Use FileExtensionInfo instead. */ export type JsFileExtensionInfo = FileExtensionInfo; export interface FileExtensionInfo { extension: string; isMixedContent: boolean; scriptKind?: ScriptKind; } export interface DiagnosticMessage { key: string; category: DiagnosticCategory; code: number; message: string; reportsUnnecessary?: {}; reportsDeprecated?: {}; } /** * A linked list of formatted diagnostic messages to be used as part of a multiline message. * It is built from the bottom up, leaving the head to be the "main" diagnostic. * While it seems that DiagnosticMessageChain is structurally similar to DiagnosticMessage, * the difference is that messages are all preformatted in DMC. */ export interface DiagnosticMessageChain { messageText: string; category: DiagnosticCategory; code: number; next?: DiagnosticMessageChain[]; } export interface Diagnostic extends DiagnosticRelatedInformation { /** May store more in future. 
For now, this will simply be `true` to indicate when a diagnostic is an unused-identifier diagnostic. */ reportsUnnecessary?: {}; reportsDeprecated?: {}; source?: string; relatedInformation?: DiagnosticRelatedInformation[]; } export interface DiagnosticRelatedInformation { category: DiagnosticCategory; code: number; file: SourceFile | undefined; start: number | undefined; length: number | undefined; messageText: string | DiagnosticMessageChain; } export interface DiagnosticWithLocation extends Diagnostic { file: SourceFile; start: number; length: number; } export enum DiagnosticCategory { Warning = 0, Error = 1, Suggestion = 2, Message = 3 } export enum ModuleResolutionKind { Classic = 1, NodeJs = 2 } export interface PluginImport { name: string; } export interface ProjectReference { /** A normalized path on disk */ path: string; /** The path as the user originally wrote it */ originalPath?: string; /** True if the output of this reference should be prepended to the output of this project. 
Only valid for --outFile compilations */ prepend?: boolean; /** True if it is intended that this reference form a circularity */ circular?: boolean; } export enum WatchFileKind { FixedPollingInterval = 0, PriorityPollingInterval = 1, DynamicPriorityPolling = 2, UseFsEvents = 3, UseFsEventsOnParentDirectory = 4 } export enum WatchDirectoryKind { UseFsEvents = 0, FixedPollingInterval = 1, DynamicPriorityPolling = 2 } export enum PollingWatchKind { FixedInterval = 0, PriorityInterval = 1, DynamicPriority = 2 } export type CompilerOptionsValue = string | number | boolean | (string | number)[] | string[] | MapLike<string[]> | PluginImport[] | ProjectReference[] | null | undefined; export interface CompilerOptions { allowJs?: boolean; allowSyntheticDefaultImports?: boolean; allowUmdGlobalAccess?: boolean; allowUnreachableCode?: boolean; allowUnusedLabels?: boolean; alwaysStrict?: boolean; baseUrl?: string; charset?: string; checkJs?: boolean; declaration?: boolean; declarationMap?: boolean; emitDeclarationOnly?: boolean; declarationDir?: string; disableSizeLimit?: boolean; disableSourceOfProjectReferenceRedirect?: boolean; disableSolutionSearching?: boolean; disableReferencedProjectLoad?: boolean; downlevelIteration?: boolean; emitBOM?: boolean; emitDecoratorMetadata?: boolean; experimentalDecorators?: boolean; forceConsistentCasingInFileNames?: boolean; importHelpers?: boolean; importsNotUsedAsValues?: ImportsNotUsedAsValues; inlineSourceMap?: boolean; inlineSources?: boolean; isolatedModules?: boolean; jsx?: JsxEmit; keyofStringsOnly?: boolean; lib?: string[]; locale?: string; mapRoot?: string; maxNodeModuleJsDepth?: number; module?: ModuleKind; moduleResolution?: ModuleResolutionKind; newLine?: NewLineKind; noEmit?: boolean; noEmitHelpers?: boolean; noEmitOnError?: boolean; noErrorTruncation?: boolean; noFallthroughCasesInSwitch?: boolean; noImplicitAny?: boolean; noImplicitReturns?: boolean; noImplicitThis?: boolean; noStrictGenericChecks?: boolean; noUnusedLocals?: 
boolean; noUnusedParameters?: boolean; noImplicitUseStrict?: boolean; assumeChangesOnlyAffectDirectDependencies?: boolean; noLib?: boolean; noResolve?: boolean; noUncheckedIndexedAccess?: boolean; out?: string; outDir?: string; outFile?: string; paths?: MapLike<string[]>; preserveConstEnums?: boolean; preserveSymlinks?: boolean; project?: string; reactNamespace?: string; jsxFactory?: string; jsxFragmentFactory?: string; jsxImportSource?: string; composite?: boolean; incremental?: boolean; tsBuildInfoFile?: string; removeComments?: boolean; rootDir?: string; rootDirs?: string[]; skipLibCheck?: boolean; skipDefaultLibCheck?: boolean; sourceMap?: boolean; sourceRoot?: string; strict?: boolean; strictFunctionTypes?: boolean; strictBindCallApply?: boolean; strictNullChecks?: boolean; strictPropertyInitialization?: boolean; stripInternal?: boolean; suppressExcessPropertyErrors?: boolean; suppressImplicitAnyIndexErrors?: boolean; target?: ScriptTarget; traceResolution?: boolean; resolveJsonModule?: boolean; types?: string[]; /** Paths used to compute primary types search locations */ typeRoots?: string[]; esModuleInterop?: boolean; useDefineForClassFields?: boolean; [option: string]: CompilerOptionsValue | TsConfigSourceFile | undefined; } export interface WatchOptions { watchFile?: WatchFileKind; watchDirectory?: WatchDirectoryKind; fallbackPolling?: PollingWatchKind; synchronousWatchDirectory?: boolean; [option: string]: CompilerOptionsValue | undefined; } export interface TypeAcquisition { /** * @deprecated typingOptions.enableAutoDiscovery * Use typeAcquisition.enable instead. 
*/
    enableAutoDiscovery?: boolean;
    enable?: boolean;
    include?: string[];
    exclude?: string[];
    disableFilenameBasedTypeAcquisition?: boolean;
    [option: string]: CompilerOptionsValue | undefined;
}
/** Module code generation formats. */
export enum ModuleKind {
    None = 0,
    CommonJS = 1,
    AMD = 2,
    UMD = 3,
    System = 4,
    ES2015 = 5,
    ES2020 = 6,
    ESNext = 99
}
/** How JSX constructs are emitted. */
export enum JsxEmit {
    None = 0,
    Preserve = 1,
    React = 2,
    ReactNative = 3,
    ReactJSX = 4,
    ReactJSXDev = 5
}
/** Handling of imports used only in type positions. */
export enum ImportsNotUsedAsValues {
    Remove = 0,
    Preserve = 1,
    Error = 2
}
/** Newline style used in emitted files. */
export enum NewLineKind {
    CarriageReturnLineFeed = 0,
    LineFeed = 1
}
/** A 0-based line/character position. */
export interface LineAndCharacter {
    /** 0-based. */
    line: number;
    character: number;
}
/** Kind of source text contained in a file. */
export enum ScriptKind {
    Unknown = 0,
    JS = 1,
    JSX = 2,
    TS = 3,
    TSX = 4,
    External = 5,
    JSON = 6,
    /**
     * Used on extensions that don't define the ScriptKind but the content defines it.
     * Deferred extensions are going to be included in all project contexts.
     */
    Deferred = 7
}
/** ECMAScript language versions the compiler can target. */
export enum ScriptTarget {
    ES3 = 0,
    ES5 = 1,
    ES2015 = 2,
    ES2016 = 3,
    ES2017 = 4,
    ES2018 = 5,
    ES2019 = 6,
    ES2020 = 7,
    ESNext = 99,
    JSON = 100,
    Latest = 99
}
export enum LanguageVariant {
    Standard = 0,
    JSX = 1
}
/** Either a parsed command line or a parsed tsconfig.json */
export interface ParsedCommandLine {
    options: CompilerOptions;
    typeAcquisition?: TypeAcquisition;
    fileNames: string[];
    projectReferences?: readonly ProjectReference[];
    watchOptions?: WatchOptions;
    raw?: any;
    errors: Diagnostic[];
    wildcardDirectories?: MapLike<WatchDirectoryFlags>;
    compileOnSave?: boolean;
}
export enum WatchDirectoryFlags {
    None = 0,
    Recursive = 1
}
/** Result of expanding a config file's file-inclusion specs. */
export interface ExpandResult {
    fileNames: string[];
    wildcardDirectories: MapLike<WatchDirectoryFlags>;
}
/** Inputs accepted when creating a Program. */
export interface CreateProgramOptions {
    rootNames: readonly string[];
    options: CompilerOptions;
    projectReferences?: readonly ProjectReference[];
    host?: CompilerHost;
    oldProgram?: Program;
    configFileParsingDiagnostics?: readonly Diagnostic[];
}
/** Minimal file-system surface required by module resolution. */
export interface ModuleResolutionHost {
    fileExists(fileName: string): boolean;
    readFile(fileName: string): string | undefined;
    trace?(s: string): void;
    directoryExists?(directoryName: string): boolean;
    /**
     * Resolve a symbolic link.
     * @see https://nodejs.org/api/fs.html#fs_fs_realpathsync_path_options
     */
    realpath?(path: string): string;
    getCurrentDirectory?(): string;
    getDirectories?(path: string): string[];
}
/**
 * Represents the result of module resolution.
 * Module resolution will pick up tsx/jsx/js files even if '--jsx' and '--allowJs' are turned off.
 * The Program will then filter results based on these flags.
 *
 * Prefer to return a `ResolvedModuleFull` so that the file type does not have to be inferred.
 */
export interface ResolvedModule {
    /** Path of the file the module was resolved to. */
    resolvedFileName: string;
    /** True if `resolvedFileName` comes from `node_modules`. */
    isExternalLibraryImport?: boolean;
}
/**
 * ResolvedModule with an explicitly provided `extension` property.
 * Prefer this over `ResolvedModule`.
 * If changing this, remember to change `moduleResolutionIsEqualTo`.
 */
export interface ResolvedModuleFull extends ResolvedModule {
    /**
     * Extension of resolvedFileName. This must match what's at the end of resolvedFileName.
     * This is optional for backwards-compatibility, but will be added if not provided.
     */
    extension: Extension;
    packageId?: PackageId;
}
/**
 * Unique identifier with a package name and version.
 * If changing this, remember to change `packageIdIsEqual`.
 */
export interface PackageId {
    /**
     * Name of the package.
     * Should not include `@types`.
     * If accessing a non-index file, this should include its name e.g. "foo/bar".
     */
    name: string;
    /**
     * Name of a submodule within this package.
     * May be "".
     */
    subModuleName: string;
    /** Version of the package, e.g.
"1.2.3" */
    version: string;
}
/** File extensions recognized by the compiler. */
export enum Extension {
    Ts = ".ts",
    Tsx = ".tsx",
    Dts = ".d.ts",
    Js = ".js",
    Jsx = ".jsx",
    Json = ".json",
    TsBuildInfo = ".tsbuildinfo"
}
export interface ResolvedModuleWithFailedLookupLocations {
    readonly resolvedModule: ResolvedModuleFull | undefined;
}
/** Result of resolving a `types` reference directive. */
export interface ResolvedTypeReferenceDirective {
    primary: boolean;
    resolvedFileName: string | undefined;
    packageId?: PackageId;
    /** True if `resolvedFileName` comes from `node_modules`. */
    isExternalLibraryImport?: boolean;
}
export interface ResolvedTypeReferenceDirectiveWithFailedLookupLocations {
    readonly resolvedTypeReferenceDirective: ResolvedTypeReferenceDirective | undefined;
    readonly failedLookupLocations: string[];
}
/** Host abstraction through which the Program reads source files and emits output. */
export interface CompilerHost extends ModuleResolutionHost {
    getSourceFile(fileName: string, languageVersion: ScriptTarget, onError?: (message: string) => void, shouldCreateNewSourceFile?: boolean): SourceFile | undefined;
    getSourceFileByPath?(fileName: string, path: Path, languageVersion: ScriptTarget, onError?: (message: string) => void, shouldCreateNewSourceFile?: boolean): SourceFile | undefined;
    getCancellationToken?(): CancellationToken;
    getDefaultLibFileName(options: CompilerOptions): string;
    getDefaultLibLocation?(): string;
    writeFile: WriteFileCallback;
    getCurrentDirectory(): string;
    getCanonicalFileName(fileName: string): string;
    useCaseSensitiveFileNames(): boolean;
    getNewLine(): string;
    readDirectory?(rootDir: string, extensions: readonly string[], excludes: readonly string[] | undefined, includes: readonly string[], depth?: number): string[];
    resolveModuleNames?(moduleNames: string[], containingFile: string, reusedNames: string[] | undefined, redirectedReference: ResolvedProjectReference | undefined, options: CompilerOptions): (ResolvedModule | undefined)[];
    /**
     * This method is a companion for 'resolveModuleNames' and is used to resolve 'types' references to actual type declaration files
     */
    resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference: ResolvedProjectReference | undefined, options: CompilerOptions): (ResolvedTypeReferenceDirective | undefined)[];
    getEnvironmentVariable?(name: string): string | undefined;
    createHash?(data: string): string;
    getParsedCommandLine?(fileName: string): ParsedCommandLine | undefined;
}
export interface SourceMapRange extends TextRange {
    source?: SourceMapSource;
}
export interface SourceMapSource {
    fileName: string;
    text: string;
    skipTrivia?: (pos: number) => number;
}
/** Bit flags adjusting how a node is emitted (combinable; e.g. NoSourceMap = NoLeadingSourceMap | NoTrailingSourceMap). */
export enum EmitFlags {
    None = 0,
    SingleLine = 1,
    AdviseOnEmitNode = 2,
    NoSubstitution = 4,
    CapturesThis = 8,
    NoLeadingSourceMap = 16,
    NoTrailingSourceMap = 32,
    NoSourceMap = 48,
    NoNestedSourceMaps = 64,
    NoTokenLeadingSourceMaps = 128,
    NoTokenTrailingSourceMaps = 256,
    NoTokenSourceMaps = 384,
    NoLeadingComments = 512,
    NoTrailingComments = 1024,
    NoComments = 1536,
    NoNestedComments = 2048,
    HelperName = 4096,
    ExportName = 8192,
    LocalName = 16384,
    InternalName = 32768,
    Indented = 65536,
    NoIndentation = 131072,
    AsyncFunctionBody = 262144,
    ReuseTempVariableScope = 524288,
    CustomPrologue = 1048576,
    NoHoisting = 2097152,
    HasEndOfDeclarationMarker = 4194304,
    Iterator = 8388608,
    NoAsciiEscaping = 16777216,
}
/** A helper function that may be injected into emitted output; `text` may be a template keyed by unique names. */
export interface EmitHelper {
    readonly name: string;
    readonly scoped: boolean;
    readonly text: string | ((node: EmitHelperUniqueNameCallback) => string);
    readonly priority?: number;
    readonly dependencies?: EmitHelper[];
}
export interface UnscopedEmitHelper extends EmitHelper {
    readonly scoped: false;
    readonly text: string;
}
export type EmitHelperUniqueNameCallback = (name: string) => string;
/** Grammatical context in which a node is emitted. */
export enum EmitHint {
    SourceFile = 0,
    Expression = 1,
    IdentifierName = 2,
    MappedTypeParameter = 3,
    Unspecified = 4,
    EmbeddedStatement = 5,
    JsxAttributeValue = 6
}
/** Bit flags naming the kinds of wrapping ("outer") expressions (Assertions = TypeAssertions | NonNullAssertions). */
export enum OuterExpressionKinds {
    Parentheses = 1,
    TypeAssertions = 2,
    NonNullAssertions = 4,
    PartiallyEmittedExpressions = 8,
    Assertions = 6,
    All
= 15 }
/** The strings a JavaScript `typeof` expression can evaluate to. */
export type TypeOfTag = "undefined" | "number" | "bigint" | "boolean" | "string" | "symbol" | "object" | "function";
/** Factory of create/update methods for syntax-tree nodes (interface continues below). */
export interface NodeFactory {
    createNodeArray<T extends Node>(elements?: readonly T[], hasTrailingComma?: boolean): NodeArray<T>;
    createNumericLiteral(value: string | number, numericLiteralFlags?: TokenFlags): NumericLiteral;
    createBigIntLiteral(value: string | PseudoBigInt): BigIntLiteral;
    createStringLiteral(text: string, isSingleQuote?: boolean): StringLiteral;
    createStringLiteralFromNode(sourceNode: PropertyNameLiteral, isSingleQuote?: boolean): StringLiteral;
    createRegularExpressionLiteral(text: string): RegularExpressionLiteral;
    createIdentifier(text: string): Identifier;
    /** Create a unique temporary variable. */
    createTempVariable(recordTempVariable: ((node: Identifier) => void) | undefined): Identifier;
    /** Create a unique temporary variable for use in a loop. */
    createLoopVariable(): Identifier;
    /** Create a unique name based on the supplied text. */
    createUniqueName(text: string, flags?: GeneratedIdentifierFlags): Identifier;
    /** Create a unique name generated for a node.
*/ getGeneratedNameForNode(node: Node | undefined): Identifier; createPrivateIdentifier(text: string): PrivateIdentifier; createToken(token: SyntaxKind.SuperKeyword): SuperExpression; createToken(token: SyntaxKind.ThisKeyword): ThisExpression; createToken(token: SyntaxKind.NullKeyword): NullLiteral; createToken(token: SyntaxKind.TrueKeyword): TrueLiteral; createToken(token: SyntaxKind.FalseKeyword): FalseLiteral; createToken<TKind extends PunctuationSyntaxKind>(token: TKind): PunctuationToken<TKind>; createToken<TKind extends KeywordTypeSyntaxKind>(token: TKind): KeywordTypeNode<TKind>; createToken<TKind extends ModifierSyntaxKind>(token: TKind): ModifierToken<TKind>; createToken<TKind extends KeywordSyntaxKind>(token: TKind): KeywordToken<TKind>; createToken<TKind extends SyntaxKind.Unknown | SyntaxKind.EndOfFileToken>(token: TKind): Token<TKind>; createSuper(): SuperExpression; createThis(): ThisExpression; createNull(): NullLiteral; createTrue(): TrueLiteral; createFalse(): FalseLiteral; createModifier<T extends ModifierSyntaxKind>(kind: T): ModifierToken<T>; createModifiersFromModifierFlags(flags: ModifierFlags): Modifier[]; createQualifiedName(left: EntityName, right: string | Identifier): QualifiedName; updateQualifiedName(node: QualifiedName, left: EntityName, right: Identifier): QualifiedName; createComputedPropertyName(expression: Expression): ComputedPropertyName; updateComputedPropertyName(node: ComputedPropertyName, expression: Expression): ComputedPropertyName; createTypeParameterDeclaration(name: string | Identifier, constraint?: TypeNode, defaultType?: TypeNode): TypeParameterDeclaration; updateTypeParameterDeclaration(node: TypeParameterDeclaration, name: Identifier, constraint: TypeNode | undefined, defaultType: TypeNode | undefined): TypeParameterDeclaration; createParameterDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, dotDotDotToken: DotDotDotToken | undefined, name: string | BindingName, 
questionToken?: QuestionToken, type?: TypeNode, initializer?: Expression): ParameterDeclaration; updateParameterDeclaration(node: ParameterDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, dotDotDotToken: DotDotDotToken | undefined, name: string | BindingName, questionToken: QuestionToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): ParameterDeclaration; createDecorator(expression: Expression): Decorator; updateDecorator(node: Decorator, expression: Expression): Decorator; createPropertySignature(modifiers: readonly Modifier[] | undefined, name: PropertyName | string, questionToken: QuestionToken | undefined, type: TypeNode | undefined): PropertySignature; updatePropertySignature(node: PropertySignature, modifiers: readonly Modifier[] | undefined, name: PropertyName, questionToken: QuestionToken | undefined, type: TypeNode | undefined): PropertySignature; createPropertyDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | PropertyName, questionOrExclamationToken: QuestionToken | ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): PropertyDeclaration; updatePropertyDeclaration(node: PropertyDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | PropertyName, questionOrExclamationToken: QuestionToken | ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): PropertyDeclaration; createMethodSignature(modifiers: readonly Modifier[] | undefined, name: string | PropertyName, questionToken: QuestionToken | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined): MethodSignature; updateMethodSignature(node: MethodSignature, modifiers: readonly Modifier[] | undefined, name: PropertyName, 
questionToken: QuestionToken | undefined, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined): MethodSignature; createMethodDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | PropertyName, questionToken: QuestionToken | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined): MethodDeclaration; updateMethodDeclaration(node: MethodDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: PropertyName, questionToken: QuestionToken | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined): MethodDeclaration; createConstructorDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], body: Block | undefined): ConstructorDeclaration; updateConstructorDeclaration(node: ConstructorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], body: Block | undefined): ConstructorDeclaration; createGetAccessorDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | PropertyName, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined): GetAccessorDeclaration; updateGetAccessorDeclaration(node: GetAccessorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: PropertyName, parameters: readonly ParameterDeclaration[], type: TypeNode | 
undefined, body: Block | undefined): GetAccessorDeclaration; createSetAccessorDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | PropertyName, parameters: readonly ParameterDeclaration[], body: Block | undefined): SetAccessorDeclaration; updateSetAccessorDeclaration(node: SetAccessorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: PropertyName, parameters: readonly ParameterDeclaration[], body: Block | undefined): SetAccessorDeclaration; createCallSignature(typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined): CallSignatureDeclaration; updateCallSignature(node: CallSignatureDeclaration, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined): CallSignatureDeclaration; createConstructSignature(typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined): ConstructSignatureDeclaration; updateConstructSignature(node: ConstructSignatureDeclaration, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined): ConstructSignatureDeclaration; createIndexSignature(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode): IndexSignatureDeclaration; updateIndexSignature(node: IndexSignatureDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode): IndexSignatureDeclaration; createTemplateLiteralTypeSpan(type: TypeNode, literal: TemplateMiddle | TemplateTail): TemplateLiteralTypeSpan; updateTemplateLiteralTypeSpan(node: 
TemplateLiteralTypeSpan, type: TypeNode, literal: TemplateMiddle | TemplateTail): TemplateLiteralTypeSpan; createKeywordTypeNode<TKind extends KeywordTypeSyntaxKind>(kind: TKind): KeywordTypeNode<TKind>; createTypePredicateNode(assertsModifier: AssertsKeyword | undefined, parameterName: Identifier | ThisTypeNode | string, type: TypeNode | undefined): TypePredicateNode; updateTypePredicateNode(node: TypePredicateNode, assertsModifier: AssertsKeyword | undefined, parameterName: Identifier | ThisTypeNode, type: TypeNode | undefined): TypePredicateNode; createTypeReferenceNode(typeName: string | EntityName, typeArguments?: readonly TypeNode[]): TypeReferenceNode; updateTypeReferenceNode(node: TypeReferenceNode, typeName: EntityName, typeArguments: NodeArray<TypeNode> | undefined): TypeReferenceNode; createFunctionTypeNode(typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode): FunctionTypeNode; updateFunctionTypeNode(node: FunctionTypeNode, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode): FunctionTypeNode; createConstructorTypeNode(typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode): ConstructorTypeNode; updateConstructorTypeNode(node: ConstructorTypeNode, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode): ConstructorTypeNode; createTypeQueryNode(exprName: EntityName): TypeQueryNode; updateTypeQueryNode(node: TypeQueryNode, exprName: EntityName): TypeQueryNode; createTypeLiteralNode(members: readonly TypeElement[] | undefined): TypeLiteralNode; updateTypeLiteralNode(node: TypeLiteralNode, members: NodeArray<TypeElement>): TypeLiteralNode; createArrayTypeNode(elementType: TypeNode): ArrayTypeNode; updateArrayTypeNode(node: ArrayTypeNode, elementType: TypeNode): ArrayTypeNode; 
createTupleTypeNode(elements: readonly (TypeNode | NamedTupleMember)[]): TupleTypeNode; updateTupleTypeNode(node: TupleTypeNode, elements: readonly (TypeNode | NamedTupleMember)[]): TupleTypeNode; createNamedTupleMember(dotDotDotToken: DotDotDotToken | undefined, name: Identifier, questionToken: QuestionToken | undefined, type: TypeNode): NamedTupleMember; updateNamedTupleMember(node: NamedTupleMember, dotDotDotToken: DotDotDotToken | undefined, name: Identifier, questionToken: QuestionToken | undefined, type: TypeNode): NamedTupleMember; createOptionalTypeNode(type: TypeNode): OptionalTypeNode; updateOptionalTypeNode(node: OptionalTypeNode, type: TypeNode): OptionalTypeNode; createRestTypeNode(type: TypeNode): RestTypeNode; updateRestTypeNode(node: RestTypeNode, type: TypeNode): RestTypeNode; createUnionTypeNode(types: readonly TypeNode[]): UnionTypeNode; updateUnionTypeNode(node: UnionTypeNode, types: NodeArray<TypeNode>): UnionTypeNode; createIntersectionTypeNode(types: readonly TypeNode[]): IntersectionTypeNode; updateIntersectionTypeNode(node: IntersectionTypeNode, types: NodeArray<TypeNode>): IntersectionTypeNode; createConditionalTypeNode(checkType: TypeNode, extendsType: TypeNode, trueType: TypeNode, falseType: TypeNode): ConditionalTypeNode; updateConditionalTypeNode(node: ConditionalTypeNode, checkType: TypeNode, extendsType: TypeNode, trueType: TypeNode, falseType: TypeNode): ConditionalTypeNode; createInferTypeNode(typeParameter: TypeParameterDeclaration): InferTypeNode; updateInferTypeNode(node: InferTypeNode, typeParameter: TypeParameterDeclaration): InferTypeNode; createImportTypeNode(argument: TypeNode, qualifier?: EntityName, typeArguments?: readonly TypeNode[], isTypeOf?: boolean): ImportTypeNode; updateImportTypeNode(node: ImportTypeNode, argument: TypeNode, qualifier: EntityName | undefined, typeArguments: readonly TypeNode[] | undefined, isTypeOf?: boolean): ImportTypeNode; createParenthesizedType(type: TypeNode): ParenthesizedTypeNode; 
updateParenthesizedType(node: ParenthesizedTypeNode, type: TypeNode): ParenthesizedTypeNode; createThisTypeNode(): ThisTypeNode; createTypeOperatorNode(operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode; updateTypeOperatorNode(node: TypeOperatorNode, type: TypeNode): TypeOperatorNode; createIndexedAccessTypeNode(objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode; updateIndexedAccessTypeNode(node: IndexedAccessTypeNode, objectType: TypeNode, indexType: TypeNode): IndexedAccessTypeNode; createMappedTypeNode(readonlyToken: ReadonlyKeyword | PlusToken | MinusToken | undefined, typeParameter: TypeParameterDeclaration, nameType: TypeNode | undefined, questionToken: QuestionToken | PlusToken | MinusToken | undefined, type: TypeNode | undefined): MappedTypeNode; updateMappedTypeNode(node: MappedTypeNode, readonlyToken: ReadonlyKeyword | PlusToken | MinusToken | undefined, typeParameter: TypeParameterDeclaration, nameType: TypeNode | undefined, questionToken: QuestionToken | PlusToken | MinusToken | undefined, type: TypeNode | undefined): MappedTypeNode; createLiteralTypeNode(literal: LiteralTypeNode["literal"]): LiteralTypeNode; updateLiteralTypeNode(node: LiteralTypeNode, literal: LiteralTypeNode["literal"]): LiteralTypeNode; createTemplateLiteralType(head: TemplateHead, templateSpans: readonly TemplateLiteralTypeSpan[]): TemplateLiteralTypeNode; updateTemplateLiteralType(node: TemplateLiteralTypeNode, head: TemplateHead, templateSpans: readonly TemplateLiteralTypeSpan[]): TemplateLiteralTypeNode; createObjectBindingPattern(elements: readonly BindingElement[]): ObjectBindingPattern; updateObjectBindingPattern(node: ObjectBindingPattern, elements: readonly BindingElement[]): ObjectBindingPattern; createArrayBindingPattern(elements: readonly ArrayBindingElement[]): ArrayBindingPattern; updateArrayBindingPattern(node: ArrayBindingPattern, elements: readonly ArrayBindingElement[]): 
ArrayBindingPattern; createBindingElement(dotDotDotToken: DotDotDotToken | undefined, propertyName: string | PropertyName | undefined, name: string | BindingName, initializer?: Expression): BindingElement; updateBindingElement(node: BindingElement, dotDotDotToken: DotDotDotToken | undefined, propertyName: PropertyName | undefined, name: BindingName, initializer: Expression | undefined): BindingElement; createArrayLiteralExpression(elements?: readonly Expression[], multiLine?: boolean): ArrayLiteralExpression; updateArrayLiteralExpression(node: ArrayLiteralExpression, elements: readonly Expression[]): ArrayLiteralExpression; createObjectLiteralExpression(properties?: readonly ObjectLiteralElementLike[], multiLine?: boolean): ObjectLiteralExpression; updateObjectLiteralExpression(node: ObjectLiteralExpression, properties: readonly ObjectLiteralElementLike[]): ObjectLiteralExpression; createPropertyAccessExpression(expression: Expression, name: string | Identifier | PrivateIdentifier): PropertyAccessExpression; updatePropertyAccessExpression(node: PropertyAccessExpression, expression: Expression, name: Identifier | PrivateIdentifier): PropertyAccessExpression; createPropertyAccessChain(expression: Expression, questionDotToken: QuestionDotToken | undefined, name: string | Identifier | PrivateIdentifier): PropertyAccessChain; updatePropertyAccessChain(node: PropertyAccessChain, expression: Expression, questionDotToken: QuestionDotToken | undefined, name: Identifier | PrivateIdentifier): PropertyAccessChain; createElementAccessExpression(expression: Expression, index: number | Expression): ElementAccessExpression; updateElementAccessExpression(node: ElementAccessExpression, expression: Expression, argumentExpression: Expression): ElementAccessExpression; createElementAccessChain(expression: Expression, questionDotToken: QuestionDotToken | undefined, index: number | Expression): ElementAccessChain; updateElementAccessChain(node: ElementAccessChain, expression: Expression, 
questionDotToken: QuestionDotToken | undefined, argumentExpression: Expression): ElementAccessChain; createCallExpression(expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined): CallExpression; updateCallExpression(node: CallExpression, expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[]): CallExpression; createCallChain(expression: Expression, questionDotToken: QuestionDotToken | undefined, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined): CallChain; updateCallChain(node: CallChain, expression: Expression, questionDotToken: QuestionDotToken | undefined, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[]): CallChain; createNewExpression(expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined): NewExpression; updateNewExpression(node: NewExpression, expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined): NewExpression; createTaggedTemplateExpression(tag: Expression, typeArguments: readonly TypeNode[] | undefined, template: TemplateLiteral): TaggedTemplateExpression; updateTaggedTemplateExpression(node: TaggedTemplateExpression, tag: Expression, typeArguments: readonly TypeNode[] | undefined, template: TemplateLiteral): TaggedTemplateExpression; createTypeAssertion(type: TypeNode, expression: Expression): TypeAssertion; updateTypeAssertion(node: TypeAssertion, type: TypeNode, expression: Expression): TypeAssertion; createParenthesizedExpression(expression: Expression): ParenthesizedExpression; updateParenthesizedExpression(node: ParenthesizedExpression, expression: Expression): ParenthesizedExpression; createFunctionExpression(modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | 
Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[] | undefined, type: TypeNode | undefined, body: Block): FunctionExpression; updateFunctionExpression(node: FunctionExpression, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block): FunctionExpression; createArrowFunction(modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, equalsGreaterThanToken: EqualsGreaterThanToken | undefined, body: ConciseBody): ArrowFunction; updateArrowFunction(node: ArrowFunction, modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, equalsGreaterThanToken: EqualsGreaterThanToken, body: ConciseBody): ArrowFunction; createDeleteExpression(expression: Expression): DeleteExpression; updateDeleteExpression(node: DeleteExpression, expression: Expression): DeleteExpression; createTypeOfExpression(expression: Expression): TypeOfExpression; updateTypeOfExpression(node: TypeOfExpression, expression: Expression): TypeOfExpression; createVoidExpression(expression: Expression): VoidExpression; updateVoidExpression(node: VoidExpression, expression: Expression): VoidExpression; createAwaitExpression(expression: Expression): AwaitExpression; updateAwaitExpression(node: AwaitExpression, expression: Expression): AwaitExpression; createPrefixUnaryExpression(operator: PrefixUnaryOperator, operand: Expression): PrefixUnaryExpression; updatePrefixUnaryExpression(node: PrefixUnaryExpression, operand: Expression): PrefixUnaryExpression; 
createPostfixUnaryExpression(operand: Expression, operator: PostfixUnaryOperator): PostfixUnaryExpression; updatePostfixUnaryExpression(node: PostfixUnaryExpression, operand: Expression): PostfixUnaryExpression; createBinaryExpression(left: Expression, operator: BinaryOperator | BinaryOperatorToken, right: Expression): BinaryExpression; updateBinaryExpression(node: BinaryExpression, left: Expression, operator: BinaryOperator | BinaryOperatorToken, right: Expression): BinaryExpression; createConditionalExpression(condition: Expression, questionToken: QuestionToken | undefined, whenTrue: Expression, colonToken: ColonToken | undefined, whenFalse: Expression): ConditionalExpression; updateConditionalExpression(node: ConditionalExpression, condition: Expression, questionToken: QuestionToken, whenTrue: Expression, colonToken: ColonToken, whenFalse: Expression): ConditionalExpression; createTemplateExpression(head: TemplateHead, templateSpans: readonly TemplateSpan[]): TemplateExpression; updateTemplateExpression(node: TemplateExpression, head: TemplateHead, templateSpans: readonly TemplateSpan[]): TemplateExpression; createTemplateHead(text: string, rawText?: string, templateFlags?: TokenFlags): TemplateHead; createTemplateHead(text: string | undefined, rawText: string, templateFlags?: TokenFlags): TemplateHead; createTemplateMiddle(text: string, rawText?: string, templateFlags?: TokenFlags): TemplateMiddle; createTemplateMiddle(text: string | undefined, rawText: string, templateFlags?: TokenFlags): TemplateMiddle; createTemplateTail(text: string, rawText?: string, templateFlags?: TokenFlags): TemplateTail; createTemplateTail(text: string | undefined, rawText: string, templateFlags?: TokenFlags): TemplateTail; createNoSubstitutionTemplateLiteral(text: string, rawText?: string): NoSubstitutionTemplateLiteral; createNoSubstitutionTemplateLiteral(text: string | undefined, rawText: string): NoSubstitutionTemplateLiteral; createYieldExpression(asteriskToken: AsteriskToken, 
expression: Expression): YieldExpression; createYieldExpression(asteriskToken: undefined, expression: Expression | undefined): YieldExpression; updateYieldExpression(node: YieldExpression, asteriskToken: AsteriskToken | undefined, expression: Expression | undefined): YieldExpression; createSpreadElement(expression: Expression): SpreadElement; updateSpreadElement(node: SpreadElement, expression: Expression): SpreadElement; createClassExpression(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]): ClassExpression; updateClassExpression(node: ClassExpression, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]): ClassExpression; createOmittedExpression(): OmittedExpression; createExpressionWithTypeArguments(expression: Expression, typeArguments: readonly TypeNode[] | undefined): ExpressionWithTypeArguments; updateExpressionWithTypeArguments(node: ExpressionWithTypeArguments, expression: Expression, typeArguments: readonly TypeNode[] | undefined): ExpressionWithTypeArguments; createAsExpression(expression: Expression, type: TypeNode): AsExpression; updateAsExpression(node: AsExpression, expression: Expression, type: TypeNode): AsExpression; createNonNullExpression(expression: Expression): NonNullExpression; updateNonNullExpression(node: NonNullExpression, expression: Expression): NonNullExpression; createNonNullChain(expression: Expression): NonNullChain; updateNonNullChain(node: NonNullChain, expression: Expression): NonNullChain; createMetaProperty(keywordToken: MetaProperty["keywordToken"], name: Identifier): 
MetaProperty; updateMetaProperty(node: MetaProperty, name: Identifier): MetaProperty; createTemplateSpan(expression: Expression, literal: TemplateMiddle | TemplateTail): TemplateSpan; updateTemplateSpan(node: TemplateSpan, expression: Expression, literal: TemplateMiddle | TemplateTail): TemplateSpan; createSemicolonClassElement(): SemicolonClassElement; createBlock(statements: readonly Statement[], multiLine?: boolean): Block; updateBlock(node: Block, statements: readonly Statement[]): Block; createVariableStatement(modifiers: readonly Modifier[] | undefined, declarationList: VariableDeclarationList | readonly VariableDeclaration[]): VariableStatement; updateVariableStatement(node: VariableStatement, modifiers: readonly Modifier[] | undefined, declarationList: VariableDeclarationList): VariableStatement; createEmptyStatement(): EmptyStatement; createExpressionStatement(expression: Expression): ExpressionStatement; updateExpressionStatement(node: ExpressionStatement, expression: Expression): ExpressionStatement; createIfStatement(expression: Expression, thenStatement: Statement, elseStatement?: Statement): IfStatement; updateIfStatement(node: IfStatement, expression: Expression, thenStatement: Statement, elseStatement: Statement | undefined): IfStatement; createDoStatement(statement: Statement, expression: Expression): DoStatement; updateDoStatement(node: DoStatement, statement: Statement, expression: Expression): DoStatement; createWhileStatement(expression: Expression, statement: Statement): WhileStatement; updateWhileStatement(node: WhileStatement, expression: Expression, statement: Statement): WhileStatement; createForStatement(initializer: ForInitializer | undefined, condition: Expression | undefined, incrementor: Expression | undefined, statement: Statement): ForStatement; updateForStatement(node: ForStatement, initializer: ForInitializer | undefined, condition: Expression | undefined, incrementor: Expression | undefined, statement: Statement): ForStatement; 
createForInStatement(initializer: ForInitializer, expression: Expression, statement: Statement): ForInStatement; updateForInStatement(node: ForInStatement, initializer: ForInitializer, expression: Expression, statement: Statement): ForInStatement; createForOfStatement(awaitModifier: AwaitKeyword | undefined, initializer: ForInitializer, expression: Expression, statement: Statement): ForOfStatement; updateForOfStatement(node: ForOfStatement, awaitModifier: AwaitKeyword | undefined, initializer: ForInitializer, expression: Expression, statement: Statement): ForOfStatement; createContinueStatement(label?: string | Identifier): ContinueStatement; updateContinueStatement(node: ContinueStatement, label: Identifier | undefined): ContinueStatement; createBreakStatement(label?: string | Identifier): BreakStatement; updateBreakStatement(node: BreakStatement, label: Identifier | undefined): BreakStatement; createReturnStatement(expression?: Expression): ReturnStatement; updateReturnStatement(node: ReturnStatement, expression: Expression | undefined): ReturnStatement; createWithStatement(expression: Expression, statement: Statement): WithStatement; updateWithStatement(node: WithStatement, expression: Expression, statement: Statement): WithStatement; createSwitchStatement(expression: Expression, caseBlock: CaseBlock): SwitchStatement; updateSwitchStatement(node: SwitchStatement, expression: Expression, caseBlock: CaseBlock): SwitchStatement; createLabeledStatement(label: string | Identifier, statement: Statement): LabeledStatement; updateLabeledStatement(node: LabeledStatement, label: Identifier, statement: Statement): LabeledStatement; createThrowStatement(expression: Expression): ThrowStatement; updateThrowStatement(node: ThrowStatement, expression: Expression): ThrowStatement; createTryStatement(tryBlock: Block, catchClause: CatchClause | undefined, finallyBlock: Block | undefined): TryStatement; updateTryStatement(node: TryStatement, tryBlock: Block, catchClause: 
CatchClause | undefined, finallyBlock: Block | undefined): TryStatement; createDebuggerStatement(): DebuggerStatement; createVariableDeclaration(name: string | BindingName, exclamationToken?: ExclamationToken, type?: TypeNode, initializer?: Expression): VariableDeclaration; updateVariableDeclaration(node: VariableDeclaration, name: BindingName, exclamationToken: ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): VariableDeclaration; createVariableDeclarationList(declarations: readonly VariableDeclaration[], flags?: NodeFlags): VariableDeclarationList; updateVariableDeclarationList(node: VariableDeclarationList, declarations: readonly VariableDeclaration[]): VariableDeclarationList; createFunctionDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined): FunctionDeclaration; updateFunctionDeclaration(node: FunctionDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined): FunctionDeclaration; createClassDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]): ClassDeclaration; updateClassDeclaration(node: ClassDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier | 
undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]): ClassDeclaration; createInterfaceDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly TypeElement[]): InterfaceDeclaration; updateInterfaceDeclaration(node: InterfaceDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly TypeElement[]): InterfaceDeclaration; createTypeAliasDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, type: TypeNode): TypeAliasDeclaration; updateTypeAliasDeclaration(node: TypeAliasDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, type: TypeNode): TypeAliasDeclaration; createEnumDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, members: readonly EnumMember[]): EnumDeclaration; updateEnumDeclaration(node: EnumDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, members: readonly EnumMember[]): EnumDeclaration; createModuleDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: ModuleName, body: ModuleBody | undefined, flags?: NodeFlags): ModuleDeclaration; updateModuleDeclaration(node: 
ModuleDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: ModuleName, body: ModuleBody | undefined): ModuleDeclaration; createModuleBlock(statements: readonly Statement[]): ModuleBlock; updateModuleBlock(node: ModuleBlock, statements: readonly Statement[]): ModuleBlock; createCaseBlock(clauses: readonly CaseOrDefaultClause[]): CaseBlock; updateCaseBlock(node: CaseBlock, clauses: readonly CaseOrDefaultClause[]): CaseBlock; createNamespaceExportDeclaration(name: string | Identifier): NamespaceExportDeclaration; updateNamespaceExportDeclaration(node: NamespaceExportDeclaration, name: Identifier): NamespaceExportDeclaration; createImportEqualsDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, moduleReference: ModuleReference): ImportEqualsDeclaration; updateImportEqualsDeclaration(node: ImportEqualsDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, moduleReference: ModuleReference): ImportEqualsDeclaration; createImportDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression): ImportDeclaration; updateImportDeclaration(node: ImportDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression): ImportDeclaration; createImportClause(isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause; updateImportClause(node: ImportClause, isTypeOnly: boolean, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined): ImportClause; createNamespaceImport(name: Identifier): NamespaceImport; updateNamespaceImport(node: NamespaceImport, name: Identifier): NamespaceImport; 
createNamespaceExport(name: Identifier): NamespaceExport; updateNamespaceExport(node: NamespaceExport, name: Identifier): NamespaceExport; createNamedImports(elements: readonly ImportSpecifier[]): NamedImports; updateNamedImports(node: NamedImports, elements: readonly ImportSpecifier[]): NamedImports; createImportSpecifier(propertyName: Identifier | undefined, name: Identifier): ImportSpecifier; updateImportSpecifier(node: ImportSpecifier, propertyName: Identifier | undefined, name: Identifier): ImportSpecifier; createExportAssignment(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, isExportEquals: boolean | undefined, expression: Expression): ExportAssignment; updateExportAssignment(node: ExportAssignment, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, expression: Expression): ExportAssignment; createExportDeclaration(decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, isTypeOnly: boolean, exportClause: NamedExportBindings | undefined, moduleSpecifier?: Expression): ExportDeclaration; updateExportDeclaration(node: ExportDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, isTypeOnly: boolean, exportClause: NamedExportBindings | undefined, moduleSpecifier: Expression | undefined): ExportDeclaration; createNamedExports(elements: readonly ExportSpecifier[]): NamedExports; updateNamedExports(node: NamedExports, elements: readonly ExportSpecifier[]): NamedExports; createExportSpecifier(propertyName: string | Identifier | undefined, name: string | Identifier): ExportSpecifier; updateExportSpecifier(node: ExportSpecifier, propertyName: Identifier | undefined, name: Identifier): ExportSpecifier; createExternalModuleReference(expression: Expression): ExternalModuleReference; updateExternalModuleReference(node: ExternalModuleReference, expression: Expression): ExternalModuleReference; 
createJSDocAllType(): JSDocAllType; createJSDocUnknownType(): JSDocUnknownType; createJSDocNonNullableType(type: TypeNode): JSDocNonNullableType; updateJSDocNonNullableType(node: JSDocNonNullableType, type: TypeNode): JSDocNonNullableType; createJSDocNullableType(type: TypeNode): JSDocNullableType; updateJSDocNullableType(node: JSDocNullableType, type: TypeNode): JSDocNullableType; createJSDocOptionalType(type: TypeNode): JSDocOptionalType; updateJSDocOptionalType(node: JSDocOptionalType, type: TypeNode): JSDocOptionalType; createJSDocFunctionType(parameters: readonly ParameterDeclaration[], type: TypeNode | undefined): JSDocFunctionType; updateJSDocFunctionType(node: JSDocFunctionType, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined): JSDocFunctionType; createJSDocVariadicType(type: TypeNode): JSDocVariadicType; updateJSDocVariadicType(node: JSDocVariadicType, type: TypeNode): JSDocVariadicType; createJSDocNamepathType(type: TypeNode): JSDocNamepathType; updateJSDocNamepathType(node: JSDocNamepathType, type: TypeNode): JSDocNamepathType; createJSDocTypeExpression(type: TypeNode): JSDocTypeExpression; updateJSDocTypeExpression(node: JSDocTypeExpression, type: TypeNode): JSDocTypeExpression; createJSDocNameReference(name: EntityName): JSDocNameReference; updateJSDocNameReference(node: JSDocNameReference, name: EntityName): JSDocNameReference; createJSDocTypeLiteral(jsDocPropertyTags?: readonly JSDocPropertyLikeTag[], isArrayType?: boolean): JSDocTypeLiteral; updateJSDocTypeLiteral(node: JSDocTypeLiteral, jsDocPropertyTags: readonly JSDocPropertyLikeTag[] | undefined, isArrayType: boolean | undefined): JSDocTypeLiteral; createJSDocSignature(typeParameters: readonly JSDocTemplateTag[] | undefined, parameters: readonly JSDocParameterTag[], type?: JSDocReturnTag): JSDocSignature; updateJSDocSignature(node: JSDocSignature, typeParameters: readonly JSDocTemplateTag[] | undefined, parameters: readonly JSDocParameterTag[], type: JSDocReturnTag | 
undefined): JSDocSignature; createJSDocTemplateTag(tagName: Identifier | undefined, constraint: JSDocTypeExpression | undefined, typeParameters: readonly TypeParameterDeclaration[], comment?: string): JSDocTemplateTag; updateJSDocTemplateTag(node: JSDocTemplateTag, tagName: Identifier | undefined, constraint: JSDocTypeExpression | undefined, typeParameters: readonly TypeParameterDeclaration[], comment: string | undefined): JSDocTemplateTag; createJSDocTypedefTag(tagName: Identifier | undefined, typeExpression?: JSDocTypeExpression | JSDocTypeLiteral, fullName?: Identifier | JSDocNamespaceDeclaration, comment?: string): JSDocTypedefTag; updateJSDocTypedefTag(node: JSDocTypedefTag, tagName: Identifier | undefined, typeExpression: JSDocTypeExpression | JSDocTypeLiteral | undefined, fullName: Identifier | JSDocNamespaceDeclaration | undefined, comment: string | undefined): JSDocTypedefTag; createJSDocParameterTag(tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression?: JSDocTypeExpression, isNameFirst?: boolean, comment?: string): JSDocParameterTag; updateJSDocParameterTag(node: JSDocParameterTag, tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression: JSDocTypeExpression | undefined, isNameFirst: boolean, comment: string | undefined): JSDocParameterTag; createJSDocPropertyTag(tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression?: JSDocTypeExpression, isNameFirst?: boolean, comment?: string): JSDocPropertyTag; updateJSDocPropertyTag(node: JSDocPropertyTag, tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression: JSDocTypeExpression | undefined, isNameFirst: boolean, comment: string | undefined): JSDocPropertyTag; createJSDocTypeTag(tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string): JSDocTypeTag; updateJSDocTypeTag(node: JSDocTypeTag, tagName: Identifier | undefined, typeExpression: 
JSDocTypeExpression, comment: string | undefined): JSDocTypeTag; createJSDocSeeTag(tagName: Identifier | undefined, nameExpression: JSDocNameReference | undefined, comment?: string): JSDocSeeTag; updateJSDocSeeTag(node: JSDocSeeTag, tagName: Identifier | undefined, nameExpression: JSDocNameReference | undefined, comment?: string): JSDocSeeTag; createJSDocReturnTag(tagName: Identifier | undefined, typeExpression?: JSDocTypeExpression, comment?: string): JSDocReturnTag; updateJSDocReturnTag(node: JSDocReturnTag, tagName: Identifier | undefined, typeExpression: JSDocTypeExpression | undefined, comment: string | undefined): JSDocReturnTag; createJSDocThisTag(tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string): JSDocThisTag; updateJSDocThisTag(node: JSDocThisTag, tagName: Identifier | undefined, typeExpression: JSDocTypeExpression | undefined, comment: string | undefined): JSDocThisTag; createJSDocEnumTag(tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string): JSDocEnumTag; updateJSDocEnumTag(node: JSDocEnumTag, tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment: string | undefined): JSDocEnumTag; createJSDocCallbackTag(tagName: Identifier | undefined, typeExpression: JSDocSignature, fullName?: Identifier | JSDocNamespaceDeclaration, comment?: string): JSDocCallbackTag; updateJSDocCallbackTag(node: JSDocCallbackTag, tagName: Identifier | undefined, typeExpression: JSDocSignature, fullName: Identifier | JSDocNamespaceDeclaration | undefined, comment: string | undefined): JSDocCallbackTag; createJSDocAugmentsTag(tagName: Identifier | undefined, className: JSDocAugmentsTag["class"], comment?: string): JSDocAugmentsTag; updateJSDocAugmentsTag(node: JSDocAugmentsTag, tagName: Identifier | undefined, className: JSDocAugmentsTag["class"], comment: string | undefined): JSDocAugmentsTag; createJSDocImplementsTag(tagName: Identifier | undefined, className: JSDocImplementsTag["class"], 
comment?: string): JSDocImplementsTag; updateJSDocImplementsTag(node: JSDocImplementsTag, tagName: Identifier | undefined, className: JSDocImplementsTag["class"], comment: string | undefined): JSDocImplementsTag; createJSDocAuthorTag(tagName: Identifier | undefined, comment?: string): JSDocAuthorTag; updateJSDocAuthorTag(node: JSDocAuthorTag, tagName: Identifier | undefined, comment: string | undefined): JSDocAuthorTag; createJSDocClassTag(tagName: Identifier | undefined, comment?: string): JSDocClassTag; updateJSDocClassTag(node: JSDocClassTag, tagName: Identifier | undefined, comment: string | undefined): JSDocClassTag; createJSDocPublicTag(tagName: Identifier | undefined, comment?: string): JSDocPublicTag; updateJSDocPublicTag(node: JSDocPublicTag, tagName: Identifier | undefined, comment: string | undefined): JSDocPublicTag; createJSDocPrivateTag(tagName: Identifier | undefined, comment?: string): JSDocPrivateTag; updateJSDocPrivateTag(node: JSDocPrivateTag, tagName: Identifier | undefined, comment: string | undefined): JSDocPrivateTag; createJSDocProtectedTag(tagName: Identifier | undefined, comment?: string): JSDocProtectedTag; updateJSDocProtectedTag(node: JSDocProtectedTag, tagName: Identifier | undefined, comment: string | undefined): JSDocProtectedTag; createJSDocReadonlyTag(tagName: Identifier | undefined, comment?: string): JSDocReadonlyTag; updateJSDocReadonlyTag(node: JSDocReadonlyTag, tagName: Identifier | undefined, comment: string | undefined): JSDocReadonlyTag; createJSDocUnknownTag(tagName: Identifier, comment?: string): JSDocUnknownTag; updateJSDocUnknownTag(node: JSDocUnknownTag, tagName: Identifier, comment: string | undefined): JSDocUnknownTag; createJSDocDeprecatedTag(tagName: Identifier, comment?: string): JSDocDeprecatedTag; updateJSDocDeprecatedTag(node: JSDocDeprecatedTag, tagName: Identifier, comment?: string): JSDocDeprecatedTag; createJSDocComment(comment?: string | undefined, tags?: readonly JSDocTag[] | undefined): JSDoc; 
updateJSDocComment(node: JSDoc, comment: string | undefined, tags: readonly JSDocTag[] | undefined): JSDoc; createJsxElement(openingElement: JsxOpeningElement, children: readonly JsxChild[], closingElement: JsxClosingElement): JsxElement; updateJsxElement(node: JsxElement, openingElement: JsxOpeningElement, children: readonly JsxChild[], closingElement: JsxClosingElement): JsxElement; createJsxSelfClosingElement(tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes): JsxSelfClosingElement; updateJsxSelfClosingElement(node: JsxSelfClosingElement, tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes): JsxSelfClosingElement; createJsxOpeningElement(tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes): JsxOpeningElement; updateJsxOpeningElement(node: JsxOpeningElement, tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes): JsxOpeningElement; createJsxClosingElement(tagName: JsxTagNameExpression): JsxClosingElement; updateJsxClosingElement(node: JsxClosingElement, tagName: JsxTagNameExpression): JsxClosingElement; createJsxFragment(openingFragment: JsxOpeningFragment, children: readonly JsxChild[], closingFragment: JsxClosingFragment): JsxFragment; createJsxText(text: string, containsOnlyTriviaWhiteSpaces?: boolean): JsxText; updateJsxText(node: JsxText, text: string, containsOnlyTriviaWhiteSpaces?: boolean): JsxText; createJsxOpeningFragment(): JsxOpeningFragment; createJsxJsxClosingFragment(): JsxClosingFragment; updateJsxFragment(node: JsxFragment, openingFragment: JsxOpeningFragment, children: readonly JsxChild[], closingFragment: JsxClosingFragment): JsxFragment; createJsxAttribute(name: Identifier, initializer: StringLiteral | JsxExpression | undefined): JsxAttribute; updateJsxAttribute(node: JsxAttribute, name: Identifier, initializer: StringLiteral | 
JsxExpression | undefined): JsxAttribute; createJsxAttributes(properties: readonly JsxAttributeLike[]): JsxAttributes; updateJsxAttributes(node: JsxAttributes, properties: readonly JsxAttributeLike[]): JsxAttributes; createJsxSpreadAttribute(expression: Expression): JsxSpreadAttribute; updateJsxSpreadAttribute(node: JsxSpreadAttribute, expression: Expression): JsxSpreadAttribute; createJsxExpression(dotDotDotToken: DotDotDotToken | undefined, expression: Expression | undefined): JsxExpression; updateJsxExpression(node: JsxExpression, expression: Expression | undefined): JsxExpression; createCaseClause(expression: Expression, statements: readonly Statement[]): CaseClause; updateCaseClause(node: CaseClause, expression: Expression, statements: readonly Statement[]): CaseClause; createDefaultClause(statements: readonly Statement[]): DefaultClause; updateDefaultClause(node: DefaultClause, statements: readonly Statement[]): DefaultClause; createHeritageClause(token: HeritageClause["token"], types: readonly ExpressionWithTypeArguments[]): HeritageClause; updateHeritageClause(node: HeritageClause, types: readonly ExpressionWithTypeArguments[]): HeritageClause; createCatchClause(variableDeclaration: string | VariableDeclaration | undefined, block: Block): CatchClause; updateCatchClause(node: CatchClause, variableDeclaration: VariableDeclaration | undefined, block: Block): CatchClause; createPropertyAssignment(name: string | PropertyName, initializer: Expression): PropertyAssignment; updatePropertyAssignment(node: PropertyAssignment, name: PropertyName, initializer: Expression): PropertyAssignment; createShorthandPropertyAssignment(name: string | Identifier, objectAssignmentInitializer?: Expression): ShorthandPropertyAssignment; updateShorthandPropertyAssignment(node: ShorthandPropertyAssignment, name: Identifier, objectAssignmentInitializer: Expression | undefined): ShorthandPropertyAssignment; createSpreadAssignment(expression: Expression): SpreadAssignment; 
updateSpreadAssignment(node: SpreadAssignment, expression: Expression): SpreadAssignment; createEnumMember(name: string | PropertyName, initializer?: Expression): EnumMember; updateEnumMember(node: EnumMember, name: PropertyName, initializer: Expression | undefined): EnumMember; createSourceFile(statements: readonly Statement[], endOfFileToken: EndOfFileToken, flags: NodeFlags): SourceFile; updateSourceFile(node: SourceFile, statements: readonly Statement[], isDeclarationFile?: boolean, referencedFiles?: readonly FileReference[], typeReferences?: readonly FileReference[], hasNoDefaultLib?: boolean, libReferences?: readonly FileReference[]): SourceFile; createNotEmittedStatement(original: Node): NotEmittedStatement; createPartiallyEmittedExpression(expression: Expression, original?: Node): PartiallyEmittedExpression; updatePartiallyEmittedExpression(node: PartiallyEmittedExpression, expression: Expression): PartiallyEmittedExpression; createCommaListExpression(elements: readonly Expression[]): CommaListExpression; updateCommaListExpression(node: CommaListExpression, elements: readonly Expression[]): CommaListExpression; createBundle(sourceFiles: readonly SourceFile[], prepends?: readonly (UnparsedSource | InputFiles)[]): Bundle; updateBundle(node: Bundle, sourceFiles: readonly SourceFile[], prepends?: readonly (UnparsedSource | InputFiles)[]): Bundle; createComma(left: Expression, right: Expression): BinaryExpression; createAssignment(left: ObjectLiteralExpression | ArrayLiteralExpression, right: Expression): DestructuringAssignment; createAssignment(left: Expression, right: Expression): AssignmentExpression<EqualsToken>; createLogicalOr(left: Expression, right: Expression): BinaryExpression; createLogicalAnd(left: Expression, right: Expression): BinaryExpression; createBitwiseOr(left: Expression, right: Expression): BinaryExpression; createBitwiseXor(left: Expression, right: Expression): BinaryExpression; createBitwiseAnd(left: Expression, right: Expression): 
BinaryExpression; createStrictEquality(left: Expression, right: Expression): BinaryExpression; createStrictInequality(left: Expression, right: Expression): BinaryExpression; createEquality(left: Expression, right: Expression): BinaryExpression; createInequality(left: Expression, right: Expression): BinaryExpression; createLessThan(left: Expression, right: Expression): BinaryExpression; createLessThanEquals(left: Expression, right: Expression): BinaryExpression; createGreaterThan(left: Expression, right: Expression): BinaryExpression; createGreaterThanEquals(left: Expression, right: Expression): BinaryExpression; createLeftShift(left: Expression, right: Expression): BinaryExpression; createRightShift(left: Expression, right: Expression): BinaryExpression; createUnsignedRightShift(left: Expression, right: Expression): BinaryExpression; createAdd(left: Expression, right: Expression): BinaryExpression; createSubtract(left: Expression, right: Expression): BinaryExpression; createMultiply(left: Expression, right: Expression): BinaryExpression; createDivide(left: Expression, right: Expression): BinaryExpression; createModulo(left: Expression, right: Expression): BinaryExpression; createExponent(left: Expression, right: Expression): BinaryExpression; createPrefixPlus(operand: Expression): PrefixUnaryExpression; createPrefixMinus(operand: Expression): PrefixUnaryExpression; createPrefixIncrement(operand: Expression): PrefixUnaryExpression; createPrefixDecrement(operand: Expression): PrefixUnaryExpression; createBitwiseNot(operand: Expression): PrefixUnaryExpression; createLogicalNot(operand: Expression): PrefixUnaryExpression; createPostfixIncrement(operand: Expression): PostfixUnaryExpression; createPostfixDecrement(operand: Expression): PostfixUnaryExpression; createImmediatelyInvokedFunctionExpression(statements: readonly Statement[]): CallExpression; createImmediatelyInvokedFunctionExpression(statements: readonly Statement[], param: ParameterDeclaration, paramValue: 
Expression): CallExpression;
    createImmediatelyInvokedArrowFunction(statements: readonly Statement[]): CallExpression;
    createImmediatelyInvokedArrowFunction(statements: readonly Statement[], param: ParameterDeclaration, paramValue: Expression): CallExpression;
    createVoidZero(): VoidExpression;
    createExportDefault(expression: Expression): ExportAssignment;
    createExternalModuleExport(exportName: Identifier): ExportDeclaration;
    restoreOuterExpressions(outerExpression: Expression | undefined, innerExpression: Expression, kinds?: OuterExpressionKinds): Expression;
}

export interface CoreTransformationContext {
    readonly factory: NodeFactory;
    /** Gets the compiler options supplied to the transformer. */
    getCompilerOptions(): CompilerOptions;
    /** Starts a new lexical environment. */
    startLexicalEnvironment(): void;
    /** Suspends the current lexical environment, usually after visiting a parameter list. */
    suspendLexicalEnvironment(): void;
    /** Resumes a suspended lexical environment, usually before visiting a function body. */
    resumeLexicalEnvironment(): void;
    /** Ends a lexical environment, returning any declarations. */
    endLexicalEnvironment(): Statement[] | undefined;
    /** Hoists a function declaration to the containing scope. */
    hoistFunctionDeclaration(node: FunctionDeclaration): void;
    /** Hoists a variable declaration to the containing scope. */
    hoistVariableDeclaration(node: Identifier): void;
}

export interface TransformationContext extends CoreTransformationContext {
    /** Records a request for a non-scoped emit helper in the current context. */
    requestEmitHelper(helper: EmitHelper): void;
    /** Gets and resets the requested non-scoped emit helpers. */
    readEmitHelpers(): EmitHelper[] | undefined;
    /** Enables expression substitutions in the pretty printer for the provided SyntaxKind. */
    enableSubstitution(kind: SyntaxKind): void;
    /** Determines whether expression substitutions are enabled for the provided node. */
    isSubstitutionEnabled(node: Node): boolean;
    /**
     * Hook used by transformers to substitute expressions just before they
     * are emitted by the pretty printer.
     *
     * NOTE: Transformation hooks should only be modified during `Transformer` initialization,
     * before returning the `NodeTransformer` callback.
     */
    onSubstituteNode: (hint: EmitHint, node: Node) => Node;
    /**
     * Enables before/after emit notifications in the pretty printer for the provided
     * SyntaxKind.
     */
    enableEmitNotification(kind: SyntaxKind): void;
    /**
     * Determines whether before/after emit notifications should be raised in the pretty
     * printer when it emits a node.
     */
    isEmitNotificationEnabled(node: Node): boolean;
    /**
     * Hook used to allow transformers to capture state before or after
     * the printer emits a node.
     *
     * NOTE: Transformation hooks should only be modified during `Transformer` initialization,
     * before returning the `NodeTransformer` callback.
     */
    onEmitNode: (hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void) => void;
}

export interface TransformationResult<T extends Node> {
    /** Gets the transformed source files. */
    transformed: T[];
    /** Gets diagnostics for the transformation. */
    diagnostics?: DiagnosticWithLocation[];
    /**
     * Gets a substitute for a node, if one is available; otherwise, returns the original node.
     *
     * @param hint A hint as to the intended usage of the node.
     * @param node The node to substitute.
     */
    substituteNode(hint: EmitHint, node: Node): Node;
    /**
     * Emits a node with possible notification.
     *
     * @param hint A hint as to the intended usage of the node.
     * @param node The node to emit.
     * @param emitCallback A callback used to emit the node.
     */
    emitNodeWithNotification(hint: EmitHint, node: Node, emitCallback: (hint: EmitHint, node: Node) => void): void;
    /**
     * Indicates if a given node needs an emit notification
     *
     * @param node The node to emit.
     */
    isEmitNotificationEnabled?(node: Node): boolean;
    /**
     * Clean up EmitNode entries on any parse-tree nodes.
*/ dispose(): void; } /** * A function that is used to initialize and return a `Transformer` callback, which in turn * will be used to transform one or more nodes. */ export type TransformerFactory<T extends Node> = (context: TransformationContext) => Transformer<T>; /** * A function that transforms a node. */ export type Transformer<T extends Node> = (node: T) => T; /** * A function that accepts and possibly transforms a node. */ export type Visitor = (node: Node) => VisitResult<Node>; export interface NodeVisitor { <T extends Node>(nodes: T, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T; <T extends Node>(nodes: T | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T | undefined; } export interface NodesVisitor { <T extends Node>(nodes: NodeArray<T>, visitor: Visitor | undefined, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T>; <T extends Node>(nodes: NodeArray<T> | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T> | undefined; } export type VisitResult<T extends Node> = T | T[] | undefined; export interface Printer { /** * Print a node and its subtree as-is, without any emit transformations. * @param hint A value indicating the purpose of a node. This is primarily used to * distinguish between an `Identifier` used in an expression position, versus an * `Identifier` used as an `IdentifierName` as part of a declaration. For most nodes you * should just pass `Unspecified`. * @param node The node to print. The node and its subtree are printed as-is, without any * emit transformations. * @param sourceFile A source file that provides context for the node. 
The source text of * the file is used to emit the original source content for literals and identifiers, while * the identifiers of the source file are used when generating unique names to avoid * collisions. */ printNode(hint: EmitHint, node: Node, sourceFile: SourceFile): string; /** * Prints a list of nodes using the given format flags */ printList<T extends Node>(format: ListFormat, list: NodeArray<T>, sourceFile: SourceFile): string; /** * Prints a source file as-is, without any emit transformations. */ printFile(sourceFile: SourceFile): string; /** * Prints a bundle of source files as-is, without any emit transformations. */ printBundle(bundle: Bundle): string; } export interface PrintHandlers { /** * A hook used by the Printer when generating unique names to avoid collisions with * globally defined names that exist outside of the current source file. */ hasGlobalName?(name: string): boolean; /** * A hook used by the Printer to provide notifications prior to emitting a node. A * compatible implementation **must** invoke `emitCallback` with the provided `hint` and * `node` values. * @param hint A hint indicating the intended purpose of the node. * @param node The node to emit. * @param emitCallback A callback that, when invoked, will emit the node. * @example * ```ts * var printer = createPrinter(printerOptions, { * onEmitNode(hint, node, emitCallback) { * // set up or track state prior to emitting the node... * emitCallback(hint, node); * // restore state after emitting the node... * } * }); * ``` */ onEmitNode?(hint: EmitHint, node: Node | undefined, emitCallback: (hint: EmitHint, node: Node | undefined) => void): void; /** * A hook used to check if an emit notification is required for a node. * @param node The node to emit. */ isEmitNotificationEnabled?(node: Node | undefined): boolean; /** * A hook used by the Printer to perform just-in-time substitution of a node. 
This is * primarily used by node transformations that need to substitute one node for another, * such as replacing `myExportedVar` with `exports.myExportedVar`. * @param hint A hint indicating the intended purpose of the node. * @param node The node to emit. * @example * ```ts * var printer = createPrinter(printerOptions, { * substituteNode(hint, node) { * // perform substitution if necessary... * return node; * } * }); * ``` */ substituteNode?(hint: EmitHint, node: Node): Node; } export interface PrinterOptions { removeComments?: boolean; newLine?: NewLineKind; omitTrailingSemicolon?: boolean; noEmitHelpers?: boolean; } export interface GetEffectiveTypeRootsHost { directoryExists?(directoryName: string): boolean; getCurrentDirectory?(): string; } export interface TextSpan { start: number; length: number; } export interface TextChangeRange { span: TextSpan; newLength: number; } export interface SyntaxList extends Node { kind: SyntaxKind.SyntaxList; _children: Node[]; } export enum ListFormat { None = 0, SingleLine = 0, MultiLine = 1, PreserveLines = 2, LinesMask = 3, NotDelimited = 0, BarDelimited = 4, AmpersandDelimited = 8, CommaDelimited = 16, AsteriskDelimited = 32, DelimitersMask = 60, AllowTrailingComma = 64, Indented = 128, SpaceBetweenBraces = 256, SpaceBetweenSiblings = 512, Braces = 1024, Parenthesis = 2048, AngleBrackets = 4096, SquareBrackets = 8192, BracketsMask = 15360, OptionalIfUndefined = 16384, OptionalIfEmpty = 32768, Optional = 49152, PreferNewLine = 65536, NoTrailingNewLine = 131072, NoInterveningComments = 262144, NoSpaceIfEmpty = 524288, SingleElement = 1048576, SpaceAfterList = 2097152, Modifiers = 262656, HeritageClauses = 512, SingleLineTypeLiteralMembers = 768, MultiLineTypeLiteralMembers = 32897, SingleLineTupleTypeElements = 528, MultiLineTupleTypeElements = 657, UnionTypeConstituents = 516, IntersectionTypeConstituents = 520, ObjectBindingPatternElements = 525136, ArrayBindingPatternElements = 524880, ObjectLiteralExpressionProperties 
= 526226, ArrayLiteralExpressionElements = 8914, CommaListElements = 528, CallExpressionArguments = 2576, NewExpressionArguments = 18960, TemplateExpressionSpans = 262144, SingleLineBlockStatements = 768, MultiLineBlockStatements = 129, VariableDeclarationList = 528, SingleLineFunctionBodyStatements = 768, MultiLineFunctionBodyStatements = 1, ClassHeritageClauses = 0, ClassMembers = 129, InterfaceMembers = 129, EnumMembers = 145, CaseBlockClauses = 129, NamedImportsOrExportsElements = 525136, JsxElementOrFragmentChildren = 262144, JsxElementAttributes = 262656, CaseOrDefaultClauseStatements = 163969, HeritageClauseTypes = 528, SourceFileStatements = 131073, Decorators = 2146305, TypeArguments = 53776, TypeParameters = 53776, Parameters = 2576, IndexSignatureParameters = 8848, JSDocComment = 33 } export interface UserPreferences { readonly disableSuggestions?: boolean; readonly quotePreference?: "auto" | "double" | "single"; readonly includeCompletionsForModuleExports?: boolean; readonly includeAutomaticOptionalChainCompletions?: boolean; readonly includeCompletionsWithInsertText?: boolean; readonly importModuleSpecifierPreference?: "auto" | "relative" | "non-relative"; /** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */ readonly importModuleSpecifierEnding?: "auto" | "minimal" | "index" | "js"; readonly allowTextChangesInNewFiles?: boolean; readonly providePrefixAndSuffixTextForRename?: boolean; readonly includePackageJsonAutoImports?: "auto" | "on" | "off"; readonly provideRefactorNotApplicableReason?: boolean; } /** Represents a bigint literal value without requiring bigint support */ export interface PseudoBigInt { negative: boolean; base10Value: string; } export {}; } declare function setTimeout(handler: (...args: any[]) => void, timeout: number): any; declare function clearTimeout(handle: any): void; declare namespace ts { export enum FileWatcherEventKind { Created = 0, Changed = 1, Deleted = 2 } export type 
FileWatcherCallback = (fileName: string, eventKind: FileWatcherEventKind) => void; export type DirectoryWatcherCallback = (fileName: string) => void; export interface System { args: string[]; newLine: string; useCaseSensitiveFileNames: boolean; write(s: string): void; writeOutputIsTTY?(): boolean; readFile(path: string, encoding?: string): string | undefined; getFileSize?(path: string): number; writeFile(path: string, data: string, writeByteOrderMark?: boolean): void; /** * @pollingInterval - this parameter is used in polling-based watchers and ignored in watchers that * use native OS file watching */ watchFile?(path: string, callback: FileWatcherCallback, pollingInterval?: number, options?: WatchOptions): FileWatcher; watchDirectory?(path: string, callback: DirectoryWatcherCallback, recursive?: boolean, options?: WatchOptions): FileWatcher; resolvePath(path: string): string; fileExists(path: string): boolean; directoryExists(path: string): boolean; createDirectory(path: string): void; getExecutingFilePath(): string; getCurrentDirectory(): string; getDirectories(path: string): string[]; readDirectory(path: string, extensions?: readonly string[], exclude?: readonly string[], include?: readonly string[], depth?: number): string[]; getModifiedTime?(path: string): Date | undefined; setModifiedTime?(path: string, time: Date): void; deleteFile?(path: string): void; /** * A good implementation is node.js' `crypto.createHash`. (https://nodejs.org/api/crypto.html#crypto_crypto_createhash_algorithm) */ createHash?(data: string): string; /** This must be cryptographically secure. Only implement this method using `crypto.createHash("sha256")`. 
*/ createSHA256Hash?(data: string): string; getMemoryUsage?(): number; exit(exitCode?: number): void; realpath?(path: string): string; setTimeout?(callback: (...args: any[]) => void, ms: number, ...args: any[]): any; clearTimeout?(timeoutId: any): void; clearScreen?(): void; base64decode?(input: string): string; base64encode?(input: string): string; } export interface FileWatcher { close(): void; } export function getNodeMajorVersion(): number | undefined; export let sys: System; export {}; } declare namespace ts { type ErrorCallback = (message: DiagnosticMessage, length: number) => void; interface Scanner { getStartPos(): number; getToken(): SyntaxKind; getTextPos(): number; getTokenPos(): number; getTokenText(): string; getTokenValue(): string; hasUnicodeEscape(): boolean; hasExtendedUnicodeEscape(): boolean; hasPrecedingLineBreak(): boolean; isIdentifier(): boolean; isReservedWord(): boolean; isUnterminated(): boolean; reScanGreaterToken(): SyntaxKind; reScanSlashToken(): SyntaxKind; reScanAsteriskEqualsToken(): SyntaxKind; reScanTemplateToken(isTaggedTemplate: boolean): SyntaxKind; reScanTemplateHeadOrNoSubstitutionTemplate(): SyntaxKind; scanJsxIdentifier(): SyntaxKind; scanJsxAttributeValue(): SyntaxKind; reScanJsxAttributeValue(): SyntaxKind; reScanJsxToken(): JsxTokenSyntaxKind; reScanLessThanToken(): SyntaxKind; reScanQuestionToken(): SyntaxKind; scanJsxToken(): JsxTokenSyntaxKind; scanJsDocToken(): JSDocSyntaxKind; scan(): SyntaxKind; getText(): string; setText(text: string | undefined, start?: number, length?: number): void; setOnError(onError: ErrorCallback | undefined): void; setScriptTarget(scriptTarget: ScriptTarget): void; setLanguageVariant(variant: LanguageVariant): void; setTextPos(textPos: number): void; lookAhead<T>(callback: () => T): T; scanRange<T>(start: number, length: number, callback: () => T): T; tryScan<T>(callback: () => T): T; } function tokenToString(t: SyntaxKind): string | undefined; function 
getPositionOfLineAndCharacter(sourceFile: SourceFileLike, line: number, character: number): number; function getLineAndCharacterOfPosition(sourceFile: SourceFileLike, position: number): LineAndCharacter; function isWhiteSpaceLike(ch: number): boolean; /** Does not include line breaks. For that, see isWhiteSpaceLike. */ function isWhiteSpaceSingleLine(ch: number): boolean; function isLineBreak(ch: number): boolean; function couldStartTrivia(text: string, pos: number): boolean; function forEachLeadingCommentRange<U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean) => U): U | undefined; function forEachLeadingCommentRange<T, U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean, state: T) => U, state: T): U | undefined; function forEachTrailingCommentRange<U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean) => U): U | undefined; function forEachTrailingCommentRange<T, U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean, state: T) => U, state: T): U | undefined; function reduceEachLeadingCommentRange<T, U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean, state: T, memo: U) => U, state: T, initial: U): U | undefined; function reduceEachTrailingCommentRange<T, U>(text: string, pos: number, cb: (pos: number, end: number, kind: CommentKind, hasTrailingNewLine: boolean, state: T, memo: U) => U, state: T, initial: U): U | undefined; function getLeadingCommentRanges(text: string, pos: number): CommentRange[] | undefined; function getTrailingCommentRanges(text: string, pos: number): CommentRange[] | undefined; /** Optionally, get the shebang */ function getShebang(text: string): string | undefined; function isIdentifierStart(ch: number, languageVersion: ScriptTarget | undefined): boolean; function 
isIdentifierPart(ch: number, languageVersion: ScriptTarget | undefined, identifierVariant?: LanguageVariant): boolean; function createScanner(languageVersion: ScriptTarget, skipTrivia: boolean, languageVariant?: LanguageVariant, textInitial?: string, onError?: ErrorCallback, start?: number, length?: number): Scanner; } declare namespace ts { function isExternalModuleNameRelative(moduleName: string): boolean; function sortAndDeduplicateDiagnostics<T extends Diagnostic>(diagnostics: readonly T[]): SortedReadonlyArray<T>; function getDefaultLibFileName(options: CompilerOptions): string; function textSpanEnd(span: TextSpan): number; function textSpanIsEmpty(span: TextSpan): boolean; function textSpanContainsPosition(span: TextSpan, position: number): boolean; function textSpanContainsTextSpan(span: TextSpan, other: TextSpan): boolean; function textSpanOverlapsWith(span: TextSpan, other: TextSpan): boolean; function textSpanOverlap(span1: TextSpan, span2: TextSpan): TextSpan | undefined; function textSpanIntersectsWithTextSpan(span: TextSpan, other: TextSpan): boolean; function textSpanIntersectsWith(span: TextSpan, start: number, length: number): boolean; function decodedTextSpanIntersectsWith(start1: number, length1: number, start2: number, length2: number): boolean; function textSpanIntersectsWithPosition(span: TextSpan, position: number): boolean; function textSpanIntersection(span1: TextSpan, span2: TextSpan): TextSpan | undefined; function createTextSpan(start: number, length: number): TextSpan; function createTextSpanFromBounds(start: number, end: number): TextSpan; function textChangeRangeNewSpan(range: TextChangeRange): TextSpan; function textChangeRangeIsUnchanged(range: TextChangeRange): boolean; function createTextChangeRange(span: TextSpan, newLength: number): TextChangeRange; let unchangedTextChangeRange: TextChangeRange; /** * Called to merge all the changes that occurred across several versions of a script snapshot * into a single change. i.e. 
if a user keeps making successive edits to a script we will * have a text change from V1 to V2, V2 to V3, ..., Vn. * * This function will then merge those changes into a single change range valid between V1 and * Vn. */ function collapseTextChangeRangesAcrossMultipleVersions(changes: readonly TextChangeRange[]): TextChangeRange; function getTypeParameterOwner(d: Declaration): Declaration | undefined; type ParameterPropertyDeclaration = ParameterDeclaration & { parent: ConstructorDeclaration; name: Identifier; }; function isParameterPropertyDeclaration(node: Node, parent: Node): node is ParameterPropertyDeclaration; function isEmptyBindingPattern(node: BindingName): node is BindingPattern; function isEmptyBindingElement(node: BindingElement): boolean; function walkUpBindingElementsAndPatterns(binding: BindingElement): VariableDeclaration | ParameterDeclaration; function getCombinedModifierFlags(node: Declaration): ModifierFlags; function getCombinedNodeFlags(node: Node): NodeFlags; /** * Checks to see if the locale is in the appropriate format, * and if it is, attempts to set the appropriate language. */ function validateLocaleAndSetLanguage(locale: string, sys: { getExecutingFilePath(): string; resolvePath(path: string): string; fileExists(fileName: string): boolean; readFile(fileName: string): string | undefined; }, errors?: Push<Diagnostic>): void; function getOriginalNode(node: Node): Node; function getOriginalNode<T extends Node>(node: Node, nodeTest: (node: Node) => node is T): T; function getOriginalNode(node: Node | undefined): Node | undefined; function getOriginalNode<T extends Node>(node: Node | undefined, nodeTest: (node: Node | undefined) => node is T): T | undefined; /** * Iterates through the parent chain of a node and performs the callback on each parent until the callback * returns a truthy value, then returns that value. 
* If no such value is found, it applies the callback until the parent pointer is undefined or the callback returns "quit" * At that point findAncestor returns undefined. */ function findAncestor<T extends Node>(node: Node | undefined, callback: (element: Node) => element is T): T | undefined; function findAncestor(node: Node | undefined, callback: (element: Node) => boolean | "quit"): Node | undefined; /** * Gets a value indicating whether a node originated in the parse tree. * * @param node The node to test. */ function isParseTreeNode(node: Node): boolean; /** * Gets the original parse tree node for a node. * * @param node The original node. * @returns The original parse tree node if found; otherwise, undefined. */ function getParseTreeNode(node: Node | undefined): Node | undefined; /** * Gets the original parse tree node for a node. * * @param node The original node. * @param nodeTest A callback used to ensure the correct type of parse tree node is returned. * @returns The original parse tree node if found; otherwise, undefined. */ function getParseTreeNode<T extends Node>(node: T | undefined, nodeTest?: (node: Node) => node is T): T | undefined; /** Add an extra underscore to identifiers that start with two underscores to avoid issues with magic names like '__proto__' */ function escapeLeadingUnderscores(identifier: string): __String; /** * Remove extra underscore from escaped identifier text content. * * @param identifier The escaped identifier text. * @returns The unescaped identifier text. */ function unescapeLeadingUnderscores(identifier: __String): string; function idText(identifierOrPrivateName: Identifier | PrivateIdentifier): string; function symbolName(symbol: Symbol): string; function getNameOfJSDocTypedef(declaration: JSDocTypedefTag): Identifier | PrivateIdentifier | undefined; function getNameOfDeclaration(declaration: Declaration | Expression): DeclarationName | undefined; /** * Gets the JSDoc parameter tags for the node if present. 
* * @remarks Returns any JSDoc param tag whose name matches the provided * parameter, whether a param tag on a containing function * expression, or a param tag on a variable declaration whose * initializer is the containing function. The tags closest to the * node are returned first, so in the previous example, the param * tag on the containing function expression would be first. * * For binding patterns, parameter tags are matched by position. */ function getJSDocParameterTags(param: ParameterDeclaration): readonly JSDocParameterTag[]; /** * Gets the JSDoc type parameter tags for the node if present. * * @remarks Returns any JSDoc template tag whose names match the provided * parameter, whether a template tag on a containing function * expression, or a template tag on a variable declaration whose * initializer is the containing function. The tags closest to the * node are returned first, so in the previous example, the template * tag on the containing function expression would be first. */ function getJSDocTypeParameterTags(param: TypeParameterDeclaration): readonly JSDocTemplateTag[]; /** * Return true if the node has JSDoc parameter tags. * * @remarks Includes parameter tags that are not directly on the node, * for example on a variable declaration whose initializer is a function expression. 
*/ function hasJSDocParameterTags(node: FunctionLikeDeclaration | SignatureDeclaration): boolean; /** Gets the JSDoc augments tag for the node if present */ function getJSDocAugmentsTag(node: Node): JSDocAugmentsTag | undefined; /** Gets the JSDoc implements tags for the node if present */ function getJSDocImplementsTags(node: Node): readonly JSDocImplementsTag[]; /** Gets the JSDoc class tag for the node if present */ function getJSDocClassTag(node: Node): JSDocClassTag | undefined; /** Gets the JSDoc public tag for the node if present */ function getJSDocPublicTag(node: Node): JSDocPublicTag | undefined; /** Gets the JSDoc private tag for the node if present */ function getJSDocPrivateTag(node: Node): JSDocPrivateTag | undefined; /** Gets the JSDoc protected tag for the node if present */ function getJSDocProtectedTag(node: Node): JSDocProtectedTag | undefined; /** Gets the JSDoc protected tag for the node if present */ function getJSDocReadonlyTag(node: Node): JSDocReadonlyTag | undefined; /** Gets the JSDoc deprecated tag for the node if present */ function getJSDocDeprecatedTag(node: Node): JSDocDeprecatedTag | undefined; /** Gets the JSDoc enum tag for the node if present */ function getJSDocEnumTag(node: Node): JSDocEnumTag | undefined; /** Gets the JSDoc this tag for the node if present */ function getJSDocThisTag(node: Node): JSDocThisTag | undefined; /** Gets the JSDoc return tag for the node if present */ function getJSDocReturnTag(node: Node): JSDocReturnTag | undefined; /** Gets the JSDoc template tag for the node if present */ function getJSDocTemplateTag(node: Node): JSDocTemplateTag | undefined; /** Gets the JSDoc type tag for the node if present and valid */ function getJSDocTypeTag(node: Node): JSDocTypeTag | undefined; /** * Gets the type node for the node if provided via JSDoc. 
* * @remarks The search includes any JSDoc param tag that relates * to the provided parameter, for example a type tag on the * parameter itself, or a param tag on a containing function * expression, or a param tag on a variable declaration whose * initializer is the containing function. The tags closest to the * node are examined first, so in the previous example, the type * tag directly on the node would be returned. */ function getJSDocType(node: Node): TypeNode | undefined; /** * Gets the return type node for the node if provided via JSDoc return tag or type tag. * * @remarks `getJSDocReturnTag` just gets the whole JSDoc tag. This function * gets the type from inside the braces, after the fat arrow, etc. */ function getJSDocReturnType(node: Node): TypeNode | undefined; /** Get all JSDoc tags related to a node, including those on parent nodes. */ function getJSDocTags(node: Node): readonly JSDocTag[]; /** Gets all JSDoc tags that match a specified predicate */ function getAllJSDocTags<T extends JSDocTag>(node: Node, predicate: (tag: JSDocTag) => tag is T): readonly T[]; /** Gets all JSDoc tags of a specified kind */ function getAllJSDocTagsOfKind(node: Node, kind: SyntaxKind): readonly JSDocTag[]; /** * Gets the effective type parameters. If the node was parsed in a * JavaScript file, gets the type parameters from the `@template` tag from JSDoc. 
*/ function getEffectiveTypeParameterDeclarations(node: DeclarationWithTypeParameters): readonly TypeParameterDeclaration[]; function getEffectiveConstraintOfTypeParameter(node: TypeParameterDeclaration): TypeNode | undefined; function isIdentifierOrPrivateIdentifier(node: Node): node is Identifier | PrivateIdentifier; function isPropertyAccessChain(node: Node): node is PropertyAccessChain; function isElementAccessChain(node: Node): node is ElementAccessChain; function isCallChain(node: Node): node is CallChain; function isOptionalChain(node: Node): node is PropertyAccessChain | ElementAccessChain | CallChain | NonNullChain; function isNullishCoalesce(node: Node): boolean; function isConstTypeReference(node: Node): boolean; function skipPartiallyEmittedExpressions(node: Expression): Expression; function skipPartiallyEmittedExpressions(node: Node): Node; function isNonNullChain(node: Node): node is NonNullChain; function isBreakOrContinueStatement(node: Node): node is BreakOrContinueStatement; function isNamedExportBindings(node: Node): node is NamedExportBindings; function isUnparsedTextLike(node: Node): node is UnparsedTextLike; function isUnparsedNode(node: Node): node is UnparsedNode; function isJSDocPropertyLikeTag(node: Node): node is JSDocPropertyLikeTag; /** * True if node is of some token syntax kind. * For example, this is true for an IfKeyword but not for an IfStatement. * Literals are considered tokens, except TemplateLiteral, but does include TemplateHead/Middle/Tail. 
*/ function isToken(n: Node): boolean; function isLiteralExpression(node: Node): node is LiteralExpression; function isTemplateLiteralToken(node: Node): node is TemplateLiteralToken; function isTemplateMiddleOrTemplateTail(node: Node): node is TemplateMiddle | TemplateTail; function isImportOrExportSpecifier(node: Node): node is ImportSpecifier | ExportSpecifier; function isTypeOnlyImportOrExportDeclaration(node: Node): node is TypeOnlyCompatibleAliasDeclaration; function isStringTextContainingNode(node: Node): node is StringLiteral | TemplateLiteralToken; function isModifier(node: Node): node is Modifier; function isEntityName(node: Node): node is EntityName; function isPropertyName(node: Node): node is PropertyName; function isBindingName(node: Node): node is BindingName; function isFunctionLike(node: Node): node is SignatureDeclaration; function isClassElement(node: Node): node is ClassElement; function isClassLike(node: Node): node is ClassLikeDeclaration; function isAccessor(node: Node): node is AccessorDeclaration; function isTypeElement(node: Node): node is TypeElement; function isClassOrTypeElement(node: Node): node is ClassElement | TypeElement; function isObjectLiteralElementLike(node: Node): node is ObjectLiteralElementLike; /** * Node test that determines whether a node is a valid type node. * This differs from the `isPartOfTypeNode` function which determines whether a node is *part* * of a TypeNode. 
*/ function isTypeNode(node: Node): node is TypeNode; function isFunctionOrConstructorTypeNode(node: Node): node is FunctionTypeNode | ConstructorTypeNode; function isPropertyAccessOrQualifiedName(node: Node): node is PropertyAccessExpression | QualifiedName; function isCallLikeExpression(node: Node): node is CallLikeExpression; function isCallOrNewExpression(node: Node): node is CallExpression | NewExpression; function isTemplateLiteral(node: Node): node is TemplateLiteral; function isAssertionExpression(node: Node): node is AssertionExpression; function isIterationStatement(node: Node, lookInLabeledStatements: false): node is IterationStatement; function isIterationStatement(node: Node, lookInLabeledStatements: boolean): node is IterationStatement | LabeledStatement; function isJsxOpeningLikeElement(node: Node): node is JsxOpeningLikeElement; function isCaseOrDefaultClause(node: Node): node is CaseOrDefaultClause; /** True if node is of a kind that may contain comment text. */ function isJSDocCommentContainingNode(node: Node): boolean; function isSetAccessor(node: Node): node is SetAccessorDeclaration; function isGetAccessor(node: Node): node is GetAccessorDeclaration; /** True if has initializer node attached to it. 
*/ function hasOnlyExpressionInitializer(node: Node): node is HasExpressionInitializer; function isObjectLiteralElement(node: Node): node is ObjectLiteralElement; function isStringLiteralLike(node: Node): node is StringLiteralLike; } declare namespace ts { const factory: NodeFactory; function createUnparsedSourceFile(text: string): UnparsedSource; function createUnparsedSourceFile(inputFile: InputFiles, type: "js" | "dts", stripInternal?: boolean): UnparsedSource; function createUnparsedSourceFile(text: string, mapPath: string | undefined, map: string | undefined): UnparsedSource; function createInputFiles(javascriptText: string, declarationText: string): InputFiles; function createInputFiles(readFileText: (path: string) => string | undefined, javascriptPath: string, javascriptMapPath: string | undefined, declarationPath: string, declarationMapPath: string | undefined, buildInfoPath: string | undefined): InputFiles; function createInputFiles(javascriptText: string, declarationText: string, javascriptMapPath: string | undefined, javascriptMapText: string | undefined, declarationMapPath: string | undefined, declarationMapText: string | undefined): InputFiles; /** * Create an external source map source file reference */ function createSourceMapSource(fileName: string, text: string, skipTrivia?: (pos: number) => number): SourceMapSource; function setOriginalNode<T extends Node>(node: T, original: Node | undefined): T; } declare namespace ts { /** * Clears any `EmitNode` entries from parse-tree nodes. * @param sourceFile A source file. */ function disposeEmitNodes(sourceFile: SourceFile | undefined): void; /** * Sets flags that control emit behavior of a node. */ function setEmitFlags<T extends Node>(node: T, emitFlags: EmitFlags): T; /** * Gets a custom text range to use when emitting source maps. */ function getSourceMapRange(node: Node): SourceMapRange; /** * Sets a custom text range to use when emitting source maps. 
*/ function setSourceMapRange<T extends Node>(node: T, range: SourceMapRange | undefined): T; /** * Gets the TextRange to use for source maps for a token of a node. */ function getTokenSourceMapRange(node: Node, token: SyntaxKind): SourceMapRange | undefined; /** * Sets the TextRange to use for source maps for a token of a node. */ function setTokenSourceMapRange<T extends Node>(node: T, token: SyntaxKind, range: SourceMapRange | undefined): T; /** * Gets a custom text range to use when emitting comments. */ function getCommentRange(node: Node): TextRange; /** * Sets a custom text range to use when emitting comments. */ function setCommentRange<T extends Node>(node: T, range: TextRange): T; function getSyntheticLeadingComments(node: Node): SynthesizedComment[] | undefined; function setSyntheticLeadingComments<T extends Node>(node: T, comments: SynthesizedComment[] | undefined): T; function addSyntheticLeadingComment<T extends Node>(node: T, kind: SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia, text: string, hasTrailingNewLine?: boolean): T; function getSyntheticTrailingComments(node: Node): SynthesizedComment[] | undefined; function setSyntheticTrailingComments<T extends Node>(node: T, comments: SynthesizedComment[] | undefined): T; function addSyntheticTrailingComment<T extends Node>(node: T, kind: SyntaxKind.SingleLineCommentTrivia | SyntaxKind.MultiLineCommentTrivia, text: string, hasTrailingNewLine?: boolean): T; function moveSyntheticComments<T extends Node>(node: T, original: Node): T; /** * Gets the constant value to emit for an expression representing an enum. */ function getConstantValue(node: AccessExpression): string | number | undefined; /** * Sets the constant value to emit for an expression. */ function setConstantValue(node: AccessExpression, value: string | number): AccessExpression; /** * Adds an EmitHelper to a node. */ function addEmitHelper<T extends Node>(node: T, helper: EmitHelper): T; /** * Add EmitHelpers to a node. 
*/ function addEmitHelpers<T extends Node>(node: T, helpers: EmitHelper[] | undefined): T; /** * Removes an EmitHelper from a node. */ function removeEmitHelper(node: Node, helper: EmitHelper): boolean; /** * Gets the EmitHelpers of a node. */ function getEmitHelpers(node: Node): EmitHelper[] | undefined; /** * Moves matching emit helpers from a source node to a target node. */ function moveEmitHelpers(source: Node, target: Node, predicate: (helper: EmitHelper) => boolean): void; } declare namespace ts { function isNumericLiteral(node: Node): node is NumericLiteral; function isBigIntLiteral(node: Node): node is BigIntLiteral; function isStringLiteral(node: Node): node is StringLiteral; function isJsxText(node: Node): node is JsxText; function isRegularExpressionLiteral(node: Node): node is RegularExpressionLiteral; function isNoSubstitutionTemplateLiteral(node: Node): node is NoSubstitutionTemplateLiteral; function isTemplateHead(node: Node): node is TemplateHead; function isTemplateMiddle(node: Node): node is TemplateMiddle; function isTemplateTail(node: Node): node is TemplateTail; function isIdentifier(node: Node): node is Identifier; function isQualifiedName(node: Node): node is QualifiedName; function isComputedPropertyName(node: Node): node is ComputedPropertyName; function isPrivateIdentifier(node: Node): node is PrivateIdentifier; function isTypeParameterDeclaration(node: Node): node is TypeParameterDeclaration; function isParameter(node: Node): node is ParameterDeclaration; function isDecorator(node: Node): node is Decorator; function isPropertySignature(node: Node): node is PropertySignature; function isPropertyDeclaration(node: Node): node is PropertyDeclaration; function isMethodSignature(node: Node): node is MethodSignature; function isMethodDeclaration(node: Node): node is MethodDeclaration; function isConstructorDeclaration(node: Node): node is ConstructorDeclaration; function isGetAccessorDeclaration(node: Node): node is GetAccessorDeclaration; 
function isSetAccessorDeclaration(node: Node): node is SetAccessorDeclaration; function isCallSignatureDeclaration(node: Node): node is CallSignatureDeclaration; function isConstructSignatureDeclaration(node: Node): node is ConstructSignatureDeclaration; function isIndexSignatureDeclaration(node: Node): node is IndexSignatureDeclaration; function isTypePredicateNode(node: Node): node is TypePredicateNode; function isTypeReferenceNode(node: Node): node is TypeReferenceNode; function isFunctionTypeNode(node: Node): node is FunctionTypeNode; function isConstructorTypeNode(node: Node): node is ConstructorTypeNode; function isTypeQueryNode(node: Node): node is TypeQueryNode; function isTypeLiteralNode(node: Node): node is TypeLiteralNode; function isArrayTypeNode(node: Node): node is ArrayTypeNode; function isTupleTypeNode(node: Node): node is TupleTypeNode; function isNamedTupleMember(node: Node): node is NamedTupleMember; function isOptionalTypeNode(node: Node): node is OptionalTypeNode; function isRestTypeNode(node: Node): node is RestTypeNode; function isUnionTypeNode(node: Node): node is UnionTypeNode; function isIntersectionTypeNode(node: Node): node is IntersectionTypeNode; function isConditionalTypeNode(node: Node): node is ConditionalTypeNode; function isInferTypeNode(node: Node): node is InferTypeNode; function isParenthesizedTypeNode(node: Node): node is ParenthesizedTypeNode; function isThisTypeNode(node: Node): node is ThisTypeNode; function isTypeOperatorNode(node: Node): node is TypeOperatorNode; function isIndexedAccessTypeNode(node: Node): node is IndexedAccessTypeNode; function isMappedTypeNode(node: Node): node is MappedTypeNode; function isLiteralTypeNode(node: Node): node is LiteralTypeNode; function isImportTypeNode(node: Node): node is ImportTypeNode; function isTemplateLiteralTypeSpan(node: Node): node is TemplateLiteralTypeSpan; function isTemplateLiteralTypeNode(node: Node): node is TemplateLiteralTypeNode; function isObjectBindingPattern(node: 
Node): node is ObjectBindingPattern; function isArrayBindingPattern(node: Node): node is ArrayBindingPattern; function isBindingElement(node: Node): node is BindingElement; function isArrayLiteralExpression(node: Node): node is ArrayLiteralExpression; function isObjectLiteralExpression(node: Node): node is ObjectLiteralExpression; function isPropertyAccessExpression(node: Node): node is PropertyAccessExpression; function isElementAccessExpression(node: Node): node is ElementAccessExpression; function isCallExpression(node: Node): node is CallExpression; function isNewExpression(node: Node): node is NewExpression; function isTaggedTemplateExpression(node: Node): node is TaggedTemplateExpression; function isTypeAssertionExpression(node: Node): node is TypeAssertion; function isParenthesizedExpression(node: Node): node is ParenthesizedExpression; function isFunctionExpression(node: Node): node is FunctionExpression; function isArrowFunction(node: Node): node is ArrowFunction; function isDeleteExpression(node: Node): node is DeleteExpression; function isTypeOfExpression(node: Node): node is TypeOfExpression; function isVoidExpression(node: Node): node is VoidExpression; function isAwaitExpression(node: Node): node is AwaitExpression; function isPrefixUnaryExpression(node: Node): node is PrefixUnaryExpression; function isPostfixUnaryExpression(node: Node): node is PostfixUnaryExpression; function isBinaryExpression(node: Node): node is BinaryExpression; function isConditionalExpression(node: Node): node is ConditionalExpression; function isTemplateExpression(node: Node): node is TemplateExpression; function isYieldExpression(node: Node): node is YieldExpression; function isSpreadElement(node: Node): node is SpreadElement; function isClassExpression(node: Node): node is ClassExpression; function isOmittedExpression(node: Node): node is OmittedExpression; function isExpressionWithTypeArguments(node: Node): node is ExpressionWithTypeArguments; function isAsExpression(node: 
Node): node is AsExpression; function isNonNullExpression(node: Node): node is NonNullExpression; function isMetaProperty(node: Node): node is MetaProperty; function isSyntheticExpression(node: Node): node is SyntheticExpression; function isPartiallyEmittedExpression(node: Node): node is PartiallyEmittedExpression; function isCommaListExpression(node: Node): node is CommaListExpression; function isTemplateSpan(node: Node): node is TemplateSpan; function isSemicolonClassElement(node: Node): node is SemicolonClassElement; function isBlock(node: Node): node is Block; function isVariableStatement(node: Node): node is VariableStatement; function isEmptyStatement(node: Node): node is EmptyStatement; function isExpressionStatement(node: Node): node is ExpressionStatement; function isIfStatement(node: Node): node is IfStatement; function isDoStatement(node: Node): node is DoStatement; function isWhileStatement(node: Node): node is WhileStatement; function isForStatement(node: Node): node is ForStatement; function isForInStatement(node: Node): node is ForInStatement; function isForOfStatement(node: Node): node is ForOfStatement; function isContinueStatement(node: Node): node is ContinueStatement; function isBreakStatement(node: Node): node is BreakStatement; function isReturnStatement(node: Node): node is ReturnStatement; function isWithStatement(node: Node): node is WithStatement; function isSwitchStatement(node: Node): node is SwitchStatement; function isLabeledStatement(node: Node): node is LabeledStatement; function isThrowStatement(node: Node): node is ThrowStatement; function isTryStatement(node: Node): node is TryStatement; function isDebuggerStatement(node: Node): node is DebuggerStatement; function isVariableDeclaration(node: Node): node is VariableDeclaration; function isVariableDeclarationList(node: Node): node is VariableDeclarationList; function isFunctionDeclaration(node: Node): node is FunctionDeclaration; function isClassDeclaration(node: Node): node is 
ClassDeclaration; function isInterfaceDeclaration(node: Node): node is InterfaceDeclaration; function isTypeAliasDeclaration(node: Node): node is TypeAliasDeclaration; function isEnumDeclaration(node: Node): node is EnumDeclaration; function isModuleDeclaration(node: Node): node is ModuleDeclaration; function isModuleBlock(node: Node): node is ModuleBlock; function isCaseBlock(node: Node): node is CaseBlock; function isNamespaceExportDeclaration(node: Node): node is NamespaceExportDeclaration; function isImportEqualsDeclaration(node: Node): node is ImportEqualsDeclaration; function isImportDeclaration(node: Node): node is ImportDeclaration; function isImportClause(node: Node): node is ImportClause; function isNamespaceImport(node: Node): node is NamespaceImport; function isNamespaceExport(node: Node): node is NamespaceExport; function isNamedImports(node: Node): node is NamedImports; function isImportSpecifier(node: Node): node is ImportSpecifier; function isExportAssignment(node: Node): node is ExportAssignment; function isExportDeclaration(node: Node): node is ExportDeclaration; function isNamedExports(node: Node): node is NamedExports; function isExportSpecifier(node: Node): node is ExportSpecifier; function isMissingDeclaration(node: Node): node is MissingDeclaration; function isNotEmittedStatement(node: Node): node is NotEmittedStatement; function isExternalModuleReference(node: Node): node is ExternalModuleReference; function isJsxElement(node: Node): node is JsxElement; function isJsxSelfClosingElement(node: Node): node is JsxSelfClosingElement; function isJsxOpeningElement(node: Node): node is JsxOpeningElement; function isJsxClosingElement(node: Node): node is JsxClosingElement; function isJsxFragment(node: Node): node is JsxFragment; function isJsxOpeningFragment(node: Node): node is JsxOpeningFragment; function isJsxClosingFragment(node: Node): node is JsxClosingFragment; function isJsxAttribute(node: Node): node is JsxAttribute; function 
isJsxAttributes(node: Node): node is JsxAttributes; function isJsxSpreadAttribute(node: Node): node is JsxSpreadAttribute; function isJsxExpression(node: Node): node is JsxExpression; function isCaseClause(node: Node): node is CaseClause; function isDefaultClause(node: Node): node is DefaultClause; function isHeritageClause(node: Node): node is HeritageClause; function isCatchClause(node: Node): node is CatchClause; function isPropertyAssignment(node: Node): node is PropertyAssignment; function isShorthandPropertyAssignment(node: Node): node is ShorthandPropertyAssignment; function isSpreadAssignment(node: Node): node is SpreadAssignment; function isEnumMember(node: Node): node is EnumMember; function isUnparsedPrepend(node: Node): node is UnparsedPrepend; function isSourceFile(node: Node): node is SourceFile; function isBundle(node: Node): node is Bundle; function isUnparsedSource(node: Node): node is UnparsedSource; function isJSDocTypeExpression(node: Node): node is JSDocTypeExpression; function isJSDocNameReference(node: Node): node is JSDocNameReference; function isJSDocAllType(node: Node): node is JSDocAllType; function isJSDocUnknownType(node: Node): node is JSDocUnknownType; function isJSDocNullableType(node: Node): node is JSDocNullableType; function isJSDocNonNullableType(node: Node): node is JSDocNonNullableType; function isJSDocOptionalType(node: Node): node is JSDocOptionalType; function isJSDocFunctionType(node: Node): node is JSDocFunctionType; function isJSDocVariadicType(node: Node): node is JSDocVariadicType; function isJSDocNamepathType(node: Node): node is JSDocNamepathType; function isJSDoc(node: Node): node is JSDoc; function isJSDocTypeLiteral(node: Node): node is JSDocTypeLiteral; function isJSDocSignature(node: Node): node is JSDocSignature; function isJSDocAugmentsTag(node: Node): node is JSDocAugmentsTag; function isJSDocAuthorTag(node: Node): node is JSDocAuthorTag; function isJSDocClassTag(node: Node): node is JSDocClassTag; function 
isJSDocCallbackTag(node: Node): node is JSDocCallbackTag; function isJSDocPublicTag(node: Node): node is JSDocPublicTag; function isJSDocPrivateTag(node: Node): node is JSDocPrivateTag; function isJSDocProtectedTag(node: Node): node is JSDocProtectedTag; function isJSDocReadonlyTag(node: Node): node is JSDocReadonlyTag; function isJSDocDeprecatedTag(node: Node): node is JSDocDeprecatedTag; function isJSDocEnumTag(node: Node): node is JSDocEnumTag; function isJSDocParameterTag(node: Node): node is JSDocParameterTag; function isJSDocReturnTag(node: Node): node is JSDocReturnTag; function isJSDocThisTag(node: Node): node is JSDocThisTag; function isJSDocTypeTag(node: Node): node is JSDocTypeTag; function isJSDocTemplateTag(node: Node): node is JSDocTemplateTag; function isJSDocTypedefTag(node: Node): node is JSDocTypedefTag; function isJSDocUnknownTag(node: Node): node is JSDocUnknownTag; function isJSDocPropertyTag(node: Node): node is JSDocPropertyTag; function isJSDocImplementsTag(node: Node): node is JSDocImplementsTag; } declare namespace ts { function setTextRange<T extends TextRange>(range: T, location: TextRange | undefined): T; } declare namespace ts { /** * Invokes a callback for each child of the given node. The 'cbNode' callback is invoked for all child nodes * stored in properties. If a 'cbNodes' callback is specified, it is invoked for embedded arrays; otherwise, * embedded arrays are flattened and the 'cbNode' callback is invoked for each element. If a callback returns * a truthy value, iteration stops and that value is returned. Otherwise, undefined is returned. * * @param node a given node to visit its children * @param cbNode a callback to be invoked for all child nodes * @param cbNodes a callback to be invoked for embedded array * * @remarks `forEachChild` must visit the children of a node in the order * that they appear in the source code. The language service depends on this property to locate nodes by position. 
*/ export function forEachChild<T>(node: Node, cbNode: (node: Node) => T | undefined, cbNodes?: (nodes: NodeArray<Node>) => T | undefined): T | undefined; export function createSourceFile(fileName: string, sourceText: string, languageVersion: ScriptTarget, setParentNodes?: boolean, scriptKind?: ScriptKind): SourceFile; export function parseIsolatedEntityName(text: string, languageVersion: ScriptTarget): EntityName | undefined; /** * Parse json text into SyntaxTree and return node and parse errors if any * @param fileName * @param sourceText */ export function parseJsonText(fileName: string, sourceText: string): JsonSourceFile; export function isExternalModule(file: SourceFile): boolean; export function updateSourceFile(sourceFile: SourceFile, newText: string, textChangeRange: TextChangeRange, aggressiveChecks?: boolean): SourceFile; export {}; } declare namespace ts { export function parseCommandLine(commandLine: readonly string[], readFile?: (path: string) => string | undefined): ParsedCommandLine; export type DiagnosticReporter = (diagnostic: Diagnostic) => void; /** * Reports config file diagnostics */ export interface ConfigFileDiagnosticsReporter { /** * Reports unrecoverable error when parsing config file */ onUnRecoverableConfigFileDiagnostic: DiagnosticReporter; } /** * Interface extending ParseConfigHost to support ParseConfigFile that reads config file and reports errors */ export interface ParseConfigFileHost extends ParseConfigHost, ConfigFileDiagnosticsReporter { getCurrentDirectory(): string; } /** * Reads the config file, reports errors if any and exits if the config file cannot be found */ export function getParsedCommandLineOfConfigFile(configFileName: string, optionsToExtend: CompilerOptions, host: ParseConfigFileHost, extendedConfigCache?: Map<ExtendedConfigCacheEntry>, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): ParsedCommandLine | undefined; /** * Read tsconfig.json file * @param fileName The path 
to the config file */ export function readConfigFile(fileName: string, readFile: (path: string) => string | undefined): { config?: any; error?: Diagnostic; }; /** * Parse the text of the tsconfig.json file * @param fileName The path to the config file * @param jsonText The text of the config file */ export function parseConfigFileTextToJson(fileName: string, jsonText: string): { config?: any; error?: Diagnostic; }; /** * Read tsconfig.json file * @param fileName The path to the config file */ export function readJsonConfigFile(fileName: string, readFile: (path: string) => string | undefined): TsConfigSourceFile; /** * Convert the json syntax tree into the json value */ export function convertToObject(sourceFile: JsonSourceFile, errors: Push<Diagnostic>): any; /** * Parse the contents of a config file (tsconfig.json). * @param json The contents of the config file to parse * @param host Instance of ParseConfigHost used to enumerate files in folder. * @param basePath A root directory to resolve relative path entries in the config * file to. e.g. outDir */ export function parseJsonConfigFileContent(json: any, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine; /** * Parse the contents of a config file (tsconfig.json). * @param jsonNode The contents of the config file to parse * @param host Instance of ParseConfigHost used to enumerate files in folder. * @param basePath A root directory to resolve relative path entries in the config * file to. e.g. 
outDir */ export function parseJsonSourceFileConfigFileContent(sourceFile: TsConfigSourceFile, host: ParseConfigHost, basePath: string, existingOptions?: CompilerOptions, configFileName?: string, resolutionStack?: Path[], extraFileExtensions?: readonly FileExtensionInfo[], extendedConfigCache?: Map<ExtendedConfigCacheEntry>, existingWatchOptions?: WatchOptions): ParsedCommandLine; export interface ParsedTsconfig { raw: any; options?: CompilerOptions; watchOptions?: WatchOptions; typeAcquisition?: TypeAcquisition; /** * Note that the case of the config path has not yet been normalized, as no files have been imported into the project yet */ extendedConfigPath?: string; } export interface ExtendedConfigCacheEntry { extendedResult: TsConfigSourceFile; extendedConfig: ParsedTsconfig | undefined; } export function convertCompilerOptionsFromJson(jsonOptions: any, basePath: string, configFileName?: string): { options: CompilerOptions; errors: Diagnostic[]; }; export function convertTypeAcquisitionFromJson(jsonOptions: any, basePath: string, configFileName?: string): { options: TypeAcquisition; errors: Diagnostic[]; }; export {}; } declare namespace ts { function getEffectiveTypeRoots(options: CompilerOptions, host: GetEffectiveTypeRootsHost): string[] | undefined; /** * @param {string | undefined} containingFile - file that contains type reference directive, can be undefined if containing file is unknown. * This is possible in case if resolution is performed for directives specified via 'types' parameter. In this case initial path for secondary lookups * is assumed to be the same as root directory of the project. 
*/ function resolveTypeReferenceDirective(typeReferenceDirectiveName: string, containingFile: string | undefined, options: CompilerOptions, host: ModuleResolutionHost, redirectedReference?: ResolvedProjectReference): ResolvedTypeReferenceDirectiveWithFailedLookupLocations; /** * Given a set of options, returns the set of type directive names * that should be included for this program automatically. * This list could either come from the config file, * or from enumerating the types root + initial secondary types lookup location. * More type directives might appear in the program later as a result of loading actual source files; * this list is only the set of defaults that are implicitly included. */ function getAutomaticTypeDirectiveNames(options: CompilerOptions, host: ModuleResolutionHost): string[]; /** * Cached module resolutions per containing directory. * This assumes that any module id will have the same resolution for sibling files located in the same folder. */ interface ModuleResolutionCache extends NonRelativeModuleNameResolutionCache { getOrCreateCacheForDirectory(directoryName: string, redirectedReference?: ResolvedProjectReference): Map<ResolvedModuleWithFailedLookupLocations>; } /** * Stored map from non-relative module name to a table: directory -> result of module lookup in this directory * We support only non-relative module names because resolution of relative module names is usually more deterministic and thus less expensive. 
*/ interface NonRelativeModuleNameResolutionCache { getOrCreateCacheForModuleName(nonRelativeModuleName: string, redirectedReference?: ResolvedProjectReference): PerModuleNameCache; } interface PerModuleNameCache { get(directory: string): ResolvedModuleWithFailedLookupLocations | undefined; set(directory: string, result: ResolvedModuleWithFailedLookupLocations): void; } function createModuleResolutionCache(currentDirectory: string, getCanonicalFileName: (s: string) => string, options?: CompilerOptions): ModuleResolutionCache; function resolveModuleNameFromCache(moduleName: string, containingFile: string, cache: ModuleResolutionCache): ResolvedModuleWithFailedLookupLocations | undefined; function resolveModuleName(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: ModuleResolutionCache, redirectedReference?: ResolvedProjectReference): ResolvedModuleWithFailedLookupLocations; function nodeModuleNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: ModuleResolutionCache, redirectedReference?: ResolvedProjectReference): ResolvedModuleWithFailedLookupLocations; function classicNameResolver(moduleName: string, containingFile: string, compilerOptions: CompilerOptions, host: ModuleResolutionHost, cache?: NonRelativeModuleNameResolutionCache, redirectedReference?: ResolvedProjectReference): ResolvedModuleWithFailedLookupLocations; } declare namespace ts { /** * Visits a Node using the supplied visitor, possibly returning a new Node in its place. * * @param node The Node to visit. * @param visitor The callback used to visit the Node. * @param test A callback to execute to verify the Node is valid. * @param lift An optional callback to execute to lift a NodeArray into a valid Node. 
*/ function visitNode<T extends Node>(node: T, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T; /** * Visits a Node using the supplied visitor, possibly returning a new Node in its place. * * @param node The Node to visit. * @param visitor The callback used to visit the Node. * @param test A callback to execute to verify the Node is valid. * @param lift An optional callback to execute to lift a NodeArray into a valid Node. */ function visitNode<T extends Node>(node: T | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T | undefined; /** * Visits a NodeArray using the supplied visitor, possibly returning a new NodeArray in its place. * * @param nodes The NodeArray to visit. * @param visitor The callback used to visit a Node. * @param test A node test to execute for each node. * @param start An optional value indicating the starting offset at which to start visiting. * @param count An optional value indicating the maximum number of nodes to visit. */ function visitNodes<T extends Node>(nodes: NodeArray<T>, visitor: Visitor | undefined, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T>; /** * Visits a NodeArray using the supplied visitor, possibly returning a new NodeArray in its place. * * @param nodes The NodeArray to visit. * @param visitor The callback used to visit a Node. * @param test A node test to execute for each node. * @param start An optional value indicating the starting offset at which to start visiting. * @param count An optional value indicating the maximum number of nodes to visit. 
*/
function visitNodes<T extends Node>(nodes: NodeArray<T> | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T> | undefined;
/**
 * Starts a new lexical environment and visits a statement list, ending the lexical environment
 * and merging hoisted declarations upon completion.
 */
function visitLexicalEnvironment(statements: NodeArray<Statement>, visitor: Visitor, context: TransformationContext, start?: number, ensureUseStrict?: boolean, nodesVisitor?: NodesVisitor): NodeArray<Statement>;
/**
 * Starts a new lexical environment and visits a parameter list, suspending the lexical
 * environment upon completion.
 */
function visitParameterList(nodes: NodeArray<ParameterDeclaration>, visitor: Visitor, context: TransformationContext, nodesVisitor?: NodesVisitor): NodeArray<ParameterDeclaration>;
function visitParameterList(nodes: NodeArray<ParameterDeclaration> | undefined, visitor: Visitor, context: TransformationContext, nodesVisitor?: NodesVisitor): NodeArray<ParameterDeclaration> | undefined;
/**
 * Resumes a suspended lexical environment and visits a function body, ending the lexical
 * environment and merging hoisted declarations upon completion.
 */
function visitFunctionBody(node: FunctionBody, visitor: Visitor, context: TransformationContext): FunctionBody;
/**
 * Resumes a suspended lexical environment and visits a function body, ending the lexical
 * environment and merging hoisted declarations upon completion.
 */
function visitFunctionBody(node: FunctionBody | undefined, visitor: Visitor, context: TransformationContext): FunctionBody | undefined;
/**
 * Resumes a suspended lexical environment and visits a concise body, ending the lexical
 * environment and merging hoisted declarations upon completion.
 */
function visitFunctionBody(node: ConciseBody, visitor: Visitor, context: TransformationContext): ConciseBody;
/**
 * Visits each child of a Node using the supplied visitor, possibly returning a new Node of the same kind in its place.
 *
 * @param node The Node whose children will be visited.
 * @param visitor The callback used to visit each child.
 * @param context A lexical environment context for the visitor.
 */
function visitEachChild<T extends Node>(node: T, visitor: Visitor, context: TransformationContext): T;
/**
 * Visits each child of a Node using the supplied visitor, possibly returning a new Node of the same kind in its place.
 *
 * @param node The Node whose children will be visited.
 * @param visitor The callback used to visit each child.
 * @param context A lexical environment context for the visitor.
 */
function visitEachChild<T extends Node>(node: T | undefined, visitor: Visitor, context: TransformationContext, nodesVisitor?: typeof visitNodes, tokenVisitor?: Visitor): T | undefined;
}
// Output-path helpers and the pretty-printer factory.
declare namespace ts {
    function getTsBuildInfoEmitOutputFilePath(options: CompilerOptions): string | undefined;
    function getOutputFileNames(commandLine: ParsedCommandLine, inputFileName: string, ignoreCase: boolean): readonly string[];
    function createPrinter(printerOptions?: PrinterOptions, handlers?: PrintHandlers): Printer;
}
// Public program-creation and diagnostic-formatting API.
declare namespace ts {
    export function findConfigFile(searchPath: string, fileExists: (fileName: string) => boolean, configName?: string): string | undefined;
    export function resolveTripleslashReference(moduleName: string, containingFile: string): string;
    export function createCompilerHost(options: CompilerOptions, setParentNodes?: boolean): CompilerHost;
    export function getPreEmitDiagnostics(program: Program, sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[];
    export interface FormatDiagnosticsHost {
        getCurrentDirectory(): string;
        getCanonicalFileName(fileName: string): string;
        getNewLine(): string;
    }
    export function formatDiagnostics(diagnostics: readonly Diagnostic[], host: FormatDiagnosticsHost): string;
    export function formatDiagnostic(diagnostic: Diagnostic, host: FormatDiagnosticsHost): string;
    export function formatDiagnosticsWithColorAndContext(diagnostics: readonly Diagnostic[], host: FormatDiagnosticsHost): string;
    export function flattenDiagnosticMessageText(diag: string | DiagnosticMessageChain | undefined, newLine: string, indent?: number): string;
    export function getConfigFileParsingDiagnostics(configFileParseResult: ParsedCommandLine): readonly Diagnostic[];
    /**
     * Create a new 'Program' instance. A Program is an immutable collection of 'SourceFile's and a 'CompilerOptions'
     * that represent a compilation unit.
     *
     * Creating a program proceeds from a set of root files, expanding the set of inputs by following imports and
     * triple-slash-reference-path directives transitively. '@types' and triple-slash-reference-types are also pulled in.
     *
     * @param createProgramOptions - The options for creating a program.
     * @returns A 'Program' object.
     */
    export function createProgram(createProgramOptions: CreateProgramOptions): Program;
    /**
     * Create a new 'Program' instance. A Program is an immutable collection of 'SourceFile's and a 'CompilerOptions'
     * that represent a compilation unit.
     *
     * Creating a program proceeds from a set of root files, expanding the set of inputs by following imports and
     * triple-slash-reference-path directives transitively. '@types' and triple-slash-reference-types are also pulled in.
     *
     * @param rootNames - A set of root files.
     * @param options - The compiler options which should be used.
     * @param host - The host interacts with the underlying file system.
     * @param oldProgram - Reuses an old program structure.
     * @param configFileParsingDiagnostics - error during config file parsing
     * @returns A 'Program' object.
     */
    export function createProgram(rootNames: readonly string[], options: CompilerOptions, host?: CompilerHost, oldProgram?: Program, configFileParsingDiagnostics?: readonly Diagnostic[]): Program;
    /** @deprecated */
    export interface ResolveProjectReferencePathHost {
        fileExists(fileName: string): boolean;
    }
    /**
     * Returns the target config filename of a project reference.
     * Note: The file might not exist.
     */
    export function resolveProjectReferencePath(ref: ProjectReference): ResolvedConfigFileName;
    /** @deprecated */
    export function resolveProjectReferencePath(host: ResolveProjectReferencePathHost, ref: ProjectReference): ResolvedConfigFileName;
    export {};
}
// Emit-output value shapes shared by the builder and language-service APIs.
declare namespace ts {
    interface EmitOutput {
        outputFiles: OutputFile[];
        emitSkipped: boolean;
    }
    interface OutputFile {
        name: string;
        writeByteOrderMark: boolean;
        text: string;
    }
}
// Incremental-build ("builder") program API.
declare namespace ts {
    type AffectedFileResult<T> = {
        result: T;
        affected: SourceFile | Program;
    } | undefined;
    interface BuilderProgramHost {
        /**
         * return true if file names are treated with case sensitivity
         */
        useCaseSensitiveFileNames(): boolean;
        /**
         * If provided, this hash is used instead of the actual file shape text for detecting changes
         */
        createHash?: (data: string) => string;
        /**
         * When emit or emitNextAffectedFile are called without writeFile,
         * this callback if present would be used to write files
         */
        writeFile?: WriteFileCallback;
    }
    /**
     * Builder to manage the program state changes
     */
    interface BuilderProgram {
        /**
         * Returns current program
         */
        getProgram(): Program;
        /**
         * Get compiler options of the program
         */
        getCompilerOptions(): CompilerOptions;
        /**
         * Get the source file in the program with file name
         */
        getSourceFile(fileName: string): SourceFile | undefined;
        /**
         * Get a list of files in the program
         */
        getSourceFiles(): readonly SourceFile[];
        /**
         * Get the diagnostics for compiler options
         */
        getOptionsDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[];
        /**
         * Get the diagnostics that don't
belong to any file */
        getGlobalDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[];
        /**
         * Get the diagnostics from config file parsing
         */
        getConfigFileParsingDiagnostics(): readonly Diagnostic[];
        /**
         * Get the syntax diagnostics, for all source files if source file is not supplied
         */
        getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[];
        /**
         * Get the declaration diagnostics, for all source files if source file is not supplied
         */
        getDeclarationDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly DiagnosticWithLocation[];
        /**
         * Get all the dependencies of the file
         */
        getAllDependencies(sourceFile: SourceFile): readonly string[];
        /**
         * Gets the semantic diagnostics from the program corresponding to this state of file (if provided) or whole program
         * The semantic diagnostics are cached and managed here
         * Note that it is assumed that when asked about semantic diagnostics through this API,
         * the file has been taken out of affected files so it is safe to use cache or get from program and cache the diagnostics
         * In case of SemanticDiagnosticsBuilderProgram if the source file is not provided,
         * it will iterate through all the affected files, to ensure that cache stays valid and yet provide a way to get all semantic diagnostics
         */
        getSemanticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[];
        /**
         * Emits the JavaScript and declaration files.
         * When targetSource file is specified, emits the files corresponding to that source file,
         * otherwise for the whole program.
         * In case of EmitAndSemanticDiagnosticsBuilderProgram, when targetSourceFile is specified,
         * it is assumed that that file is handled from affected file list. If targetSourceFile is not specified,
         * it will only emit all the affected files instead of whole program
         *
         * The first of writeFile if provided, writeFile of BuilderProgramHost if provided, writeFile of compiler host
         * in that order would be used to write the files
         */
        emit(targetSourceFile?: SourceFile, writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): EmitResult;
        /**
         * Get the current directory of the program
         */
        getCurrentDirectory(): string;
    }
    /**
     * The builder that caches the semantic diagnostics for the program and handles the changed files and affected files
     */
    interface SemanticDiagnosticsBuilderProgram extends BuilderProgram {
        /**
         * Gets the semantic diagnostics from the program for the next affected file and caches it
         * Returns undefined if the iteration is complete
         */
        getSemanticDiagnosticsOfNextAffectedFile(cancellationToken?: CancellationToken, ignoreSourceFile?: (sourceFile: SourceFile) => boolean): AffectedFileResult<readonly Diagnostic[]>;
    }
    /**
     * The builder that can handle the changes in program and iterate through changed file to emit the files
     * The semantic diagnostics are cached per file and managed by clearing for the changed/affected files
     */
    interface EmitAndSemanticDiagnosticsBuilderProgram extends SemanticDiagnosticsBuilderProgram {
        /**
         * Emits the next affected file's emit result (EmitResult and sourceFiles emitted) or returns undefined if iteration is complete
         * The first of writeFile if provided, writeFile of BuilderProgramHost if provided, writeFile of compiler host
         * in that order would be used to write the files
         */
        emitNextAffectedFile(writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): AffectedFileResult<EmitResult>;
    }
    /**
     * Create the builder to manage semantic diagnostics and cache them
     */
    function createSemanticDiagnosticsBuilderProgram(newProgram: Program, host: BuilderProgramHost, oldProgram?: SemanticDiagnosticsBuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[]): SemanticDiagnosticsBuilderProgram;
    function createSemanticDiagnosticsBuilderProgram(rootNames: readonly string[] | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: SemanticDiagnosticsBuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[], projectReferences?: readonly ProjectReference[]): SemanticDiagnosticsBuilderProgram;
    /**
     * Create the builder that can handle the changes in program and iterate through changed files
     * to emit those files and manage semantic diagnostics cache as well
     */
    function createEmitAndSemanticDiagnosticsBuilderProgram(newProgram: Program, host: BuilderProgramHost, oldProgram?: EmitAndSemanticDiagnosticsBuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[]): EmitAndSemanticDiagnosticsBuilderProgram;
    function createEmitAndSemanticDiagnosticsBuilderProgram(rootNames: readonly string[] | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: EmitAndSemanticDiagnosticsBuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[], projectReferences?: readonly ProjectReference[]): EmitAndSemanticDiagnosticsBuilderProgram;
    /**
     * Creates a builder that's just an abstraction over program and can be used with watch
     */
    function createAbstractBuilder(newProgram: Program, host: BuilderProgramHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[]): BuilderProgram;
    function createAbstractBuilder(rootNames: readonly string[] | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: BuilderProgram, configFileParsingDiagnostics?: readonly Diagnostic[], projectReferences?: readonly ProjectReference[]): BuilderProgram;
}
// Watch-mode (--watch) host and program API.
declare namespace ts {
    interface ReadBuildProgramHost {
        useCaseSensitiveFileNames(): boolean;
        getCurrentDirectory(): string;
        readFile(fileName: string):
string | undefined;
    }
    function readBuilderProgram(compilerOptions: CompilerOptions, host: ReadBuildProgramHost): EmitAndSemanticDiagnosticsBuilderProgram | undefined;
    function createIncrementalCompilerHost(options: CompilerOptions, system?: System): CompilerHost;
    interface IncrementalProgramOptions<T extends BuilderProgram> {
        rootNames: readonly string[];
        options: CompilerOptions;
        configFileParsingDiagnostics?: readonly Diagnostic[];
        projectReferences?: readonly ProjectReference[];
        host?: CompilerHost;
        createProgram?: CreateProgram<T>;
    }
    function createIncrementalProgram<T extends BuilderProgram = EmitAndSemanticDiagnosticsBuilderProgram>({ rootNames, options, configFileParsingDiagnostics, projectReferences, host, createProgram }: IncrementalProgramOptions<T>): T;
    type WatchStatusReporter = (diagnostic: Diagnostic, newLine: string, options: CompilerOptions, errorCount?: number) => void;
    /** Create the program with rootNames and options, if they are undefined, oldProgram and new configFile diagnostics create new program */
    type CreateProgram<T extends BuilderProgram> = (rootNames: readonly string[] | undefined, options: CompilerOptions | undefined, host?: CompilerHost, oldProgram?: T, configFileParsingDiagnostics?: readonly Diagnostic[], projectReferences?: readonly ProjectReference[] | undefined) => T;
    /** Host that has watch functionality used in --watch mode */
    interface WatchHost {
        /** If provided, called with Diagnostic message that informs about change in watch status */
        onWatchStatusChange?(diagnostic: Diagnostic, newLine: string, options: CompilerOptions, errorCount?: number): void;
        /** Used to watch changes in source files, missing files needed to update the program or config file */
        watchFile(path: string, callback: FileWatcherCallback, pollingInterval?: number, options?: CompilerOptions): FileWatcher;
        /** Used to watch resolved module's failed lookup locations, config file specs, type roots where auto type reference directives are added */
        watchDirectory(path: string, callback: DirectoryWatcherCallback, recursive?: boolean, options?: CompilerOptions): FileWatcher;
        /** If provided, will be used to set delayed compilation, so that multiple changes in short span are compiled together */
        setTimeout?(callback: (...args: any[]) => void, ms: number, ...args: any[]): any;
        /** If provided, will be used to reset existing delayed compilation */
        clearTimeout?(timeoutId: any): void;
    }
    interface ProgramHost<T extends BuilderProgram> {
        /**
         * Used to create the program when need for program creation or recreation detected
         */
        createProgram: CreateProgram<T>;
        useCaseSensitiveFileNames(): boolean;
        getNewLine(): string;
        getCurrentDirectory(): string;
        getDefaultLibFileName(options: CompilerOptions): string;
        getDefaultLibLocation?(): string;
        createHash?(data: string): string;
        /**
         * Use to check file presence for source files and
         * if resolveModuleNames is not provided (compiler is in charge of module resolution) then module files as well
         */
        fileExists(path: string): boolean;
        /**
         * Use to read file text for source files and
         * if resolveModuleNames is not provided (compiler is in charge of module resolution) then module files as well
         */
        readFile(path: string, encoding?: string): string | undefined;
        /** If provided, used for module resolution as well as to handle directory structure */
        directoryExists?(path: string): boolean;
        /** If provided, used in resolutions as well as handling directory structure */
        getDirectories?(path: string): string[];
        /** If provided, used to cache and handle directory structure modifications */
        readDirectory?(path: string, extensions?: readonly string[], exclude?: readonly string[], include?: readonly string[], depth?: number): string[];
        /** Symbol links resolution */
        realpath?(path: string): string;
        /** If provided would be used to write log about compilation */
        trace?(s: string): void;
        /** If provided is used to get the environment variable */
        getEnvironmentVariable?(name: string): string | undefined;
        /** If provided, used to resolve the module names, otherwise typescript's default module resolution */
        resolveModuleNames?(moduleNames: string[], containingFile: string, reusedNames: string[] | undefined, redirectedReference: ResolvedProjectReference | undefined, options: CompilerOptions): (ResolvedModule | undefined)[];
        /** If provided, used to resolve type reference directives, otherwise typescript's default resolution */
        resolveTypeReferenceDirectives?(typeReferenceDirectiveNames: string[], containingFile: string, redirectedReference: ResolvedProjectReference | undefined, options: CompilerOptions): (ResolvedTypeReferenceDirective | undefined)[];
    }
    interface WatchCompilerHost<T extends BuilderProgram> extends ProgramHost<T>, WatchHost {
        /** Instead of using output d.ts file from project reference, use its source file */
        useSourceOfProjectReferenceRedirect?(): boolean;
        /** If provided, callback to invoke after every new program creation */
        afterProgramCreate?(program: T): void;
    }
    /**
     * Host to create watch with root files and options
     */
    interface WatchCompilerHostOfFilesAndCompilerOptions<T extends BuilderProgram> extends WatchCompilerHost<T> {
        /** root files to use to generate program */
        rootFiles: string[];
        /** Compiler options */
        options: CompilerOptions;
        watchOptions?: WatchOptions;
        /** Project References */
        projectReferences?: readonly ProjectReference[];
    }
    /**
     * Host to create watch with config file
     */
    interface WatchCompilerHostOfConfigFile<T extends BuilderProgram> extends WatchCompilerHost<T>, ConfigFileDiagnosticsReporter {
        /** Name of the config file to compile */
        configFileName: string;
        /** Options to extend */
        optionsToExtend?: CompilerOptions;
        watchOptionsToExtend?: WatchOptions;
        extraFileExtensions?: readonly FileExtensionInfo[];
        /**
         * Used to generate source file names from the config file and its include, exclude, files rules
         * and also to cache the directory structure
         */
        readDirectory(path: string, extensions?: readonly string[], exclude?: readonly string[], include?: readonly string[], depth?: number): string[];
    }
    interface Watch<T> {
        /** Synchronize with host and get updated program */
        getProgram(): T;
        /** Closes the watch */
        close(): void;
    }
    /**
     * Creates the watch that generates program using the config file
     */
    interface WatchOfConfigFile<T> extends Watch<T> {
    }
    /**
     * Creates the watch that generates program using the root files and compiler options
     */
    interface WatchOfFilesAndCompilerOptions<T> extends Watch<T> {
        /** Updates the root files in the program, only if this is not config file compilation */
        updateRootFileNames(fileNames: string[]): void;
    }
    /**
     * Create the watch compiler host for either configFile or fileNames and its options
     */
    function createWatchCompilerHost<T extends BuilderProgram>(configFileName: string, optionsToExtend: CompilerOptions | undefined, system: System, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter, watchOptionsToExtend?: WatchOptions, extraFileExtensions?: readonly FileExtensionInfo[]): WatchCompilerHostOfConfigFile<T>;
    function createWatchCompilerHost<T extends BuilderProgram>(rootFiles: string[], options: CompilerOptions, system: System, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter, projectReferences?: readonly ProjectReference[], watchOptions?: WatchOptions): WatchCompilerHostOfFilesAndCompilerOptions<T>;
    /**
     * Creates the watch from the host for root files and compiler options
     */
    function createWatchProgram<T extends BuilderProgram>(host: WatchCompilerHostOfFilesAndCompilerOptions<T>): WatchOfFilesAndCompilerOptions<T>;
    /**
     * Creates the watch from the host for config file
     */
    function createWatchProgram<T extends BuilderProgram>(host: WatchCompilerHostOfConfigFile<T>): WatchOfConfigFile<T>;
}
// Solution-builder (project references / --build mode) API.
declare namespace ts {
    interface BuildOptions {
        dry?: boolean;
        force?: boolean;
        verbose?: boolean;
        incremental?: boolean;
assumeChangesOnlyAffectDirectDependencies?: boolean;
        traceResolution?: boolean;
        [option: string]: CompilerOptionsValue | undefined;
    }
    type ReportEmitErrorSummary = (errorCount: number) => void;
    interface SolutionBuilderHostBase<T extends BuilderProgram> extends ProgramHost<T> {
        createDirectory?(path: string): void;
        /**
         * Should provide create directory and writeFile if done of invalidatedProjects is not invoked with
         * writeFileCallback
         */
        writeFile?(path: string, data: string, writeByteOrderMark?: boolean): void;
        getModifiedTime(fileName: string): Date | undefined;
        setModifiedTime(fileName: string, date: Date): void;
        deleteFile(fileName: string): void;
        getParsedCommandLine?(fileName: string): ParsedCommandLine | undefined;
        reportDiagnostic: DiagnosticReporter;
        reportSolutionBuilderStatus: DiagnosticReporter;
        afterProgramEmitAndDiagnostics?(program: T): void;
    }
    interface SolutionBuilderHost<T extends BuilderProgram> extends SolutionBuilderHostBase<T> {
        reportErrorSummary?: ReportEmitErrorSummary;
    }
    interface SolutionBuilderWithWatchHost<T extends BuilderProgram> extends SolutionBuilderHostBase<T>, WatchHost {
    }
    interface SolutionBuilder<T extends BuilderProgram> {
        build(project?: string, cancellationToken?: CancellationToken): ExitStatus;
        clean(project?: string): ExitStatus;
        buildReferences(project: string, cancellationToken?: CancellationToken): ExitStatus;
        cleanReferences(project?: string): ExitStatus;
        getNextInvalidatedProject(cancellationToken?: CancellationToken): InvalidatedProject<T> | undefined;
    }
    /**
     * Create a function that reports watch status by writing to the system and handles the formatting of the diagnostic
     */
    function createBuilderStatusReporter(system: System, pretty?: boolean): DiagnosticReporter;
    function createSolutionBuilderHost<T extends BuilderProgram = EmitAndSemanticDiagnosticsBuilderProgram>(system?: System, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportErrorSummary?: ReportEmitErrorSummary): SolutionBuilderHost<T>;
    function createSolutionBuilderWithWatchHost<T extends BuilderProgram = EmitAndSemanticDiagnosticsBuilderProgram>(system?: System, createProgram?: CreateProgram<T>, reportDiagnostic?: DiagnosticReporter, reportSolutionBuilderStatus?: DiagnosticReporter, reportWatchStatus?: WatchStatusReporter): SolutionBuilderWithWatchHost<T>;
    function createSolutionBuilder<T extends BuilderProgram>(host: SolutionBuilderHost<T>, rootNames: readonly string[], defaultOptions: BuildOptions): SolutionBuilder<T>;
    function createSolutionBuilderWithWatch<T extends BuilderProgram>(host: SolutionBuilderWithWatchHost<T>, rootNames: readonly string[], defaultOptions: BuildOptions, baseWatchOptions?: WatchOptions): SolutionBuilder<T>;
    enum InvalidatedProjectKind {
        Build = 0,
        UpdateBundle = 1,
        UpdateOutputFileStamps = 2
    }
    interface InvalidatedProjectBase {
        readonly kind: InvalidatedProjectKind;
        readonly project: ResolvedConfigFileName;
        /**
         * To dispose this project and ensure that all the necessary actions are taken and state is updated accordingly
         */
        done(cancellationToken?: CancellationToken, writeFile?: WriteFileCallback, customTransformers?: CustomTransformers): ExitStatus;
        getCompilerOptions(): CompilerOptions;
        getCurrentDirectory(): string;
    }
    interface UpdateOutputFileStampsProject extends InvalidatedProjectBase {
        readonly kind: InvalidatedProjectKind.UpdateOutputFileStamps;
        // NOTE(review): the "Statmps" misspelling is part of the published API surface — do not rename.
        updateOutputFileStatmps(): void;
    }
    interface BuildInvalidedProject<T extends BuilderProgram> extends InvalidatedProjectBase {
        readonly kind: InvalidatedProjectKind.Build;
        getBuilderProgram(): T | undefined;
        getProgram(): Program | undefined;
        getSourceFile(fileName: string): SourceFile | undefined;
        getSourceFiles(): readonly SourceFile[];
        getOptionsDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[];
        getGlobalDiagnostics(cancellationToken?: CancellationToken): readonly Diagnostic[];
        getConfigFileParsingDiagnostics(): readonly Diagnostic[];
        getSyntacticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[];
        getAllDependencies(sourceFile: SourceFile): readonly string[];
        getSemanticDiagnostics(sourceFile?: SourceFile, cancellationToken?: CancellationToken): readonly Diagnostic[];
        getSemanticDiagnosticsOfNextAffectedFile(cancellationToken?: CancellationToken, ignoreSourceFile?: (sourceFile: SourceFile) => boolean): AffectedFileResult<readonly Diagnostic[]>;
        emit(targetSourceFile?: SourceFile, writeFile?: WriteFileCallback, cancellationToken?: CancellationToken, emitOnlyDtsFiles?: boolean, customTransformers?: CustomTransformers): EmitResult | undefined;
    }
    interface UpdateBundleProject<T extends BuilderProgram> extends InvalidatedProjectBase {
        readonly kind: InvalidatedProjectKind.UpdateBundle;
        emit(writeFile?: WriteFileCallback, customTransformers?: CustomTransformers): EmitResult | BuildInvalidedProject<T> | undefined;
    }
    type InvalidatedProject<T extends BuilderProgram> = UpdateOutputFileStampsProject | BuildInvalidedProject<T> | UpdateBundleProject<T>;
}
// Message-kind string literals exchanged with the typings-installer process.
declare namespace ts.server {
    type ActionSet = "action::set";
    type ActionInvalidate = "action::invalidate";
    type ActionPackageInstalled = "action::packageInstalled";
    type EventTypesRegistry = "event::typesRegistry";
    type EventBeginInstallTypes = "event::beginInstallTypes";
    type EventEndInstallTypes = "event::endInstallTypes";
    type EventInitializationFailed = "event::initializationFailed";
}
// Request/response payload shapes for the typings-installer protocol.
declare namespace ts.server {
    interface TypingInstallerResponse {
        readonly kind: ActionSet | ActionInvalidate | EventTypesRegistry | ActionPackageInstalled | EventBeginInstallTypes | EventEndInstallTypes | EventInitializationFailed;
    }
    interface TypingInstallerRequestWithProjectName {
        readonly projectName: string;
    }
    interface DiscoverTypings extends TypingInstallerRequestWithProjectName {
        readonly fileNames: string[];
        readonly projectRootPath: Path;
        readonly compilerOptions: CompilerOptions;
        readonly watchOptions?: WatchOptions;
        readonly typeAcquisition: TypeAcquisition;
        readonly unresolvedImports: SortedReadonlyArray<string>;
        readonly cachePath?: string;
        readonly kind: "discover";
    }
    interface CloseProject extends TypingInstallerRequestWithProjectName {
        readonly kind: "closeProject";
    }
    interface TypesRegistryRequest {
        readonly kind: "typesRegistry";
    }
    interface InstallPackageRequest extends TypingInstallerRequestWithProjectName {
        readonly kind: "installPackage";
        readonly fileName: Path;
        readonly packageName: string;
        readonly projectRootPath: Path;
    }
    interface PackageInstalledResponse extends ProjectResponse {
        readonly kind: ActionPackageInstalled;
        readonly success: boolean;
        readonly message: string;
    }
    interface InitializationFailedResponse extends TypingInstallerResponse {
        readonly kind: EventInitializationFailed;
        readonly message: string;
        readonly stack?: string;
    }
    interface ProjectResponse extends TypingInstallerResponse {
        readonly projectName: string;
    }
    interface InvalidateCachedTypings extends ProjectResponse {
        readonly kind: ActionInvalidate;
    }
    interface InstallTypes extends ProjectResponse {
        readonly kind: EventBeginInstallTypes | EventEndInstallTypes;
        readonly eventId: number;
        readonly typingsInstallerVersion: string;
        readonly packagesToInstall: readonly string[];
    }
    interface BeginInstallTypes extends InstallTypes {
        readonly kind: EventBeginInstallTypes;
    }
    interface EndInstallTypes extends InstallTypes {
        readonly kind: EventEndInstallTypes;
        readonly installSuccess: boolean;
    }
    interface SetTypings extends ProjectResponse {
        readonly typeAcquisition: TypeAcquisition;
        readonly compilerOptions: CompilerOptions;
        readonly typings: string[];
        readonly unresolvedImports: SortedReadonlyArray<string>;
        readonly kind: ActionSet;
    }
}
// Public node/symbol/type surface of the language service.
declare namespace ts {
    interface Node {
        getSourceFile(): SourceFile;
        getChildCount(sourceFile?: SourceFile): number;
        getChildAt(index: number, sourceFile?: SourceFile): Node;
        getChildren(sourceFile?: SourceFile): Node[];
getStart(sourceFile?: SourceFile, includeJsDocComment?: boolean): number; getFullStart(): number; getEnd(): number; getWidth(sourceFile?: SourceFileLike): number; getFullWidth(): number; getLeadingTriviaWidth(sourceFile?: SourceFile): number; getFullText(sourceFile?: SourceFile): string; getText(sourceFile?: SourceFile): string; getFirstToken(sourceFile?: SourceFile): Node | undefined; getLastToken(sourceFile?: SourceFile): Node | undefined; forEachChild<T>(cbNode: (node: Node) => T | undefined, cbNodeArray?: (nodes: NodeArray<Node>) => T | undefined): T | undefined; } interface Identifier { readonly text: string; } interface PrivateIdentifier { readonly text: string; } interface Symbol { readonly name: string; getFlags(): SymbolFlags; getEscapedName(): __String; getName(): string; getDeclarations(): Declaration[] | undefined; getDocumentationComment(typeChecker: TypeChecker | undefined): SymbolDisplayPart[]; getJsDocTags(): JSDocTagInfo[]; } interface Type { getFlags(): TypeFlags; getSymbol(): Symbol | undefined; getProperties(): Symbol[]; getProperty(propertyName: string): Symbol | undefined; getApparentProperties(): Symbol[]; getCallSignatures(): readonly Signature[]; getConstructSignatures(): readonly Signature[]; getStringIndexType(): Type | undefined; getNumberIndexType(): Type | undefined; getBaseTypes(): BaseType[] | undefined; getNonNullableType(): Type; getConstraint(): Type | undefined; getDefault(): Type | undefined; isUnion(): this is UnionType; isIntersection(): this is IntersectionType; isUnionOrIntersection(): this is UnionOrIntersectionType; isLiteral(): this is LiteralType; isStringLiteral(): this is StringLiteralType; isNumberLiteral(): this is NumberLiteralType; isTypeParameter(): this is TypeParameter; isClassOrInterface(): this is InterfaceType; isClass(): this is InterfaceType; } interface TypeReference { typeArguments?: readonly Type[]; } interface Signature { getDeclaration(): SignatureDeclaration; getTypeParameters(): TypeParameter[] | 
undefined; getParameters(): Symbol[]; getReturnType(): Type; getDocumentationComment(typeChecker: TypeChecker | undefined): SymbolDisplayPart[]; getJsDocTags(): JSDocTagInfo[]; } interface SourceFile { getLineAndCharacterOfPosition(pos: number): LineAndCharacter; getLineEndOfPosition(pos: number): number; getLineStarts(): readonly number[]; getPositionOfLineAndCharacter(line: number, character: number): number; update(newText: string, textChangeRange: TextChangeRange): SourceFile; } interface SourceFileLike { getLineAndCharacterOfPosition(pos: number): LineAndCharacter; } interface SourceMapSource { getLineAndCharacterOfPosition(pos: number): LineAndCharacter; } /** * Represents an immutable snapshot of a script at a specified time.Once acquired, the * snapshot is observably immutable. i.e. the same calls with the same parameters will return * the same values. */ interface IScriptSnapshot { /** Gets a portion of the script snapshot specified by [start, end). */ getText(start: number, end: number): string; /** Gets the length of this script snapshot. */ getLength(): number; /** * Gets the TextChangeRange that describe how the text changed between this text and * an older version. This information is used by the incremental parser to determine * what sections of the script need to be re-parsed. 'undefined' can be returned if the * change range cannot be determined. However, in that case, incremental parsing will * not happen and the entire document will be re - parsed. 
*/ getChangeRange(oldSnapshot: IScriptSnapshot): TextChangeRange | undefined; /** Releases all resources held by this script snapshot */ dispose?(): void; } namespace ScriptSnapshot { function fromString(text: string): IScriptSnapshot; } interface PreProcessedFileInfo { referencedFiles: FileReference[]; typeReferenceDirectives: FileReference[]; libReferenceDirectives: FileReference[]; importedFiles: FileReference[]; ambientExternalModules?: string[]; isLibFile: boolean; } interface HostCancellationToken { isCancellationRequested(): boolean; } interface InstallPackageOptions { fileName: Path; packageName: string; } interface PerformanceEvent { kind: "UpdateGraph" | "CreatePackageJsonAutoImportProvider"; durationMs: number; } enum LanguageServiceMode { Semantic = 0, PartialSemantic = 1, Syntactic = 2 } interface LanguageServiceHost extends GetEffectiveTypeRootsHost { getCompilationSettings(): CompilerOptions; getNewLine?(): string; getProjectVersion?(): string; getScriptFileNames(): string[]; getScriptKind?(fileName: string): ScriptKind; getScriptVersion(fileName: string): string; getScriptSnapshot(fileName: string): IScriptSnapshot | undefined; getProjectReferences?(): readonly ProjectReference[] | undefined; getLocalizedDiagnosticMessages?(): any; getCancellationToken?(): HostCancellationToken; getCurrentDirectory(): string; getDefaultLibFileName(options: CompilerOptions): string; log?(s: string): void; trace?(s: string): void; error?(s: string): void; useCaseSensitiveFileNames?(): boolean; readDirectory?(path: string, extensions?: readonly string[], exclude?: readonly string[], include?: readonly string[], depth?: number): string[]; readFile?(path: string, encoding?: string): string | undefined; realpath?(path: string): string; fileExists?(path: string): boolean; getTypeRootsVersion?(): number; resolveModuleNames?(moduleNames: string[], containingFile: string, reusedNames: string[] | undefined, redirectedReference: ResolvedProjectReference | undefined, options: 
CompilerOptions): (ResolvedModule | undefined)[]; getResolvedModuleWithFailedLookupLocationsFromCache?(modulename: string, containingFile: string): ResolvedModuleWithFailedLookupLocations | undefined; resolveTypeReferenceDirectives?(typeDirectiveNames: string[], containingFile: string, redirectedReference: ResolvedProjectReference | undefined, options: CompilerOptions): (ResolvedTypeReferenceDirective | undefined)[]; getDirectories?(directoryName: string): string[]; /** * Gets a set of custom transformers to use during emit. */ getCustomTransformers?(): CustomTransformers | undefined; isKnownTypesPackageName?(name: string): boolean; installPackage?(options: InstallPackageOptions): Promise<ApplyCodeActionCommandResult>; writeFile?(fileName: string, content: string): void; } type WithMetadata<T> = T & { metadata?: unknown; }; enum SemanticClassificationFormat { Original = "original", TwentyTwenty = "2020" } interface LanguageService { /** This is used as a part of restarting the language service. */ cleanupSemanticCache(): void; /** * Gets errors indicating invalid syntax in a file. * * In English, "this cdeo have, erorrs" is syntactically invalid because it has typos, * grammatical errors, and misplaced punctuation. Likewise, examples of syntax * errors in TypeScript are missing parentheses in an `if` statement, mismatched * curly braces, and using a reserved keyword as a variable name. * * These diagnostics are inexpensive to compute and don't require knowledge of * other files. Note that a non-empty result increases the likelihood of false positives * from `getSemanticDiagnostics`. * * While these represent the majority of syntax-related diagnostics, there are some * that require the type system, which will be present in `getSemanticDiagnostics`. 
* * @param fileName A path to the file you want syntactic diagnostics for */ getSyntacticDiagnostics(fileName: string): DiagnosticWithLocation[]; /** * Gets warnings or errors indicating type system issues in a given file. * Requesting semantic diagnostics may start up the type system and * run deferred work, so the first call may take longer than subsequent calls. * * Unlike the other get*Diagnostics functions, these diagnostics can potentially not * include a reference to a source file. Specifically, the first time this is called, * it will return global diagnostics with no associated location. * * To contrast the differences between semantic and syntactic diagnostics, consider the * sentence: "The sun is green." is syntactically correct; those are real English words with * correct sentence structure. However, it is semantically invalid, because it is not true. * * @param fileName A path to the file you want semantic diagnostics for */ getSemanticDiagnostics(fileName: string): Diagnostic[]; /** * Gets suggestion diagnostics for a specific file. These diagnostics tend to * proactively suggest refactors, as opposed to diagnostics that indicate * potentially incorrect runtime behavior. * * @param fileName A path to the file you want semantic diagnostics for */ getSuggestionDiagnostics(fileName: string): DiagnosticWithLocation[]; /** * Gets global diagnostics related to the program configuration and compiler options. */ getCompilerOptionsDiagnostics(): Diagnostic[]; /** @deprecated Use getEncodedSyntacticClassifications instead. */ getSyntacticClassifications(fileName: string, span: TextSpan): ClassifiedSpan[]; getSyntacticClassifications(fileName: string, span: TextSpan, format: SemanticClassificationFormat): ClassifiedSpan[] | ClassifiedSpan2020[]; /** @deprecated Use getEncodedSemanticClassifications instead. 
*/
getSemanticClassifications(fileName: string, span: TextSpan): ClassifiedSpan[];
getSemanticClassifications(fileName: string, span: TextSpan, format: SemanticClassificationFormat): ClassifiedSpan[] | ClassifiedSpan2020[];
/** Encoded as triples of [start, length, ClassificationType]. */
getEncodedSyntacticClassifications(fileName: string, span: TextSpan): Classifications;
/**
 * Gets semantic highlights information for a particular file. Has two formats, an older
 * version used by VS and a format used by VS Code.
 *
 * @param fileName The path to the file
 * @param span A text span to return results within
 * @param format Which format to use, defaults to "original"
 * @returns a number array encoded as triples of [start, length, ClassificationType, ...].
 */
getEncodedSemanticClassifications(fileName: string, span: TextSpan, format?: SemanticClassificationFormat): Classifications;
/**
 * Gets completion entries at a particular position in a file.
 *
 * @param fileName The path to the file
 * @param position A zero-based index of the character where you want the entries
 * @param options An object describing how the request was triggered and what kinds
 * of code actions can be returned with the completions.
 */
getCompletionsAtPosition(fileName: string, position: number, options: GetCompletionsAtPositionOptions | undefined): WithMetadata<CompletionInfo> | undefined;
/**
 * Gets the extended details for a completion entry retrieved from `getCompletionsAtPosition`.
 *
 * @param fileName The path to the file
 * @param position A zero-based index of the character where you want the entries
 * @param entryName The name from an existing completion which came from `getCompletionsAtPosition`
 * @param formatOptions How should code samples in the completions be formatted, can be undefined for backwards compatibility
 * @param source Source code for the current file, can be undefined for backwards compatibility
 * @param preferences User settings, can be undefined for backwards compatibility
 */
getCompletionEntryDetails(fileName: string, position: number, entryName: string, formatOptions: FormatCodeOptions | FormatCodeSettings | undefined, source: string | undefined, preferences: UserPreferences | undefined): CompletionEntryDetails | undefined;
getCompletionEntrySymbol(fileName: string, position: number, name: string, source: string | undefined): Symbol | undefined;
/**
 * Gets semantic information about the identifier at a particular position in a
 * file. Quick info is what you typically see when you hover in an editor.
* * @param fileName The path to the file * @param position A zero-based index of the character where you want the quick info */ getQuickInfoAtPosition(fileName: string, position: number): QuickInfo | undefined; getNameOrDottedNameSpan(fileName: string, startPos: number, endPos: number): TextSpan | undefined; getBreakpointStatementAtPosition(fileName: string, position: number): TextSpan | undefined; getSignatureHelpItems(fileName: string, position: number, options: SignatureHelpItemsOptions | undefined): SignatureHelpItems | undefined; getRenameInfo(fileName: string, position: number, options?: RenameInfoOptions): RenameInfo; findRenameLocations(fileName: string, position: number, findInStrings: boolean, findInComments: boolean, providePrefixAndSuffixTextForRename?: boolean): readonly RenameLocation[] | undefined; getSmartSelectionRange(fileName: string, position: number): SelectionRange; getDefinitionAtPosition(fileName: string, position: number): readonly DefinitionInfo[] | undefined; getDefinitionAndBoundSpan(fileName: string, position: number): DefinitionInfoAndBoundSpan | undefined; getTypeDefinitionAtPosition(fileName: string, position: number): readonly DefinitionInfo[] | undefined; getImplementationAtPosition(fileName: string, position: number): readonly ImplementationLocation[] | undefined; getReferencesAtPosition(fileName: string, position: number): ReferenceEntry[] | undefined; findReferences(fileName: string, position: number): ReferencedSymbol[] | undefined; getDocumentHighlights(fileName: string, position: number, filesToSearch: string[]): DocumentHighlights[] | undefined; /** @deprecated */ getOccurrencesAtPosition(fileName: string, position: number): readonly ReferenceEntry[] | undefined; getNavigateToItems(searchValue: string, maxResultCount?: number, fileName?: string, excludeDtsFiles?: boolean): NavigateToItem[]; getNavigationBarItems(fileName: string): NavigationBarItem[]; getNavigationTree(fileName: string): NavigationTree; 
prepareCallHierarchy(fileName: string, position: number): CallHierarchyItem | CallHierarchyItem[] | undefined; provideCallHierarchyIncomingCalls(fileName: string, position: number): CallHierarchyIncomingCall[]; provideCallHierarchyOutgoingCalls(fileName: string, position: number): CallHierarchyOutgoingCall[]; getOutliningSpans(fileName: string): OutliningSpan[]; getTodoComments(fileName: string, descriptors: TodoCommentDescriptor[]): TodoComment[]; getBraceMatchingAtPosition(fileName: string, position: number): TextSpan[]; getIndentationAtPosition(fileName: string, position: number, options: EditorOptions | EditorSettings): number; getFormattingEditsForRange(fileName: string, start: number, end: number, options: FormatCodeOptions | FormatCodeSettings): TextChange[]; getFormattingEditsForDocument(fileName: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[]; getFormattingEditsAfterKeystroke(fileName: string, position: number, key: string, options: FormatCodeOptions | FormatCodeSettings): TextChange[]; getDocCommentTemplateAtPosition(fileName: string, position: number): TextInsertion | undefined; isValidBraceCompletionAtPosition(fileName: string, position: number, openingBrace: number): boolean; /** * This will return a defined result if the position is after the `>` of the opening tag, or somewhere in the text, of a JSXElement with no closing tag. * Editors should call this after `>` is typed. 
*/ getJsxClosingTagAtPosition(fileName: string, position: number): JsxClosingTagInfo | undefined; getSpanOfEnclosingComment(fileName: string, position: number, onlyMultiLine: boolean): TextSpan | undefined; toLineColumnOffset?(fileName: string, position: number): LineAndCharacter; getCodeFixesAtPosition(fileName: string, start: number, end: number, errorCodes: readonly number[], formatOptions: FormatCodeSettings, preferences: UserPreferences): readonly CodeFixAction[]; getCombinedCodeFix(scope: CombinedCodeFixScope, fixId: {}, formatOptions: FormatCodeSettings, preferences: UserPreferences): CombinedCodeActions; applyCodeActionCommand(action: CodeActionCommand, formatSettings?: FormatCodeSettings): Promise<ApplyCodeActionCommandResult>; applyCodeActionCommand(action: CodeActionCommand[], formatSettings?: FormatCodeSettings): Promise<ApplyCodeActionCommandResult[]>; applyCodeActionCommand(action: CodeActionCommand | CodeActionCommand[], formatSettings?: FormatCodeSettings): Promise<ApplyCodeActionCommandResult | ApplyCodeActionCommandResult[]>; /** @deprecated `fileName` will be ignored */ applyCodeActionCommand(fileName: string, action: CodeActionCommand): Promise<ApplyCodeActionCommandResult>; /** @deprecated `fileName` will be ignored */ applyCodeActionCommand(fileName: string, action: CodeActionCommand[]): Promise<ApplyCodeActionCommandResult[]>; /** @deprecated `fileName` will be ignored */ applyCodeActionCommand(fileName: string, action: CodeActionCommand | CodeActionCommand[]): Promise<ApplyCodeActionCommandResult | ApplyCodeActionCommandResult[]>; getApplicableRefactors(fileName: string, positionOrRange: number | TextRange, preferences: UserPreferences | undefined, triggerReason?: RefactorTriggerReason): ApplicableRefactorInfo[]; getEditsForRefactor(fileName: string, formatOptions: FormatCodeSettings, positionOrRange: number | TextRange, refactorName: string, actionName: string, preferences: UserPreferences | undefined): RefactorEditInfo | undefined; 
organizeImports(scope: OrganizeImportsScope, formatOptions: FormatCodeSettings, preferences: UserPreferences | undefined): readonly FileTextChanges[]; getEditsForFileRename(oldFilePath: string, newFilePath: string, formatOptions: FormatCodeSettings, preferences: UserPreferences | undefined): readonly FileTextChanges[]; getEmitOutput(fileName: string, emitOnlyDtsFiles?: boolean, forceDtsEmit?: boolean): EmitOutput; getProgram(): Program | undefined; toggleLineComment(fileName: string, textRange: TextRange): TextChange[]; toggleMultilineComment(fileName: string, textRange: TextRange): TextChange[]; commentSelection(fileName: string, textRange: TextRange): TextChange[]; uncommentSelection(fileName: string, textRange: TextRange): TextChange[]; dispose(): void; } interface JsxClosingTagInfo { readonly newText: string; } interface CombinedCodeFixScope { type: "file"; fileName: string; } type OrganizeImportsScope = CombinedCodeFixScope; type CompletionsTriggerCharacter = "." | '"' | "'" | "`" | "/" | "@" | "<" | "#"; interface GetCompletionsAtPositionOptions extends UserPreferences { /** * If the editor is asking for completions because a certain character was typed * (as opposed to when the user explicitly requested them) this should be set. */ triggerCharacter?: CompletionsTriggerCharacter; /** @deprecated Use includeCompletionsForModuleExports */ includeExternalModuleExports?: boolean; /** @deprecated Use includeCompletionsWithInsertText */ includeInsertTextCompletions?: boolean; } type SignatureHelpTriggerCharacter = "," | "(" | "<"; type SignatureHelpRetriggerCharacter = SignatureHelpTriggerCharacter | ")"; interface SignatureHelpItemsOptions { triggerReason?: SignatureHelpTriggerReason; } type SignatureHelpTriggerReason = SignatureHelpInvokedReason | SignatureHelpCharacterTypedReason | SignatureHelpRetriggeredReason; /** * Signals that the user manually requested signature help. * The language service will unconditionally attempt to provide a result. 
*/ interface SignatureHelpInvokedReason { kind: "invoked"; triggerCharacter?: undefined; } /** * Signals that the signature help request came from a user typing a character. * Depending on the character and the syntactic context, the request may or may not be served a result. */ interface SignatureHelpCharacterTypedReason { kind: "characterTyped"; /** * Character that was responsible for triggering signature help. */ triggerCharacter: SignatureHelpTriggerCharacter; } /** * Signals that this signature help request came from typing a character or moving the cursor. * This should only occur if a signature help session was already active and the editor needs to see if it should adjust. * The language service will unconditionally attempt to provide a result. * `triggerCharacter` can be `undefined` for a retrigger caused by a cursor move. */ interface SignatureHelpRetriggeredReason { kind: "retrigger"; /** * Character that was responsible for triggering signature help. */ triggerCharacter?: SignatureHelpRetriggerCharacter; } interface ApplyCodeActionCommandResult { successMessage: string; } interface Classifications { spans: number[]; endOfLineState: EndOfLineState; } interface ClassifiedSpan { textSpan: TextSpan; classificationType: ClassificationTypeNames; } interface ClassifiedSpan2020 { textSpan: TextSpan; classificationType: number; } /** * Navigation bar interface designed for visual studio's dual-column layout. * This does not form a proper tree. * The navbar is returned as a list of top-level items, each of which has a list of child items. * Child items always have an empty array for their `childItems`. */ interface NavigationBarItem { text: string; kind: ScriptElementKind; kindModifiers: string; spans: TextSpan[]; childItems: NavigationBarItem[]; indent: number; bolded: boolean; grayed: boolean; } /** * Node in a tree of nested declarations in a file. * The top node is always a script or module node. 
*/ interface NavigationTree { /** Name of the declaration, or a short description, e.g. "<class>". */ text: string; kind: ScriptElementKind; /** ScriptElementKindModifier separated by commas, e.g. "public,abstract" */ kindModifiers: string; /** * Spans of the nodes that generated this declaration. * There will be more than one if this is the result of merging. */ spans: TextSpan[]; nameSpan: TextSpan | undefined; /** Present if non-empty */ childItems?: NavigationTree[]; } interface CallHierarchyItem { name: string; kind: ScriptElementKind; kindModifiers?: string; file: string; span: TextSpan; selectionSpan: TextSpan; containerName?: string; } interface CallHierarchyIncomingCall { from: CallHierarchyItem; fromSpans: TextSpan[]; } interface CallHierarchyOutgoingCall { to: CallHierarchyItem; fromSpans: TextSpan[]; } interface TodoCommentDescriptor { text: string; priority: number; } interface TodoComment { descriptor: TodoCommentDescriptor; message: string; position: number; } interface TextChange { span: TextSpan; newText: string; } interface FileTextChanges { fileName: string; textChanges: readonly TextChange[]; isNewFile?: boolean; } interface CodeAction { /** Description of the code action to display in the UI of the editor */ description: string; /** Text changes to apply to each file as part of the code action */ changes: FileTextChanges[]; /** * If the user accepts the code fix, the editor should send the action back in a `applyAction` request. * This allows the language service to have side effects (e.g. installing dependencies) upon a code fix. */ commands?: CodeActionCommand[]; } interface CodeFixAction extends CodeAction { /** Short name to identify the fix, for use by telemetry. */ fixName: string; /** * If present, one may call 'getCombinedCodeFix' with this fixId. * This may be omitted to indicate that the code fix can't be applied in a group. 
 */
fixId?: {};
fixAllDescription?: string;
}
interface CombinedCodeActions {
    changes: readonly FileTextChanges[];
    commands?: readonly CodeActionCommand[];
}
type CodeActionCommand = InstallPackageAction;
interface InstallPackageAction {
}
/**
 * A set of one or more available refactoring actions, grouped under a parent refactoring.
 */
interface ApplicableRefactorInfo {
    /**
     * The programmatic name of the refactoring
     */
    name: string;
    /**
     * A description of this refactoring category to show to the user.
     * If the refactoring gets inlined (see below), this text will not be visible.
     */
    description: string;
    /**
     * Inlineable refactorings can have their actions hoisted out to the top level
     * of a context menu. Non-inlineable refactorings should always be shown inside
     * their parent grouping.
     *
     * If not specified, this value is assumed to be 'true'
     */
    inlineable?: boolean;
    actions: RefactorActionInfo[];
}
/**
 * Represents a single refactoring action - for example, the "Extract Method..." refactor might
 * offer several actions, each corresponding to a surrounding class or closure to extract into.
 */
interface RefactorActionInfo {
    /**
     * The programmatic name of the refactoring action
     */
    name: string;
    /**
     * A description of this refactoring action to show to the user.
     * If the parent refactoring is inlined away, this will be the only text shown,
     * so this description should make sense by itself if the parent is inlineable=true
     */
    description: string;
    /**
     * A message to show to the user if the refactoring cannot be applied in
     * the current context.
*/ notApplicableReason?: string; } /** * A set of edits to make in response to a refactor action, plus an optional * location where renaming should be invoked from */ interface RefactorEditInfo { edits: FileTextChanges[]; renameFilename?: string; renameLocation?: number; commands?: CodeActionCommand[]; } type RefactorTriggerReason = "implicit" | "invoked"; interface TextInsertion { newText: string; /** The position in newText the caret should point to after the insertion. */ caretOffset: number; } interface DocumentSpan { textSpan: TextSpan; fileName: string; /** * If the span represents a location that was remapped (e.g. via a .d.ts.map file), * then the original filename and span will be specified here */ originalTextSpan?: TextSpan; originalFileName?: string; /** * If DocumentSpan.textSpan is the span for name of the declaration, * then this is the span for relevant declaration */ contextSpan?: TextSpan; originalContextSpan?: TextSpan; } interface RenameLocation extends DocumentSpan { readonly prefixText?: string; readonly suffixText?: string; } interface ReferenceEntry extends DocumentSpan { isWriteAccess: boolean; isDefinition: boolean; isInString?: true; } interface ImplementationLocation extends DocumentSpan { kind: ScriptElementKind; displayParts: SymbolDisplayPart[]; } enum HighlightSpanKind { none = "none", definition = "definition", reference = "reference", writtenReference = "writtenReference" } interface HighlightSpan { fileName?: string; isInString?: true; textSpan: TextSpan; contextSpan?: TextSpan; kind: HighlightSpanKind; } interface NavigateToItem { name: string; kind: ScriptElementKind; kindModifiers: string; matchKind: "exact" | "prefix" | "substring" | "camelCase"; isCaseSensitive: boolean; fileName: string; textSpan: TextSpan; containerName: string; containerKind: ScriptElementKind; } enum IndentStyle { None = 0, Block = 1, Smart = 2 } enum SemicolonPreference { Ignore = "ignore", Insert = "insert", Remove = "remove" } interface EditorOptions { 
BaseIndentSize?: number; IndentSize: number; TabSize: number; NewLineCharacter: string; ConvertTabsToSpaces: boolean; IndentStyle: IndentStyle; } interface EditorSettings { baseIndentSize?: number; indentSize?: number; tabSize?: number; newLineCharacter?: string; convertTabsToSpaces?: boolean; indentStyle?: IndentStyle; trimTrailingWhitespace?: boolean; } interface FormatCodeOptions extends EditorOptions { InsertSpaceAfterCommaDelimiter: boolean; InsertSpaceAfterSemicolonInForStatements: boolean; InsertSpaceBeforeAndAfterBinaryOperators: boolean; InsertSpaceAfterConstructor?: boolean; InsertSpaceAfterKeywordsInControlFlowStatements: boolean; InsertSpaceAfterFunctionKeywordForAnonymousFunctions: boolean; InsertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis: boolean; InsertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets: boolean; InsertSpaceAfterOpeningAndBeforeClosingNonemptyBraces?: boolean; InsertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces: boolean; InsertSpaceAfterOpeningAndBeforeClosingJsxExpressionBraces?: boolean; InsertSpaceAfterTypeAssertion?: boolean; InsertSpaceBeforeFunctionParenthesis?: boolean; PlaceOpenBraceOnNewLineForFunctions: boolean; PlaceOpenBraceOnNewLineForControlBlocks: boolean; insertSpaceBeforeTypeAnnotation?: boolean; } interface FormatCodeSettings extends EditorSettings { readonly insertSpaceAfterCommaDelimiter?: boolean; readonly insertSpaceAfterSemicolonInForStatements?: boolean; readonly insertSpaceBeforeAndAfterBinaryOperators?: boolean; readonly insertSpaceAfterConstructor?: boolean; readonly insertSpaceAfterKeywordsInControlFlowStatements?: boolean; readonly insertSpaceAfterFunctionKeywordForAnonymousFunctions?: boolean; readonly insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis?: boolean; readonly insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets?: boolean; readonly insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces?: boolean; readonly insertSpaceAfterOpeningAndBeforeClosingEmptyBraces?: boolean; 
readonly insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces?: boolean; readonly insertSpaceAfterOpeningAndBeforeClosingJsxExpressionBraces?: boolean; readonly insertSpaceAfterTypeAssertion?: boolean; readonly insertSpaceBeforeFunctionParenthesis?: boolean; readonly placeOpenBraceOnNewLineForFunctions?: boolean; readonly placeOpenBraceOnNewLineForControlBlocks?: boolean; readonly insertSpaceBeforeTypeAnnotation?: boolean; readonly indentMultiLineObjectLiteralBeginningOnBlankLine?: boolean; readonly semicolons?: SemicolonPreference; } function getDefaultFormatCodeSettings(newLineCharacter?: string): FormatCodeSettings; interface DefinitionInfo extends DocumentSpan { kind: ScriptElementKind; name: string; containerKind: ScriptElementKind; containerName: string; } interface DefinitionInfoAndBoundSpan { definitions?: readonly DefinitionInfo[]; textSpan: TextSpan; } interface ReferencedSymbolDefinitionInfo extends DefinitionInfo { displayParts: SymbolDisplayPart[]; } interface ReferencedSymbol { definition: ReferencedSymbolDefinitionInfo; references: ReferenceEntry[]; } enum SymbolDisplayPartKind { aliasName = 0, className = 1, enumName = 2, fieldName = 3, interfaceName = 4, keyword = 5, lineBreak = 6, numericLiteral = 7, stringLiteral = 8, localName = 9, methodName = 10, moduleName = 11, operator = 12, parameterName = 13, propertyName = 14, punctuation = 15, space = 16, text = 17, typeParameterName = 18, enumMemberName = 19, functionName = 20, regularExpressionLiteral = 21 } interface SymbolDisplayPart { text: string; kind: string; } interface JSDocTagInfo { name: string; text?: string; } interface QuickInfo { kind: ScriptElementKind; kindModifiers: string; textSpan: TextSpan; displayParts?: SymbolDisplayPart[]; documentation?: SymbolDisplayPart[]; tags?: JSDocTagInfo[]; } type RenameInfo = RenameInfoSuccess | RenameInfoFailure; interface RenameInfoSuccess { canRename: true; /** * File or directory to rename. 
* If set, `getEditsForFileRename` should be called instead of `findRenameLocations`. */ fileToRename?: string; displayName: string; fullDisplayName: string; kind: ScriptElementKind; kindModifiers: string; triggerSpan: TextSpan; } interface RenameInfoFailure { canRename: false; localizedErrorMessage: string; } interface RenameInfoOptions { readonly allowRenameOfImportPath?: boolean; } interface SignatureHelpParameter { name: string; documentation: SymbolDisplayPart[]; displayParts: SymbolDisplayPart[]; isOptional: boolean; } interface SelectionRange { textSpan: TextSpan; parent?: SelectionRange; } /** * Represents a single signature to show in signature help. * The id is used for subsequent calls into the language service to ask questions about the * signature help item in the context of any documents that have been updated. i.e. after * an edit has happened, while signature help is still active, the host can ask important * questions like 'what parameter is the user currently contained within?'. */ interface SignatureHelpItem { isVariadic: boolean; prefixDisplayParts: SymbolDisplayPart[]; suffixDisplayParts: SymbolDisplayPart[]; separatorDisplayParts: SymbolDisplayPart[]; parameters: SignatureHelpParameter[]; documentation: SymbolDisplayPart[]; tags: JSDocTagInfo[]; } /** * Represents a set of signature help items, and the preferred item that should be selected. */ interface SignatureHelpItems { items: SignatureHelpItem[]; applicableSpan: TextSpan; selectedItemIndex: number; argumentIndex: number; argumentCount: number; } interface CompletionInfo { /** Not true for all global completions. This will be true if the enclosing scope matches a few syntax kinds. See `isSnippetScope`. */ isGlobalCompletion: boolean; isMemberCompletion: boolean; /** * In the absence of `CompletionEntry["replacementSpan"], the editor may choose whether to use * this span or its default one. 
If `CompletionEntry["replacementSpan"]` is defined, that span * must be used to commit that completion entry. */ optionalReplacementSpan?: TextSpan; /** * true when the current location also allows for a new identifier */ isNewIdentifierLocation: boolean; entries: CompletionEntry[]; } interface CompletionEntry { name: string; kind: ScriptElementKind; kindModifiers?: string; sortText: string; insertText?: string; /** * An optional span that indicates the text to be replaced by this completion item. * If present, this span should be used instead of the default one. * It will be set if the required span differs from the one generated by the default replacement behavior. */ replacementSpan?: TextSpan; hasAction?: true; source?: string; isRecommended?: true; isFromUncheckedFile?: true; isPackageJsonImport?: true; } interface CompletionEntryDetails { name: string; kind: ScriptElementKind; kindModifiers: string; displayParts: SymbolDisplayPart[]; documentation?: SymbolDisplayPart[]; tags?: JSDocTagInfo[]; codeActions?: CodeAction[]; source?: SymbolDisplayPart[]; } interface OutliningSpan { /** The span of the document to actually collapse. */ textSpan: TextSpan; /** The span of the document to display when the user hovers over the collapsed span. */ hintSpan: TextSpan; /** The text to display in the editor for the collapsed region. */ bannerText: string; /** * Whether or not this region should be automatically collapsed when * the 'Collapse to Definitions' command is invoked. 
*/ autoCollapse: boolean; /** * Classification of the contents of the span */ kind: OutliningSpanKind; } enum OutliningSpanKind { /** Single or multi-line comments */ Comment = "comment", /** Sections marked by '// #region' and '// #endregion' comments */ Region = "region", /** Declarations and expressions */ Code = "code", /** Contiguous blocks of import declarations */ Imports = "imports" } enum OutputFileType { JavaScript = 0, SourceMap = 1, Declaration = 2 } enum EndOfLineState { None = 0, InMultiLineCommentTrivia = 1, InSingleQuoteStringLiteral = 2, InDoubleQuoteStringLiteral = 3, InTemplateHeadOrNoSubstitutionTemplate = 4, InTemplateMiddleOrTail = 5, InTemplateSubstitutionPosition = 6 } enum TokenClass { Punctuation = 0, Keyword = 1, Operator = 2, Comment = 3, Whitespace = 4, Identifier = 5, NumberLiteral = 6, BigIntLiteral = 7, StringLiteral = 8, RegExpLiteral = 9 } interface ClassificationResult { finalLexState: EndOfLineState; entries: ClassificationInfo[]; } interface ClassificationInfo { length: number; classification: TokenClass; } interface Classifier { /** * Gives lexical classifications of tokens on a line without any syntactic context. * For instance, a token consisting of the text 'string' can be either an identifier * named 'string' or the keyword 'string', however, because this classifier is not aware, * it relies on certain heuristics to give acceptable results. For classifications where * speed trumps accuracy, this function is preferable; however, for true accuracy, the * syntactic classifier is ideal. In fact, in certain editing scenarios, combining the * lexical, syntactic, and semantic classifiers may issue the best user experience. * * @param text The text of a line to classify. * @param lexState The state of the lexical classifier at the end of the previous line. * @param syntacticClassifierAbsent Whether the client is *not* using a syntactic classifier. 
* If there is no syntactic classifier (syntacticClassifierAbsent=true), * certain heuristics may be used in its place; however, if there is a * syntactic classifier (syntacticClassifierAbsent=false), certain * classifications which may be incorrectly categorized will be given * back as Identifiers in order to allow the syntactic classifier to * subsume the classification. * @deprecated Use getLexicalClassifications instead. */ getClassificationsForLine(text: string, lexState: EndOfLineState, syntacticClassifierAbsent: boolean): ClassificationResult; getEncodedLexicalClassifications(text: string, endOfLineState: EndOfLineState, syntacticClassifierAbsent: boolean): Classifications; } enum ScriptElementKind { unknown = "", warning = "warning", /** predefined type (void) or keyword (class) */ keyword = "keyword", /** top level script node */ scriptElement = "script", /** module foo {} */ moduleElement = "module", /** class X {} */ classElement = "class", /** var x = class X {} */ localClassElement = "local class", /** interface Y {} */ interfaceElement = "interface", /** type T = ... */ typeElement = "type", /** enum E */ enumElement = "enum", enumMemberElement = "enum member", /** * Inside module and script only * const v = .. 
*/ variableElement = "var", /** Inside function */ localVariableElement = "local var", /** * Inside module and script only * function f() { } */ functionElement = "function", /** Inside function */ localFunctionElement = "local function", /** class X { [public|private]* foo() {} } */ memberFunctionElement = "method", /** class X { [public|private]* [get|set] foo:number; } */ memberGetAccessorElement = "getter", memberSetAccessorElement = "setter", /** * class X { [public|private]* foo:number; } * interface Y { foo:number; } */ memberVariableElement = "property", /** class X { constructor() { } } */ constructorImplementationElement = "constructor", /** interface Y { ():number; } */ callSignatureElement = "call", /** interface Y { []:number; } */ indexSignatureElement = "index", /** interface Y { new():Y; } */ constructSignatureElement = "construct", /** function foo(*Y*: string) */ parameterElement = "parameter", typeParameterElement = "type parameter", primitiveType = "primitive type", label = "label", alias = "alias", constElement = "const", letElement = "let", directory = "directory", externalModuleName = "external module name", /** * <JsxTagName attribute1 attribute2={0} /> */ jsxAttribute = "JSX attribute", /** String literal */ string = "string" } enum ScriptElementKindModifier { none = "", publicMemberModifier = "public", privateMemberModifier = "private", protectedMemberModifier = "protected", exportedModifier = "export", ambientModifier = "declare", staticModifier = "static", abstractModifier = "abstract", optionalModifier = "optional", deprecatedModifier = "deprecated", dtsModifier = ".d.ts", tsModifier = ".ts", tsxModifier = ".tsx", jsModifier = ".js", jsxModifier = ".jsx", jsonModifier = ".json" } enum ClassificationTypeNames { comment = "comment", identifier = "identifier", keyword = "keyword", numericLiteral = "number", bigintLiteral = "bigint", operator = "operator", stringLiteral = "string", whiteSpace = "whitespace", text = "text", punctuation = 
"punctuation", className = "class name", enumName = "enum name", interfaceName = "interface name", moduleName = "module name", typeParameterName = "type parameter name", typeAliasName = "type alias name", parameterName = "parameter name", docCommentTagName = "doc comment tag name", jsxOpenTagName = "jsx open tag name", jsxCloseTagName = "jsx close tag name", jsxSelfClosingTagName = "jsx self closing tag name", jsxAttribute = "jsx attribute", jsxText = "jsx text", jsxAttributeStringLiteralValue = "jsx attribute string literal value" } enum ClassificationType { comment = 1, identifier = 2, keyword = 3, numericLiteral = 4, operator = 5, stringLiteral = 6, regularExpressionLiteral = 7, whiteSpace = 8, text = 9, punctuation = 10, className = 11, enumName = 12, interfaceName = 13, moduleName = 14, typeParameterName = 15, typeAliasName = 16, parameterName = 17, docCommentTagName = 18, jsxOpenTagName = 19, jsxCloseTagName = 20, jsxSelfClosingTagName = 21, jsxAttribute = 22, jsxText = 23, jsxAttributeStringLiteralValue = 24, bigintLiteral = 25 } } declare namespace ts { /** The classifier is used for syntactic highlighting in editors via the TSServer */ function createClassifier(): Classifier; } declare namespace ts { interface DocumentHighlights { fileName: string; highlightSpans: HighlightSpan[]; } } declare namespace ts { /** * The document registry represents a store of SourceFile objects that can be shared between * multiple LanguageService instances. A LanguageService instance holds on the SourceFile (AST) * of files in the context. * SourceFile objects account for most of the memory usage by the language service. Sharing * the same DocumentRegistry instance between different instances of LanguageService allow * for more efficient memory utilization since all projects will share at least the library * file (lib.d.ts). * * A more advanced use of the document registry is to serialize sourceFile objects to disk * and re-hydrate them when needed. 
* * To create a default DocumentRegistry, use createDocumentRegistry to create one, and pass it * to all subsequent createLanguageService calls. */ interface DocumentRegistry { /** * Request a stored SourceFile with a given fileName and compilationSettings. * The first call to acquire will call createLanguageServiceSourceFile to generate * the SourceFile if it was not found in the registry. * * @param fileName The name of the file requested * @param compilationSettings Some compilation settings like target affects the * shape of the resulting SourceFile. This allows the DocumentRegistry to store * multiple copies of the same file for different compilation settings. * @param scriptSnapshot Text of the file. Only used if the file was not found * in the registry and a new one was created. * @param version Current version of the file. Only used if the file was not found * in the registry and a new one was created. */ acquireDocument(fileName: string, compilationSettings: CompilerOptions, scriptSnapshot: IScriptSnapshot, version: string, scriptKind?: ScriptKind): SourceFile; acquireDocumentWithKey(fileName: string, path: Path, compilationSettings: CompilerOptions, key: DocumentRegistryBucketKey, scriptSnapshot: IScriptSnapshot, version: string, scriptKind?: ScriptKind): SourceFile; /** * Request an updated version of an already existing SourceFile with a given fileName * and compilationSettings. The update will in-turn call updateLanguageServiceSourceFile * to get an updated SourceFile. * * @param fileName The name of the file requested * @param compilationSettings Some compilation settings like target affects the * shape of the resulting SourceFile. This allows the DocumentRegistry to store * multiple copies of the same file for different compilation settings. * @param scriptSnapshot Text of the file. * @param version Current version of the file.
*/ updateDocument(fileName: string, compilationSettings: CompilerOptions, scriptSnapshot: IScriptSnapshot, version: string, scriptKind?: ScriptKind): SourceFile; updateDocumentWithKey(fileName: string, path: Path, compilationSettings: CompilerOptions, key: DocumentRegistryBucketKey, scriptSnapshot: IScriptSnapshot, version: string, scriptKind?: ScriptKind): SourceFile; getKeyForCompilationSettings(settings: CompilerOptions): DocumentRegistryBucketKey; /** * Informs the DocumentRegistry that a file is not needed any longer. * * Note: It is not allowed to call release on a SourceFile that was not acquired from * this registry originally. * * @param fileName The name of the file to be released * @param compilationSettings The compilation settings used to acquire the file */ releaseDocument(fileName: string, compilationSettings: CompilerOptions): void; releaseDocumentWithKey(path: Path, key: DocumentRegistryBucketKey): void; reportStats(): string; } type DocumentRegistryBucketKey = string & { __bucketKey: any; }; function createDocumentRegistry(useCaseSensitiveFileNames?: boolean, currentDirectory?: string): DocumentRegistry; } declare namespace ts { function preProcessFile(sourceText: string, readImportFiles?: boolean, detectJavaScriptImports?: boolean): PreProcessedFileInfo; } declare namespace ts { interface TranspileOptions { compilerOptions?: CompilerOptions; fileName?: string; reportDiagnostics?: boolean; moduleName?: string; renamedDependencies?: MapLike<string>; transformers?: CustomTransformers; } interface TranspileOutput { outputText: string; diagnostics?: Diagnostic[]; sourceMapText?: string; } function transpileModule(input: string, transpileOptions: TranspileOptions): TranspileOutput; function transpile(input: string, compilerOptions?: CompilerOptions, fileName?: string, diagnostics?: Diagnostic[], moduleName?: string): string; } declare namespace ts { /** The version of the language service API */ const servicesVersion = "0.8"; function 
toEditorSettings(options: EditorOptions | EditorSettings): EditorSettings; function displayPartsToString(displayParts: SymbolDisplayPart[] | undefined): string; function getDefaultCompilerOptions(): CompilerOptions; function getSupportedCodeFixes(): string[]; function createLanguageServiceSourceFile(fileName: string, scriptSnapshot: IScriptSnapshot, scriptTarget: ScriptTarget, version: string, setNodeParents: boolean, scriptKind?: ScriptKind): SourceFile; function updateLanguageServiceSourceFile(sourceFile: SourceFile, scriptSnapshot: IScriptSnapshot, version: string, textChangeRange: TextChangeRange | undefined, aggressiveChecks?: boolean): SourceFile; function createLanguageService(host: LanguageServiceHost, documentRegistry?: DocumentRegistry, syntaxOnlyOrLanguageServiceMode?: boolean | LanguageServiceMode): LanguageService; /** * Get the path of the default library files (lib.d.ts) as distributed with the typescript * node package. * The functionality is not supported if the ts module is consumed outside of a node module. */ function getDefaultLibFilePath(options: CompilerOptions): string; } declare namespace ts { /** * Transform one or more nodes using the supplied transformers. * @param source A single `Node` or an array of `Node` objects. * @param transformers An array of `TransformerFactory` callbacks used to process the transformation. * @param compilerOptions Optional compiler options. 
*/ function transform<T extends Node>(source: T | T[], transformers: TransformerFactory<T>[], compilerOptions?: CompilerOptions): TransformationResult<T>; } declare namespace ts.server { interface CompressedData { length: number; compressionKind: string; data: any; } type RequireResult = { module: {}; error: undefined; } | { module: undefined; error: { stack?: string; message?: string; }; }; interface ServerHost extends System { watchFile(path: string, callback: FileWatcherCallback, pollingInterval?: number, options?: WatchOptions): FileWatcher; watchDirectory(path: string, callback: DirectoryWatcherCallback, recursive?: boolean, options?: WatchOptions): FileWatcher; setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): any; clearTimeout(timeoutId: any): void; setImmediate(callback: (...args: any[]) => void, ...args: any[]): any; clearImmediate(timeoutId: any): void; gc?(): void; trace?(s: string): void; require?(initialPath: string, moduleName: string): RequireResult; } } declare namespace ts.server { enum LogLevel { terse = 0, normal = 1, requestTime = 2, verbose = 3 } const emptyArray: SortedReadonlyArray<never>; interface Logger { close(): void; hasLevel(level: LogLevel): boolean; loggingEnabled(): boolean; perftrc(s: string): void; info(s: string): void; startGroup(): void; endGroup(): void; msg(s: string, type?: Msg): void; getLogFileName(): string | undefined; } enum Msg { Err = "Err", Info = "Info", Perf = "Perf" } namespace Msg { /** @deprecated Only here for backwards-compatibility. Prefer just `Msg`. 
*/ type Types = Msg; } function createInstallTypingsRequest(project: Project, typeAcquisition: TypeAcquisition, unresolvedImports: SortedReadonlyArray<string>, cachePath?: string): DiscoverTypings; namespace Errors { function ThrowNoProject(): never; function ThrowProjectLanguageServiceDisabled(): never; function ThrowProjectDoesNotContainDocument(fileName: string, project: Project): never; } type NormalizedPath = string & { __normalizedPathTag: any; }; function toNormalizedPath(fileName: string): NormalizedPath; function normalizedPathToPath(normalizedPath: NormalizedPath, currentDirectory: string, getCanonicalFileName: (f: string) => string): Path; function asNormalizedPath(fileName: string): NormalizedPath; interface NormalizedPathMap<T> { get(path: NormalizedPath): T | undefined; set(path: NormalizedPath, value: T): void; contains(path: NormalizedPath): boolean; remove(path: NormalizedPath): void; } function createNormalizedPathMap<T>(): NormalizedPathMap<T>; function isInferredProjectName(name: string): boolean; function makeInferredProjectName(counter: number): string; function createSortedArray<T>(): SortedArray<T>; } /** * Declaration module describing the TypeScript Server protocol */ declare namespace ts.server.protocol { enum CommandTypes { JsxClosingTag = "jsxClosingTag", Brace = "brace", BraceCompletion = "braceCompletion", GetSpanOfEnclosingComment = "getSpanOfEnclosingComment", Change = "change", Close = "close", /** @deprecated Prefer CompletionInfo -- see comment on CompletionsResponse */ Completions = "completions", CompletionInfo = "completionInfo", CompletionDetails = "completionEntryDetails", CompileOnSaveAffectedFileList = "compileOnSaveAffectedFileList", CompileOnSaveEmitFile = "compileOnSaveEmitFile", Configure = "configure", Definition = "definition", DefinitionAndBoundSpan = "definitionAndBoundSpan", Implementation = "implementation", Exit = "exit", Format = "format", Formatonkey = "formatonkey", Geterr = "geterr", GeterrForProject = 
"geterrForProject", SemanticDiagnosticsSync = "semanticDiagnosticsSync", SyntacticDiagnosticsSync = "syntacticDiagnosticsSync", SuggestionDiagnosticsSync = "suggestionDiagnosticsSync", NavBar = "navbar", Navto = "navto", NavTree = "navtree", NavTreeFull = "navtree-full", /** @deprecated */ Occurrences = "occurrences", DocumentHighlights = "documentHighlights", Open = "open", Quickinfo = "quickinfo", References = "references", Reload = "reload", Rename = "rename", Saveto = "saveto", SignatureHelp = "signatureHelp", Status = "status", TypeDefinition = "typeDefinition", ProjectInfo = "projectInfo", ReloadProjects = "reloadProjects", Unknown = "unknown", OpenExternalProject = "openExternalProject", OpenExternalProjects = "openExternalProjects", CloseExternalProject = "closeExternalProject", UpdateOpen = "updateOpen", GetOutliningSpans = "getOutliningSpans", TodoComments = "todoComments", Indentation = "indentation", DocCommentTemplate = "docCommentTemplate", CompilerOptionsForInferredProjects = "compilerOptionsForInferredProjects", GetCodeFixes = "getCodeFixes", GetCombinedCodeFix = "getCombinedCodeFix", ApplyCodeActionCommand = "applyCodeActionCommand", GetSupportedCodeFixes = "getSupportedCodeFixes", GetApplicableRefactors = "getApplicableRefactors", GetEditsForRefactor = "getEditsForRefactor", OrganizeImports = "organizeImports", GetEditsForFileRename = "getEditsForFileRename", ConfigurePlugin = "configurePlugin", SelectionRange = "selectionRange", ToggleLineComment = "toggleLineComment", ToggleMultilineComment = "toggleMultilineComment", CommentSelection = "commentSelection", UncommentSelection = "uncommentSelection", PrepareCallHierarchy = "prepareCallHierarchy", ProvideCallHierarchyIncomingCalls = "provideCallHierarchyIncomingCalls", ProvideCallHierarchyOutgoingCalls = "provideCallHierarchyOutgoingCalls" } /** * A TypeScript Server message */ interface Message { /** * Sequence number of the message */ seq: number; /** * One of "request", "response", or "event" */ 
type: "request" | "response" | "event"; } /** * Client-initiated request message */ interface Request extends Message { type: "request"; /** * The command to execute */ command: string; /** * Object containing arguments for the command */ arguments?: any; } /** * Request to reload the project structure for all the opened files */ interface ReloadProjectsRequest extends Message { command: CommandTypes.ReloadProjects; } /** * Server-initiated event message */ interface Event extends Message { type: "event"; /** * Name of event */ event: string; /** * Event-specific information */ body?: any; } /** * Response by server to client request message. */ interface Response extends Message { type: "response"; /** * Sequence number of the request message. */ request_seq: number; /** * Outcome of the request. */ success: boolean; /** * The command requested. */ command: string; /** * If success === false, this should always be provided. * Otherwise, may (or may not) contain a success message. */ message?: string; /** * Contains message body if success === true. */ body?: any; /** * Contains extra information that plugin can include to be passed on */ metadata?: unknown; /** * Exposes information about the performance of this request-response pair. */ performanceData?: PerformanceData; } interface PerformanceData { /** * Time spent updating the program graph, in milliseconds. */ updateGraphDurationMs?: number; /** * The time spent creating or updating the auto-import program, in milliseconds. */ createAutoImportProviderProgramDurationMs?: number; } /** * Arguments for FileRequest messages. */ interface FileRequestArgs { /** * The file for the request (absolute pathname required). */ file: string; projectFileName?: string; } interface StatusRequest extends Request { command: CommandTypes.Status; } interface StatusResponseBody { /** * The TypeScript version (`ts.version`). 
*/ version: string; } /** * Response to StatusRequest */ interface StatusResponse extends Response { body: StatusResponseBody; } /** * Requests a JS Doc comment template for a given position */ interface DocCommentTemplateRequest extends FileLocationRequest { command: CommandTypes.DocCommentTemplate; } /** * Response to DocCommentTemplateRequest */ interface DocCommandTemplateResponse extends Response { body?: TextInsertion; } /** * A request to get TODO comments from the file */ interface TodoCommentRequest extends FileRequest { command: CommandTypes.TodoComments; arguments: TodoCommentRequestArgs; } /** * Arguments for TodoCommentRequest request. */ interface TodoCommentRequestArgs extends FileRequestArgs { /** * Array of target TodoCommentDescriptors that describes TODO comments to be found */ descriptors: TodoCommentDescriptor[]; } /** * Response for TodoCommentRequest request. */ interface TodoCommentsResponse extends Response { body?: TodoComment[]; } /** * A request to determine if the caret is inside a comment. */ interface SpanOfEnclosingCommentRequest extends FileLocationRequest { command: CommandTypes.GetSpanOfEnclosingComment; arguments: SpanOfEnclosingCommentRequestArgs; } interface SpanOfEnclosingCommentRequestArgs extends FileLocationRequestArgs { /** * Requires that the enclosing span be a multi-line comment, or else the request returns undefined. */ onlyMultiLine: boolean; } /** * Request to obtain outlining spans in file. */ interface OutliningSpansRequest extends FileRequest { command: CommandTypes.GetOutliningSpans; } interface OutliningSpan { /** The span of the document to actually collapse. */ textSpan: TextSpan; /** The span of the document to display when the user hovers over the collapsed span. */ hintSpan: TextSpan; /** The text to display in the editor for the collapsed region. */ bannerText: string; /** * Whether or not this region should be automatically collapsed when * the 'Collapse to Definitions' command is invoked. 
*/ autoCollapse: boolean; /** * Classification of the contents of the span */ kind: OutliningSpanKind; } /** * Response to OutliningSpansRequest request. */ interface OutliningSpansResponse extends Response { body?: OutliningSpan[]; } /** * A request to get indentation for a location in file */ interface IndentationRequest extends FileLocationRequest { command: CommandTypes.Indentation; arguments: IndentationRequestArgs; } /** * Response for IndentationRequest request. */ interface IndentationResponse extends Response { body?: IndentationResult; } /** * Indentation result representing where indentation should be placed */ interface IndentationResult { /** * The base position in the document that the indent should be relative to */ position: number; /** * The number of columns the indent should be at relative to the position's column. */ indentation: number; } /** * Arguments for IndentationRequest request. */ interface IndentationRequestArgs extends FileLocationRequestArgs { /** * An optional set of settings to be used when computing indentation. * If argument is omitted - then it will use settings for file that were previously set via 'configure' request or global settings. */ options?: EditorSettings; } /** * Arguments for ProjectInfoRequest request. */ interface ProjectInfoRequestArgs extends FileRequestArgs { /** * Indicate if the file name list of the project is needed */ needFileNameList: boolean; } /** * A request to get the project information of the current file. */ interface ProjectInfoRequest extends Request { command: CommandTypes.ProjectInfo; arguments: ProjectInfoRequestArgs; } /** * A request to retrieve compiler options diagnostics for a project */ interface CompilerOptionsDiagnosticsRequest extends Request { arguments: CompilerOptionsDiagnosticsRequestArgs; } /** * Arguments for CompilerOptionsDiagnosticsRequest request. */ interface CompilerOptionsDiagnosticsRequestArgs { /** * Name of the project to retrieve compiler options diagnostics. 
*/ projectFileName: string; } /** * Response message body for "projectInfo" request */ interface ProjectInfo { /** * For configured project, this is the normalized path of the 'tsconfig.json' file * For inferred project, this is undefined */ configFileName: string; /** * The list of normalized file name in the project, including 'lib.d.ts' */ fileNames?: string[]; /** * Indicates if the project has a active language service instance */ languageServiceDisabled?: boolean; } /** * Represents diagnostic info that includes location of diagnostic in two forms * - start position and length of the error span * - startLocation and endLocation - a pair of Location objects that store start/end line and offset of the error span. */ interface DiagnosticWithLinePosition { message: string; start: number; length: number; startLocation: Location; endLocation: Location; category: string; code: number; /** May store more in future. For now, this will simply be `true` to indicate when a diagnostic is an unused-identifier diagnostic. */ reportsUnnecessary?: {}; reportsDeprecated?: {}; relatedInformation?: DiagnosticRelatedInformation[]; } /** * Response message for "projectInfo" request */ interface ProjectInfoResponse extends Response { body?: ProjectInfo; } /** * Request whose sole parameter is a file name. */ interface FileRequest extends Request { arguments: FileRequestArgs; } /** * Instances of this interface specify a location in a source file: * (file, line, character offset), where line and character offset are 1-based. */ interface FileLocationRequestArgs extends FileRequestArgs { /** * The line number for the request (1-based). */ line: number; /** * The character offset (on the line) for the request (1-based). */ offset: number; } type FileLocationOrRangeRequestArgs = FileLocationRequestArgs | FileRangeRequestArgs; /** * Request refactorings at a given position or selection area. 
*/ interface GetApplicableRefactorsRequest extends Request { command: CommandTypes.GetApplicableRefactors; arguments: GetApplicableRefactorsRequestArgs; } type GetApplicableRefactorsRequestArgs = FileLocationOrRangeRequestArgs & { triggerReason?: RefactorTriggerReason; }; type RefactorTriggerReason = "implicit" | "invoked"; /** * Response is a list of available refactorings. * Each refactoring exposes one or more "Actions"; a user selects one action to invoke a refactoring */ interface GetApplicableRefactorsResponse extends Response { body?: ApplicableRefactorInfo[]; } /** * A set of one or more available refactoring actions, grouped under a parent refactoring. */ interface ApplicableRefactorInfo { /** * The programmatic name of the refactoring */ name: string; /** * A description of this refactoring category to show to the user. * If the refactoring gets inlined (see below), this text will not be visible. */ description: string; /** * Inlineable refactorings can have their actions hoisted out to the top level * of a context menu. Non-inlineanable refactorings should always be shown inside * their parent grouping. * * If not specified, this value is assumed to be 'true' */ inlineable?: boolean; actions: RefactorActionInfo[]; } /** * Represents a single refactoring action - for example, the "Extract Method..." refactor might * offer several actions, each corresponding to a surround class or closure to extract into. */ interface RefactorActionInfo { /** * The programmatic name of the refactoring action */ name: string; /** * A description of this refactoring action to show to the user. * If the parent refactoring is inlined away, this will be the only text shown, * so this description should make sense by itself if the parent is inlineable=true */ description: string; /** * A message to show to the user if the refactoring cannot be applied in * the current context. 
*/ notApplicableReason?: string; } interface GetEditsForRefactorRequest extends Request { command: CommandTypes.GetEditsForRefactor; arguments: GetEditsForRefactorRequestArgs; } /** * Request the edits that a particular refactoring action produces. * Callers must specify the name of the refactor and the name of the action. */ type GetEditsForRefactorRequestArgs = FileLocationOrRangeRequestArgs & { refactor: string; action: string; }; interface GetEditsForRefactorResponse extends Response { body?: RefactorEditInfo; } interface RefactorEditInfo { edits: FileCodeEdits[]; /** * An optional location where the editor should start a rename operation once * the refactoring edits have been applied */ renameLocation?: Location; renameFilename?: string; } /** * Organize imports by: * 1) Removing unused imports * 2) Coalescing imports from the same module * 3) Sorting imports */ interface OrganizeImportsRequest extends Request { command: CommandTypes.OrganizeImports; arguments: OrganizeImportsRequestArgs; } type OrganizeImportsScope = GetCombinedCodeFixScope; interface OrganizeImportsRequestArgs { scope: OrganizeImportsScope; } interface OrganizeImportsResponse extends Response { body: readonly FileCodeEdits[]; } interface GetEditsForFileRenameRequest extends Request { command: CommandTypes.GetEditsForFileRename; arguments: GetEditsForFileRenameRequestArgs; } /** Note: Paths may also be directories. */ interface GetEditsForFileRenameRequestArgs { readonly oldFilePath: string; readonly newFilePath: string; } interface GetEditsForFileRenameResponse extends Response { body: readonly FileCodeEdits[]; } /** * Request for the available codefixes at a specific position. 
*/ interface CodeFixRequest extends Request { command: CommandTypes.GetCodeFixes; arguments: CodeFixRequestArgs; } interface GetCombinedCodeFixRequest extends Request { command: CommandTypes.GetCombinedCodeFix; arguments: GetCombinedCodeFixRequestArgs; } interface GetCombinedCodeFixResponse extends Response { body: CombinedCodeActions; } interface ApplyCodeActionCommandRequest extends Request { command: CommandTypes.ApplyCodeActionCommand; arguments: ApplyCodeActionCommandRequestArgs; } interface ApplyCodeActionCommandResponse extends Response { } interface FileRangeRequestArgs extends FileRequestArgs { /** * The line number for the request (1-based). */ startLine: number; /** * The character offset (on the line) for the request (1-based). */ startOffset: number; /** * The line number for the request (1-based). */ endLine: number; /** * The character offset (on the line) for the request (1-based). */ endOffset: number; } /** * Instances of this interface specify errorcodes on a specific location in a sourcefile. */ interface CodeFixRequestArgs extends FileRangeRequestArgs { /** * Errorcodes we want to get the fixes for. */ errorCodes: readonly number[]; } interface GetCombinedCodeFixRequestArgs { scope: GetCombinedCodeFixScope; fixId: {}; } interface GetCombinedCodeFixScope { type: "file"; args: FileRequestArgs; } interface ApplyCodeActionCommandRequestArgs { /** May also be an array of commands. */ command: {}; } /** * Response for GetCodeFixes request. */ interface GetCodeFixesResponse extends Response { body?: CodeAction[]; } /** * A request whose arguments specify a file location (file, line, col). */ interface FileLocationRequest extends FileRequest { arguments: FileLocationRequestArgs; } /** * A request to get codes of supported code fixes. */ interface GetSupportedCodeFixesRequest extends Request { command: CommandTypes.GetSupportedCodeFixes; } /** * A response for GetSupportedCodeFixesRequest request. 
*/ interface GetSupportedCodeFixesResponse extends Response { /** * List of error codes supported by the server. */ body?: string[]; } /** * Arguments in document highlight request; include: filesToSearch, file, * line, offset. */ interface DocumentHighlightsRequestArgs extends FileLocationRequestArgs { /** * List of files to search for document highlights. */ filesToSearch: string[]; } /** * Go to definition request; value of command field is * "definition". Return response giving the file locations that * define the symbol found in file at location line, col. */ interface DefinitionRequest extends FileLocationRequest { command: CommandTypes.Definition; } interface DefinitionAndBoundSpanRequest extends FileLocationRequest { readonly command: CommandTypes.DefinitionAndBoundSpan; } interface DefinitionAndBoundSpanResponse extends Response { readonly body: DefinitionInfoAndBoundSpan; } /** * Go to type request; value of command field is * "typeDefinition". Return response giving the file locations that * define the type for the symbol found in file at location line, col. */ interface TypeDefinitionRequest extends FileLocationRequest { command: CommandTypes.TypeDefinition; } /** * Go to implementation request; value of command field is * "implementation". Return response giving the file locations that * implement the symbol found in file at location line, col. */ interface ImplementationRequest extends FileLocationRequest { command: CommandTypes.Implementation; } /** * Location in source code expressed as (one-based) line and (one-based) column offset. */ interface Location { line: number; offset: number; } /** * Object found in response messages defining a span of text in source code. */ interface TextSpan { /** * First character of the definition. */ start: Location; /** * One character past last character of the definition. */ end: Location; } /** * Object found in response messages defining a span of text in a specific source file. 
*/ interface FileSpan extends TextSpan { /** * File containing text span. */ file: string; } interface TextSpanWithContext extends TextSpan { contextStart?: Location; contextEnd?: Location; } interface FileSpanWithContext extends FileSpan, TextSpanWithContext { } interface DefinitionInfoAndBoundSpan { definitions: readonly FileSpanWithContext[]; textSpan: TextSpan; } /** * Definition response message. Gives text range for definition. */ interface DefinitionResponse extends Response { body?: FileSpanWithContext[]; } interface DefinitionInfoAndBoundSpanResponse extends Response { body?: DefinitionInfoAndBoundSpan; } /** @deprecated Use `DefinitionInfoAndBoundSpanResponse` instead. */ type DefinitionInfoAndBoundSpanReponse = DefinitionInfoAndBoundSpanResponse; /** * Definition response message. Gives text range for definition. */ interface TypeDefinitionResponse extends Response { body?: FileSpanWithContext[]; } /** * Implementation response message. Gives text range for implementations. */ interface ImplementationResponse extends Response { body?: FileSpanWithContext[]; } /** * Request to get brace completion for a location in the file. */ interface BraceCompletionRequest extends FileLocationRequest { command: CommandTypes.BraceCompletion; arguments: BraceCompletionRequestArgs; } /** * Argument for BraceCompletionRequest request. */ interface BraceCompletionRequestArgs extends FileLocationRequestArgs { /** * Kind of opening brace */ openingBrace: string; } interface JsxClosingTagRequest extends FileLocationRequest { readonly command: CommandTypes.JsxClosingTag; readonly arguments: JsxClosingTagRequestArgs; } interface JsxClosingTagRequestArgs extends FileLocationRequestArgs { } interface JsxClosingTagResponse extends Response { readonly body: TextInsertion; } /** * @deprecated * Get occurrences request; value of command field is * "occurrences". Return response giving spans that are relevant * in the file at a given line and column. 
*/ interface OccurrencesRequest extends FileLocationRequest { command: CommandTypes.Occurrences; } /** @deprecated */ interface OccurrencesResponseItem extends FileSpanWithContext { /** * True if the occurrence is a write location, false otherwise. */ isWriteAccess: boolean; /** * True if the occurrence is in a string, undefined otherwise; */ isInString?: true; } /** @deprecated */ interface OccurrencesResponse extends Response { body?: OccurrencesResponseItem[]; } /** * Get document highlights request; value of command field is * "documentHighlights". Return response giving spans that are relevant * in the file at a given line and column. */ interface DocumentHighlightsRequest extends FileLocationRequest { command: CommandTypes.DocumentHighlights; arguments: DocumentHighlightsRequestArgs; } /** * Span augmented with extra information that denotes the kind of the highlighting to be used for span. */ interface HighlightSpan extends TextSpanWithContext { kind: HighlightSpanKind; } /** * Represents a set of highlight spans for a given name */ interface DocumentHighlightsItem { /** * File containing highlight spans. */ file: string; /** * Spans to highlight in file. */ highlightSpans: HighlightSpan[]; } /** * Response for a DocumentHighlightsRequest request. */ interface DocumentHighlightsResponse extends Response { body?: DocumentHighlightsItem[]; } /** * Find references request; value of command field is * "references". Return response giving the file locations that * reference the symbol found in file at location line, col. */ interface ReferencesRequest extends FileLocationRequest { command: CommandTypes.References; } interface ReferencesResponseItem extends FileSpanWithContext { /** Text of line containing the reference. Including this * with the response avoids latency of editor loading files * to show text of reference line (the server already has * loaded the referencing files). */ lineText: string; /** * True if reference is a write location, false otherwise.
*/ isWriteAccess: boolean; /** * True if reference is a definition, false otherwise. */ isDefinition: boolean; } /** * The body of a "references" response message. */ interface ReferencesResponseBody { /** * The file locations referencing the symbol. */ refs: readonly ReferencesResponseItem[]; /** * The name of the symbol. */ symbolName: string; /** * The start character offset of the symbol (on the line provided by the references request). */ symbolStartOffset: number; /** * The full display name of the symbol. */ symbolDisplayString: string; } /** * Response to "references" request. */ interface ReferencesResponse extends Response { body?: ReferencesResponseBody; } /** * Argument for RenameRequest request. */ interface RenameRequestArgs extends FileLocationRequestArgs { /** * Should text at specified location be found/changed in comments? */ findInComments?: boolean; /** * Should text at specified location be found/changed in strings? */ findInStrings?: boolean; } /** * Rename request; value of command field is "rename". Return * response giving the file locations that reference the symbol * found in file at location line, col. Also return full display * name of the symbol so that client can print it unambiguously. */ interface RenameRequest extends FileLocationRequest { command: CommandTypes.Rename; arguments: RenameRequestArgs; } /** * Information about the item to be renamed. */ type RenameInfo = RenameInfoSuccess | RenameInfoFailure; interface RenameInfoSuccess { /** * True if item can be renamed. */ canRename: true; /** * File or directory to rename. * If set, `getEditsForFileRename` should be called instead of `findRenameLocations`. */ fileToRename?: string; /** * Display name of the item to be renamed. */ displayName: string; /** * Full display name of item to be renamed. */ fullDisplayName: string; /** * The items's kind (such as 'className' or 'parameterName' or plain 'text'). 
*/ kind: ScriptElementKind; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers: string; /** Span of text to rename. */ triggerSpan: TextSpan; } interface RenameInfoFailure { canRename: false; /** * Error message if item can not be renamed. */ localizedErrorMessage: string; } /** * A group of text spans, all in 'file'. */ interface SpanGroup { /** The file to which the spans apply */ file: string; /** The text spans in this group */ locs: RenameTextSpan[]; } interface RenameTextSpan extends TextSpanWithContext { readonly prefixText?: string; readonly suffixText?: string; } interface RenameResponseBody { /** * Information about the item to be renamed. */ info: RenameInfo; /** * An array of span groups (one per file) that refer to the item to be renamed. */ locs: readonly SpanGroup[]; } /** * Rename response message. */ interface RenameResponse extends Response { body?: RenameResponseBody; } /** * Represents a file in external project. * External project is project whose set of files, compilation options and open\close state * is maintained by the client (i.e. if all this data come from .csproj file in Visual Studio). * External project will exist even if all files in it are closed and should be closed explicitly. * If external project includes one or more tsconfig.json/jsconfig.json files then tsserver will * create configured project for every config file but will maintain a link that these projects were created * as a result of opening external project so they should be removed once external project is closed. */ interface ExternalFile { /** * Name of the file */ fileName: string; /** * Script kind of the file */ scriptKind?: ScriptKindName | ts.ScriptKind; /** * Whether file has mixed content (i.e.
.cshtml file that combines html markup with C#/JavaScript) */ hasMixedContent?: boolean; /** * Content of the file */ content?: string; } /** * Represent an external project */ interface ExternalProject { /** * Project name */ projectFileName: string; /** * List of root files in project */ rootFiles: ExternalFile[]; /** * Compiler options for the project */ options: ExternalProjectCompilerOptions; /** * @deprecated typingOptions. Use typeAcquisition instead */ typingOptions?: TypeAcquisition; /** * Explicitly specified type acquisition for the project */ typeAcquisition?: TypeAcquisition; } interface CompileOnSaveMixin { /** * If compile on save is enabled for the project */ compileOnSave?: boolean; } /** * For external projects, some of the project settings are sent together with * compiler settings. */ type ExternalProjectCompilerOptions = CompilerOptions & CompileOnSaveMixin & WatchOptions; interface FileWithProjectReferenceRedirectInfo { /** * Name of file */ fileName: string; /** * True if the file is primarily included in a referenced project */ isSourceOfProjectReferenceRedirect: boolean; } /** * Represents a set of changes that happen in project */ interface ProjectChanges { /** * List of added files */ added: string[] | FileWithProjectReferenceRedirectInfo[]; /** * List of removed files */ removed: string[] | FileWithProjectReferenceRedirectInfo[]; /** * List of updated files */ updated: string[] | FileWithProjectReferenceRedirectInfo[]; /** * List of files that have had their project reference redirect status updated * Only provided when the synchronizeProjectList request has includeProjectReferenceRedirectInfo set to true */ updatedRedirects?: FileWithProjectReferenceRedirectInfo[]; } /** * Information found in a configure request. */ interface ConfigureRequestArguments { /** * Information about the host, for example 'Emacs 24.4' or * 'Sublime Text version 3075' */ hostInfo?: string; /** * If present, tab settings apply only to this file. 
*/ file?: string; /** * The format options to use during formatting and other code editing features. */ formatOptions?: FormatCodeSettings; preferences?: UserPreferences; /** * The host's additional supported .js file extensions */ extraFileExtensions?: FileExtensionInfo[]; watchOptions?: WatchOptions; } enum WatchFileKind { FixedPollingInterval = "FixedPollingInterval", PriorityPollingInterval = "PriorityPollingInterval", DynamicPriorityPolling = "DynamicPriorityPolling", UseFsEvents = "UseFsEvents", UseFsEventsOnParentDirectory = "UseFsEventsOnParentDirectory" } enum WatchDirectoryKind { UseFsEvents = "UseFsEvents", FixedPollingInterval = "FixedPollingInterval", DynamicPriorityPolling = "DynamicPriorityPolling" } enum PollingWatchKind { FixedInterval = "FixedInterval", PriorityInterval = "PriorityInterval", DynamicPriority = "DynamicPriority" } interface WatchOptions { watchFile?: WatchFileKind | ts.WatchFileKind; watchDirectory?: WatchDirectoryKind | ts.WatchDirectoryKind; fallbackPolling?: PollingWatchKind | ts.PollingWatchKind; synchronousWatchDirectory?: boolean; [option: string]: CompilerOptionsValue | undefined; } /** * Configure request; value of command field is "configure". Specifies * host information, such as host type, tab size, and indent size. */ interface ConfigureRequest extends Request { command: CommandTypes.Configure; arguments: ConfigureRequestArguments; } /** * Response to "configure" request. This is just an acknowledgement, so * no body field is required. 
*/ interface ConfigureResponse extends Response { } interface ConfigurePluginRequestArguments { pluginName: string; configuration: any; } interface ConfigurePluginRequest extends Request { command: CommandTypes.ConfigurePlugin; arguments: ConfigurePluginRequestArguments; } interface ConfigurePluginResponse extends Response { } interface SelectionRangeRequest extends FileRequest { command: CommandTypes.SelectionRange; arguments: SelectionRangeRequestArgs; } interface SelectionRangeRequestArgs extends FileRequestArgs { locations: Location[]; } interface SelectionRangeResponse extends Response { body?: SelectionRange[]; } interface SelectionRange { textSpan: TextSpan; parent?: SelectionRange; } interface ToggleLineCommentRequest extends FileRequest { command: CommandTypes.ToggleLineComment; arguments: FileRangeRequestArgs; } interface ToggleMultilineCommentRequest extends FileRequest { command: CommandTypes.ToggleMultilineComment; arguments: FileRangeRequestArgs; } interface CommentSelectionRequest extends FileRequest { command: CommandTypes.CommentSelection; arguments: FileRangeRequestArgs; } interface UncommentSelectionRequest extends FileRequest { command: CommandTypes.UncommentSelection; arguments: FileRangeRequestArgs; } /** * Information found in an "open" request. */ interface OpenRequestArgs extends FileRequestArgs { /** * Used when a version of the file content is known to be more up to date than the one on disk. * Then the known content will be used upon opening instead of the disk copy */ fileContent?: string; /** * Used to specify the script kind of the file explicitly. It could be one of the following: * "TS", "JS", "TSX", "JSX" */ scriptKindName?: ScriptKindName; /** * Used to limit the searching for project config file. If given the searching will stop at this * root path; otherwise it will go all the way up to the dist root path. 
*/ projectRootPath?: string; } type ScriptKindName = "TS" | "JS" | "TSX" | "JSX"; /** * Open request; value of command field is "open". Notify the * server that the client has file open. The server will not * monitor the filesystem for changes in this file and will assume * that the client is updating the server (using the change and/or * reload messages) when the file changes. Server does not currently * send a response to an open request. */ interface OpenRequest extends Request { command: CommandTypes.Open; arguments: OpenRequestArgs; } /** * Request to open or update external project */ interface OpenExternalProjectRequest extends Request { command: CommandTypes.OpenExternalProject; arguments: OpenExternalProjectArgs; } /** * Arguments to OpenExternalProjectRequest request */ type OpenExternalProjectArgs = ExternalProject; /** * Request to open multiple external projects */ interface OpenExternalProjectsRequest extends Request { command: CommandTypes.OpenExternalProjects; arguments: OpenExternalProjectsArgs; } /** * Arguments to OpenExternalProjectsRequest */ interface OpenExternalProjectsArgs { /** * List of external projects to open or update */ projects: ExternalProject[]; } /** * Response to OpenExternalProjectRequest request. This is just an acknowledgement, so * no body field is required. */ interface OpenExternalProjectResponse extends Response { } /** * Response to OpenExternalProjectsRequest request. This is just an acknowledgement, so * no body field is required. */ interface OpenExternalProjectsResponse extends Response { } /** * Request to close external project. */ interface CloseExternalProjectRequest extends Request { command: CommandTypes.CloseExternalProject; arguments: CloseExternalProjectRequestArgs; } /** * Arguments to CloseExternalProjectRequest request */ interface CloseExternalProjectRequestArgs { /** * Name of the project to close */ projectFileName: string; } /** * Response to CloseExternalProjectRequest request. 
This is just an acknowledgement, so * no body field is required. */ interface CloseExternalProjectResponse extends Response { } /** * Request to synchronize list of open files with the client */ interface UpdateOpenRequest extends Request { command: CommandTypes.UpdateOpen; arguments: UpdateOpenRequestArgs; } /** * Arguments to UpdateOpenRequest */ interface UpdateOpenRequestArgs { /** * List of newly open files */ openFiles?: OpenRequestArgs[]; /** * List of open files files that were changes */ changedFiles?: FileCodeEdits[]; /** * List of files that were closed */ closedFiles?: string[]; } /** * External projects have a typeAcquisition option so they need to be added separately to compiler options for inferred projects. */ type InferredProjectCompilerOptions = ExternalProjectCompilerOptions & TypeAcquisition; /** * Request to set compiler options for inferred projects. * External projects are opened / closed explicitly. * Configured projects are opened when user opens loose file that has 'tsconfig.json' or 'jsconfig.json' anywhere in one of containing folders. * This configuration file will be used to obtain a list of files and configuration settings for the project. * Inferred projects are created when user opens a loose file that is not the part of external project * or configured project and will contain only open file and transitive closure of referenced files if 'useOneInferredProject' is false, * or all open loose files and its transitive closure of referenced files if 'useOneInferredProject' is true. */ interface SetCompilerOptionsForInferredProjectsRequest extends Request { command: CommandTypes.CompilerOptionsForInferredProjects; arguments: SetCompilerOptionsForInferredProjectsArgs; } /** * Argument for SetCompilerOptionsForInferredProjectsRequest request. */ interface SetCompilerOptionsForInferredProjectsArgs { /** * Compiler options to be used with inferred projects. 
*/ options: InferredProjectCompilerOptions; /** * Specifies the project root path used to scope compiler options. * It is an error to provide this property if the server has not been started with * `useInferredProjectPerProjectRoot` enabled. */ projectRootPath?: string; } /** * Response to SetCompilerOptionsForInferredProjectsResponse request. This is just an acknowledgement, so * no body field is required. */ interface SetCompilerOptionsForInferredProjectsResponse extends Response { } /** * Exit request; value of command field is "exit". Ask the server process * to exit. */ interface ExitRequest extends Request { command: CommandTypes.Exit; } /** * Close request; value of command field is "close". Notify the * server that the client has closed a previously open file. If * file is still referenced by open files, the server will resume * monitoring the filesystem for changes to file. Server does not * currently send a response to a close request. */ interface CloseRequest extends FileRequest { command: CommandTypes.Close; } /** * Request to obtain the list of files that should be regenerated if target file is recompiled. * NOTE: this us query-only operation and does not generate any output on disk. */ interface CompileOnSaveAffectedFileListRequest extends FileRequest { command: CommandTypes.CompileOnSaveAffectedFileList; } /** * Contains a list of files that should be regenerated in a project */ interface CompileOnSaveAffectedFileListSingleProject { /** * Project name */ projectFileName: string; /** * List of files names that should be recompiled */ fileNames: string[]; /** * true if project uses outFile or out compiler option */ projectUsesOutFile: boolean; } /** * Response for CompileOnSaveAffectedFileListRequest request; */ interface CompileOnSaveAffectedFileListResponse extends Response { body: CompileOnSaveAffectedFileListSingleProject[]; } /** * Request to recompile the file. All generated outputs (.js, .d.ts or .js.map files) is written on disk. 
*/ interface CompileOnSaveEmitFileRequest extends FileRequest { command: CommandTypes.CompileOnSaveEmitFile; arguments: CompileOnSaveEmitFileRequestArgs; } /** * Arguments for CompileOnSaveEmitFileRequest */ interface CompileOnSaveEmitFileRequestArgs extends FileRequestArgs { /** * if true - then file should be recompiled even if it does not have any changes. */ forced?: boolean; includeLinePosition?: boolean; /** if true - return response as object with emitSkipped and diagnostics */ richResponse?: boolean; } interface CompileOnSaveEmitFileResponse extends Response { body: boolean | EmitResult; } interface EmitResult { emitSkipped: boolean; diagnostics: Diagnostic[] | DiagnosticWithLinePosition[]; } /** * Quickinfo request; value of command field is * "quickinfo". Return response giving a quick type and * documentation string for the symbol found in file at location * line, col. */ interface QuickInfoRequest extends FileLocationRequest { command: CommandTypes.Quickinfo; } /** * Body of QuickInfoResponse. */ interface QuickInfoResponseBody { /** * The symbol's kind (such as 'className' or 'parameterName' or plain 'text'). */ kind: ScriptElementKind; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers: string; /** * Starting file location of symbol. */ start: Location; /** * One past last character of symbol. */ end: Location; /** * Type and kind of symbol. */ displayString: string; /** * Documentation associated with symbol. */ documentation: string; /** * JSDoc tags associated with symbol. */ tags: JSDocTagInfo[]; } /** * Quickinfo response message. */ interface QuickInfoResponse extends Response { body?: QuickInfoResponseBody; } /** * Arguments for format messages. */ interface FormatRequestArgs extends FileLocationRequestArgs { /** * Last line of range for which to format text in file. */ endLine: number; /** * Character offset on last line of range for which to format text in file. */ endOffset: number; /** * Format options to be used. 
*/ options?: FormatCodeSettings; } /** * Format request; value of command field is "format". Return * response giving zero or more edit instructions. The edit * instructions will be sorted in file order. Applying the edit * instructions in reverse to file will result in correctly * reformatted text. */ interface FormatRequest extends FileLocationRequest { command: CommandTypes.Format; arguments: FormatRequestArgs; } /** * Object found in response messages defining an editing * instruction for a span of text in source code. The effect of * this instruction is to replace the text starting at start and * ending one character before end with newText. For an insertion, * the text span is empty. For a deletion, newText is empty. */ interface CodeEdit { /** * First character of the text span to edit. */ start: Location; /** * One character past last character of the text span to edit. */ end: Location; /** * Replace the span defined above with this string (may be * the empty string). */ newText: string; } interface FileCodeEdits { fileName: string; textChanges: CodeEdit[]; } interface CodeFixResponse extends Response { /** The code actions that are available */ body?: CodeFixAction[]; } interface CodeAction { /** Description of the code action to display in the UI of the editor */ description: string; /** Text changes to apply to each file as part of the code action */ changes: FileCodeEdits[]; /** A command is an opaque object that should be passed to `ApplyCodeActionCommandRequestArgs` without modification. */ commands?: {}[]; } interface CombinedCodeActions { changes: readonly FileCodeEdits[]; commands?: readonly {}[]; } interface CodeFixAction extends CodeAction { /** Short name to identify the fix, for use by telemetry. */ fixName: string; /** * If present, one may call 'getCombinedCodeFix' with this fixId. * This may be omitted to indicate that the code fix can't be applied in a group. */ fixId?: {}; /** Should be present if and only if 'fixId' is. 
*/ fixAllDescription?: string; } /** * Format and format on key response message. */ interface FormatResponse extends Response { body?: CodeEdit[]; } /** * Arguments for format on key messages. */ interface FormatOnKeyRequestArgs extends FileLocationRequestArgs { /** * Key pressed (';', '\n', or '}'). */ key: string; options?: FormatCodeSettings; } /** * Format on key request; value of command field is * "formatonkey". Given file location and key typed (as string), * return response giving zero or more edit instructions. The * edit instructions will be sorted in file order. Applying the * edit instructions in reverse to file will result in correctly * reformatted text. */ interface FormatOnKeyRequest extends FileLocationRequest { command: CommandTypes.Formatonkey; arguments: FormatOnKeyRequestArgs; } type CompletionsTriggerCharacter = "." | '"' | "'" | "`" | "/" | "@" | "<" | "#"; /** * Arguments for completions messages. */ interface CompletionsRequestArgs extends FileLocationRequestArgs { /** * Optional prefix to apply to possible completions. */ prefix?: string; /** * Character that was responsible for triggering completion. * Should be `undefined` if a user manually requested completion. */ triggerCharacter?: CompletionsTriggerCharacter; /** * @deprecated Use UserPreferences.includeCompletionsForModuleExports */ includeExternalModuleExports?: boolean; /** * @deprecated Use UserPreferences.includeCompletionsWithInsertText */ includeInsertTextCompletions?: boolean; } /** * Completions request; value of command field is "completions". * Given a file location (file, line, col) and a prefix (which may * be the empty string), return the possible completions that * begin with prefix. */ interface CompletionsRequest extends FileLocationRequest { command: CommandTypes.Completions | CommandTypes.CompletionInfo; arguments: CompletionsRequestArgs; } /** * Arguments for completion details request. 
*/ interface CompletionDetailsRequestArgs extends FileLocationRequestArgs { /** * Names of one or more entries for which to obtain details. */ entryNames: (string | CompletionEntryIdentifier)[]; } interface CompletionEntryIdentifier { name: string; source?: string; } /** * Completion entry details request; value of command field is * "completionEntryDetails". Given a file location (file, line, * col) and an array of completion entry names return more * detailed information for each completion entry. */ interface CompletionDetailsRequest extends FileLocationRequest { command: CommandTypes.CompletionDetails; arguments: CompletionDetailsRequestArgs; } /** * Part of a symbol description. */ interface SymbolDisplayPart { /** * Text of an item describing the symbol. */ text: string; /** * The symbol's kind (such as 'className' or 'parameterName' or plain 'text'). */ kind: string; } /** * An item found in a completion response. */ interface CompletionEntry { /** * The symbol's name. */ name: string; /** * The symbol's kind (such as 'className' or 'parameterName'). */ kind: ScriptElementKind; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers?: string; /** * A string that is used for comparing completion items so that they can be ordered. This * is often the same as the name but may be different in certain circumstances. */ sortText: string; /** * Text to insert instead of `name`. * This is used to support bracketed completions; If `name` might be "a-b" but `insertText` would be `["a-b"]`, * coupled with `replacementSpan` to replace a dotted access with a bracket access. */ insertText?: string; /** * An optional span that indicates the text to be replaced by this completion item. * If present, this span should be used instead of the default one. * It will be set if the required span differs from the one generated by the default replacement behavior. 
*/ replacementSpan?: TextSpan; /** * Indicates whether commiting this completion entry will require additional code actions to be * made to avoid errors. The CompletionEntryDetails will have these actions. */ hasAction?: true; /** * Identifier (not necessarily human-readable) identifying where this completion came from. */ source?: string; /** * If true, this completion should be highlighted as recommended. There will only be one of these. * This will be set when we know the user should write an expression with a certain type and that type is an enum or constructable class. * Then either that enum/class or a namespace containing it will be the recommended symbol. */ isRecommended?: true; /** * If true, this completion was generated from traversing the name table of an unchecked JS file, * and therefore may not be accurate. */ isFromUncheckedFile?: true; /** * If true, this completion was for an auto-import of a module not yet in the program, but listed * in the project package.json. */ isPackageJsonImport?: true; } /** * Additional completion entry details, available on demand */ interface CompletionEntryDetails { /** * The symbol's name. */ name: string; /** * The symbol's kind (such as 'className' or 'parameterName'). */ kind: ScriptElementKind; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers: string; /** * Display parts of the symbol (similar to quick info). */ displayParts: SymbolDisplayPart[]; /** * Documentation strings for the symbol. */ documentation?: SymbolDisplayPart[]; /** * JSDoc tags for the symbol. */ tags?: JSDocTagInfo[]; /** * The associated code actions for this entry */ codeActions?: CodeAction[]; /** * Human-readable description of the `source` from the CompletionEntry. */ source?: SymbolDisplayPart[]; } /** @deprecated Prefer CompletionInfoResponse, which supports several top-level fields in addition to the array of entries. 
*/ interface CompletionsResponse extends Response { body?: CompletionEntry[]; } interface CompletionInfoResponse extends Response { body?: CompletionInfo; } interface CompletionInfo { readonly isGlobalCompletion: boolean; readonly isMemberCompletion: boolean; readonly isNewIdentifierLocation: boolean; /** * In the absence of `CompletionEntry["replacementSpan"]`, the editor may choose whether to use * this span or its default one. If `CompletionEntry["replacementSpan"]` is defined, that span * must be used to commit that completion entry. */ readonly optionalReplacementSpan?: TextSpan; readonly entries: readonly CompletionEntry[]; } interface CompletionDetailsResponse extends Response { body?: CompletionEntryDetails[]; } /** * Signature help information for a single parameter */ interface SignatureHelpParameter { /** * The parameter's name */ name: string; /** * Documentation of the parameter. */ documentation: SymbolDisplayPart[]; /** * Display parts of the parameter. */ displayParts: SymbolDisplayPart[]; /** * Whether the parameter is optional or not. */ isOptional: boolean; } /** * Represents a single signature to show in signature help. */ interface SignatureHelpItem { /** * Whether the signature accepts a variable number of arguments. */ isVariadic: boolean; /** * The prefix display parts. */ prefixDisplayParts: SymbolDisplayPart[]; /** * The suffix display parts. */ suffixDisplayParts: SymbolDisplayPart[]; /** * The separator display parts. */ separatorDisplayParts: SymbolDisplayPart[]; /** * The signature helps items for the parameters. */ parameters: SignatureHelpParameter[]; /** * The signature's documentation */ documentation: SymbolDisplayPart[]; /** * The signature's JSDoc tags */ tags: JSDocTagInfo[]; } /** * Signature help items found in the response of a signature help request. */ interface SignatureHelpItems { /** * The signature help items. 
*/ items: SignatureHelpItem[]; /** * The span for which signature help should appear on a signature */ applicableSpan: TextSpan; /** * The item selected in the set of available help items. */ selectedItemIndex: number; /** * The argument selected in the set of parameters. */ argumentIndex: number; /** * The argument count */ argumentCount: number; } type SignatureHelpTriggerCharacter = "," | "(" | "<"; type SignatureHelpRetriggerCharacter = SignatureHelpTriggerCharacter | ")"; /** * Arguments of a signature help request. */ interface SignatureHelpRequestArgs extends FileLocationRequestArgs { /** * Reason why signature help was invoked. * See each individual possible */ triggerReason?: SignatureHelpTriggerReason; } type SignatureHelpTriggerReason = SignatureHelpInvokedReason | SignatureHelpCharacterTypedReason | SignatureHelpRetriggeredReason; /** * Signals that the user manually requested signature help. * The language service will unconditionally attempt to provide a result. */ interface SignatureHelpInvokedReason { kind: "invoked"; triggerCharacter?: undefined; } /** * Signals that the signature help request came from a user typing a character. * Depending on the character and the syntactic context, the request may or may not be served a result. */ interface SignatureHelpCharacterTypedReason { kind: "characterTyped"; /** * Character that was responsible for triggering signature help. */ triggerCharacter: SignatureHelpTriggerCharacter; } /** * Signals that this signature help request came from typing a character or moving the cursor. * This should only occur if a signature help session was already active and the editor needs to see if it should adjust. * The language service will unconditionally attempt to provide a result. * `triggerCharacter` can be `undefined` for a retrigger caused by a cursor move. */ interface SignatureHelpRetriggeredReason { kind: "retrigger"; /** * Character that was responsible for triggering signature help. 
*/ triggerCharacter?: SignatureHelpRetriggerCharacter; } /** * Signature help request; value of command field is "signatureHelp". * Given a file location (file, line, col), return the signature * help. */ interface SignatureHelpRequest extends FileLocationRequest { command: CommandTypes.SignatureHelp; arguments: SignatureHelpRequestArgs; } /** * Response object for a SignatureHelpRequest. */ interface SignatureHelpResponse extends Response { body?: SignatureHelpItems; } /** * Synchronous request for semantic diagnostics of one file. */ interface SemanticDiagnosticsSyncRequest extends FileRequest { command: CommandTypes.SemanticDiagnosticsSync; arguments: SemanticDiagnosticsSyncRequestArgs; } interface SemanticDiagnosticsSyncRequestArgs extends FileRequestArgs { includeLinePosition?: boolean; } /** * Response object for synchronous sematic diagnostics request. */ interface SemanticDiagnosticsSyncResponse extends Response { body?: Diagnostic[] | DiagnosticWithLinePosition[]; } interface SuggestionDiagnosticsSyncRequest extends FileRequest { command: CommandTypes.SuggestionDiagnosticsSync; arguments: SuggestionDiagnosticsSyncRequestArgs; } type SuggestionDiagnosticsSyncRequestArgs = SemanticDiagnosticsSyncRequestArgs; type SuggestionDiagnosticsSyncResponse = SemanticDiagnosticsSyncResponse; /** * Synchronous request for syntactic diagnostics of one file. */ interface SyntacticDiagnosticsSyncRequest extends FileRequest { command: CommandTypes.SyntacticDiagnosticsSync; arguments: SyntacticDiagnosticsSyncRequestArgs; } interface SyntacticDiagnosticsSyncRequestArgs extends FileRequestArgs { includeLinePosition?: boolean; } /** * Response object for synchronous syntactic diagnostics request. */ interface SyntacticDiagnosticsSyncResponse extends Response { body?: Diagnostic[] | DiagnosticWithLinePosition[]; } /** * Arguments for GeterrForProject request. 
*/ interface GeterrForProjectRequestArgs { /** * the file requesting project error list */ file: string; /** * Delay in milliseconds to wait before starting to compute * errors for the files in the file list */ delay: number; } /** * GeterrForProjectRequest request; value of command field is * "geterrForProject". It works similarly with 'Geterr', only * it request for every file in this project. */ interface GeterrForProjectRequest extends Request { command: CommandTypes.GeterrForProject; arguments: GeterrForProjectRequestArgs; } /** * Arguments for geterr messages. */ interface GeterrRequestArgs { /** * List of file names for which to compute compiler errors. * The files will be checked in list order. */ files: string[]; /** * Delay in milliseconds to wait before starting to compute * errors for the files in the file list */ delay: number; } /** * Geterr request; value of command field is "geterr". Wait for * delay milliseconds and then, if during the wait no change or * reload messages have arrived for the first file in the files * list, get the syntactic errors for the file, field requests, * and then get the semantic errors for the file. Repeat with a * smaller delay for each subsequent file on the files list. Best * practice for an editor is to send a file list containing each * file that is currently visible, in most-recently-used order. */ interface GeterrRequest extends Request { command: CommandTypes.Geterr; arguments: GeterrRequestArgs; } type RequestCompletedEventName = "requestCompleted"; /** * Event that is sent when server have finished processing request with specified id. */ interface RequestCompletedEvent extends Event { event: RequestCompletedEventName; body: RequestCompletedEventBody; } interface RequestCompletedEventBody { request_seq: number; } /** * Item of diagnostic information found in a DiagnosticEvent message. */ interface Diagnostic { /** * Starting file location at which text applies. 
*/ start: Location; /** * The last file location at which the text applies. */ end: Location; /** * Text of diagnostic message. */ text: string; /** * The category of the diagnostic message, e.g. "error", "warning", or "suggestion". */ category: string; reportsUnnecessary?: {}; reportsDeprecated?: {}; /** * Any related spans the diagnostic may have, such as other locations relevant to an error, such as declarartion sites */ relatedInformation?: DiagnosticRelatedInformation[]; /** * The error code of the diagnostic message. */ code?: number; /** * The name of the plugin reporting the message. */ source?: string; } interface DiagnosticWithFileName extends Diagnostic { /** * Name of the file the diagnostic is in */ fileName: string; } /** * Represents additional spans returned with a diagnostic which are relevant to it */ interface DiagnosticRelatedInformation { /** * The category of the related information message, e.g. "error", "warning", or "suggestion". */ category: string; /** * The code used ot identify the related information */ code: number; /** * Text of related or additional information. */ message: string; /** * Associated location */ span?: FileSpan; } interface DiagnosticEventBody { /** * The file for which diagnostic information is reported. */ file: string; /** * An array of diagnostic information items. */ diagnostics: Diagnostic[]; } type DiagnosticEventKind = "semanticDiag" | "syntaxDiag" | "suggestionDiag"; /** * Event message for DiagnosticEventKind event types. * These events provide syntactic and semantic errors for a file. */ interface DiagnosticEvent extends Event { body?: DiagnosticEventBody; event: DiagnosticEventKind; } interface ConfigFileDiagnosticEventBody { /** * The file which trigged the searching and error-checking of the config file */ triggerFile: string; /** * The name of the found config file. */ configFile: string; /** * An arry of diagnostic information items for the found config file. 
*/ diagnostics: DiagnosticWithFileName[]; } /** * Event message for "configFileDiag" event type. * This event provides errors for a found config file. */ interface ConfigFileDiagnosticEvent extends Event { body?: ConfigFileDiagnosticEventBody; event: "configFileDiag"; } type ProjectLanguageServiceStateEventName = "projectLanguageServiceState"; interface ProjectLanguageServiceStateEvent extends Event { event: ProjectLanguageServiceStateEventName; body?: ProjectLanguageServiceStateEventBody; } interface ProjectLanguageServiceStateEventBody { /** * Project name that has changes in the state of language service. * For configured projects this will be the config file path. * For external projects this will be the name of the projects specified when project was open. * For inferred projects this event is not raised. */ projectName: string; /** * True if language service state switched from disabled to enabled * and false otherwise. */ languageServiceEnabled: boolean; } type ProjectsUpdatedInBackgroundEventName = "projectsUpdatedInBackground"; interface ProjectsUpdatedInBackgroundEvent extends Event { event: ProjectsUpdatedInBackgroundEventName; body: ProjectsUpdatedInBackgroundEventBody; } interface ProjectsUpdatedInBackgroundEventBody { /** * Current set of open files */ openFiles: string[]; } type ProjectLoadingStartEventName = "projectLoadingStart"; interface ProjectLoadingStartEvent extends Event { event: ProjectLoadingStartEventName; body: ProjectLoadingStartEventBody; } interface ProjectLoadingStartEventBody { /** name of the project */ projectName: string; /** reason for loading */ reason: string; } type ProjectLoadingFinishEventName = "projectLoadingFinish"; interface ProjectLoadingFinishEvent extends Event { event: ProjectLoadingFinishEventName; body: ProjectLoadingFinishEventBody; } interface ProjectLoadingFinishEventBody { /** name of the project */ projectName: string; } type SurveyReadyEventName = "surveyReady"; interface SurveyReadyEvent extends Event { 
event: SurveyReadyEventName; body: SurveyReadyEventBody; } interface SurveyReadyEventBody { /** Name of the survey. This is an internal machine- and programmer-friendly name */ surveyId: string; } type LargeFileReferencedEventName = "largeFileReferenced"; interface LargeFileReferencedEvent extends Event { event: LargeFileReferencedEventName; body: LargeFileReferencedEventBody; } interface LargeFileReferencedEventBody { /** * name of the large file being loaded */ file: string; /** * size of the file */ fileSize: number; /** * max file size allowed on the server */ maxFileSize: number; } /** * Arguments for reload request. */ interface ReloadRequestArgs extends FileRequestArgs { /** * Name of temporary file from which to reload file * contents. May be same as file. */ tmpfile: string; } /** * Reload request message; value of command field is "reload". * Reload contents of file with name given by the 'file' argument * from temporary file with name given by the 'tmpfile' argument. * The two names can be identical. */ interface ReloadRequest extends FileRequest { command: CommandTypes.Reload; arguments: ReloadRequestArgs; } /** * Response to "reload" request. This is just an acknowledgement, so * no body field is required. */ interface ReloadResponse extends Response { } /** * Arguments for saveto request. */ interface SavetoRequestArgs extends FileRequestArgs { /** * Name of temporary file into which to save server's view of * file contents. */ tmpfile: string; } /** * Saveto request message; value of command field is "saveto". * For debugging purposes, save to a temporary file (named by * argument 'tmpfile') the contents of file named by argument * 'file'. The server does not currently send a response to a * "saveto" request. */ interface SavetoRequest extends FileRequest { command: CommandTypes.Saveto; arguments: SavetoRequestArgs; } /** * Arguments for navto request message.
*/ interface NavtoRequestArgs { /** * Search term to navigate to from current location; term can * be '.*' or an identifier prefix. */ searchValue: string; /** * Optional limit on the number of items to return. */ maxResultCount?: number; /** * The file for the request (absolute pathname required). */ file?: string; /** * Optional flag to indicate we want results for just the current file * or the entire project. */ currentFileOnly?: boolean; projectFileName?: string; } /** * Navto request message; value of command field is "navto". * Return list of objects giving file locations and symbols that * match the search term given in argument 'searchTerm'. The * context for the search is given by the named file. */ interface NavtoRequest extends Request { command: CommandTypes.Navto; arguments: NavtoRequestArgs; } /** * An item found in a navto response. */ interface NavtoItem extends FileSpan { /** * The symbol's name. */ name: string; /** * The symbol's kind (such as 'className' or 'parameterName'). */ kind: ScriptElementKind; /** * exact, substring, or prefix. */ matchKind: string; /** * If this was a case sensitive or insensitive match. */ isCaseSensitive: boolean; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers?: string; /** * Name of symbol's container symbol (if any); for example, * the class name if symbol is a class member. */ containerName?: string; /** * Kind of symbol's container symbol (if any). */ containerKind?: ScriptElementKind; } /** * Navto response message. Body is an array of navto items. Each * item gives a symbol that matched the search term. */ interface NavtoResponse extends Response { body?: NavtoItem[]; } /** * Arguments for change request message. */ interface ChangeRequestArgs extends FormatRequestArgs { /** * Optional string to insert at location (file, line, offset). */ insertString?: string; } /** * Change request message; value of command field is "change". 
* Update the server's view of the file named by argument 'file'. * Server does not currently send a response to a change request. */ interface ChangeRequest extends FileLocationRequest { command: CommandTypes.Change; arguments: ChangeRequestArgs; } /** * Response to "brace" request. */ interface BraceResponse extends Response { body?: TextSpan[]; } /** * Brace matching request; value of command field is "brace". * Return response giving the file locations of matching braces * found in file at location line, offset. */ interface BraceRequest extends FileLocationRequest { command: CommandTypes.Brace; } /** * NavBar items request; value of command field is "navbar". * Return response giving the list of navigation bar entries * extracted from the requested file. */ interface NavBarRequest extends FileRequest { command: CommandTypes.NavBar; } /** * NavTree request; value of command field is "navtree". * Return response giving the navigation tree of the requested file. */ interface NavTreeRequest extends FileRequest { command: CommandTypes.NavTree; } interface NavigationBarItem { /** * The item's display text. */ text: string; /** * The symbol's kind (such as 'className' or 'parameterName'). */ kind: ScriptElementKind; /** * Optional modifiers for the kind (such as 'public'). */ kindModifiers?: string; /** * The definition locations of the item. */ spans: TextSpan[]; /** * Optional children. */ childItems?: NavigationBarItem[]; /** * Number of levels deep this item should appear. 
*/ indent: number; } /** protocol.NavigationTree is identical to ts.NavigationTree, except using protocol.TextSpan instead of ts.TextSpan */ interface NavigationTree { text: string; kind: ScriptElementKind; kindModifiers: string; spans: TextSpan[]; nameSpan: TextSpan | undefined; childItems?: NavigationTree[]; } type TelemetryEventName = "telemetry"; interface TelemetryEvent extends Event { event: TelemetryEventName; body: TelemetryEventBody; } interface TelemetryEventBody { telemetryEventName: string; payload: any; } type TypesInstallerInitializationFailedEventName = "typesInstallerInitializationFailed"; interface TypesInstallerInitializationFailedEvent extends Event { event: TypesInstallerInitializationFailedEventName; body: TypesInstallerInitializationFailedEventBody; } interface TypesInstallerInitializationFailedEventBody { message: string; } type TypingsInstalledTelemetryEventName = "typingsInstalled"; interface TypingsInstalledTelemetryEventBody extends TelemetryEventBody { telemetryEventName: TypingsInstalledTelemetryEventName; payload: TypingsInstalledTelemetryEventPayload; } interface TypingsInstalledTelemetryEventPayload { /** * Comma separated list of installed typing packages */ installedPackages: string; /** * true if install request succeeded, otherwise - false */ installSuccess: boolean; /** * version of typings installer */ typingsInstallerVersion: string; } type BeginInstallTypesEventName = "beginInstallTypes"; type EndInstallTypesEventName = "endInstallTypes"; interface BeginInstallTypesEvent extends Event { event: BeginInstallTypesEventName; body: BeginInstallTypesEventBody; } interface EndInstallTypesEvent extends Event { event: EndInstallTypesEventName; body: EndInstallTypesEventBody; } interface InstallTypesEventBody { /** * correlation id to match begin and end events */ eventId: number; /** * list of packages to install */ packages: readonly string[]; } interface BeginInstallTypesEventBody extends InstallTypesEventBody { } interface 
EndInstallTypesEventBody extends InstallTypesEventBody { /** * true if installation succeeded, otherwise false */ success: boolean; } interface NavBarResponse extends Response { body?: NavigationBarItem[]; } interface NavTreeResponse extends Response { body?: NavigationTree; } interface CallHierarchyItem { name: string; kind: ScriptElementKind; kindModifiers?: string; file: string; span: TextSpan; selectionSpan: TextSpan; containerName?: string; } interface CallHierarchyIncomingCall { from: CallHierarchyItem; fromSpans: TextSpan[]; } interface CallHierarchyOutgoingCall { to: CallHierarchyItem; fromSpans: TextSpan[]; } interface PrepareCallHierarchyRequest extends FileLocationRequest { command: CommandTypes.PrepareCallHierarchy; } interface PrepareCallHierarchyResponse extends Response { readonly body: CallHierarchyItem | CallHierarchyItem[]; } interface ProvideCallHierarchyIncomingCallsRequest extends FileLocationRequest { command: CommandTypes.ProvideCallHierarchyIncomingCalls; } interface ProvideCallHierarchyIncomingCallsResponse extends Response { readonly body: CallHierarchyIncomingCall[]; } interface ProvideCallHierarchyOutgoingCallsRequest extends FileLocationRequest { command: CommandTypes.ProvideCallHierarchyOutgoingCalls; } interface ProvideCallHierarchyOutgoingCallsResponse extends Response { readonly body: CallHierarchyOutgoingCall[]; } enum IndentStyle { None = "None", Block = "Block", Smart = "Smart" } enum SemicolonPreference { Ignore = "ignore", Insert = "insert", Remove = "remove" } interface EditorSettings { baseIndentSize?: number; indentSize?: number; tabSize?: number; newLineCharacter?: string; convertTabsToSpaces?: boolean; indentStyle?: IndentStyle | ts.IndentStyle; trimTrailingWhitespace?: boolean; } interface FormatCodeSettings extends EditorSettings { insertSpaceAfterCommaDelimiter?: boolean; insertSpaceAfterSemicolonInForStatements?: boolean; insertSpaceBeforeAndAfterBinaryOperators?: boolean; insertSpaceAfterConstructor?: boolean; 
insertSpaceAfterKeywordsInControlFlowStatements?: boolean; insertSpaceAfterFunctionKeywordForAnonymousFunctions?: boolean; insertSpaceAfterOpeningAndBeforeClosingEmptyBraces?: boolean; insertSpaceAfterOpeningAndBeforeClosingNonemptyParenthesis?: boolean; insertSpaceAfterOpeningAndBeforeClosingNonemptyBrackets?: boolean; insertSpaceAfterOpeningAndBeforeClosingNonemptyBraces?: boolean; insertSpaceAfterOpeningAndBeforeClosingTemplateStringBraces?: boolean; insertSpaceAfterOpeningAndBeforeClosingJsxExpressionBraces?: boolean; insertSpaceAfterTypeAssertion?: boolean; insertSpaceBeforeFunctionParenthesis?: boolean; placeOpenBraceOnNewLineForFunctions?: boolean; placeOpenBraceOnNewLineForControlBlocks?: boolean; insertSpaceBeforeTypeAnnotation?: boolean; semicolons?: SemicolonPreference; } interface UserPreferences { readonly disableSuggestions?: boolean; readonly quotePreference?: "auto" | "double" | "single"; /** * If enabled, TypeScript will search through all external modules' exports and add them to the completions list. * This affects lone identifier completions but not completions on the right hand side of `obj.`. */ readonly includeCompletionsForModuleExports?: boolean; /** * If enabled, the completion list will include completions with invalid identifier names. * For those entries, The `insertText` and `replacementSpan` properties will be set to change from `.x` property access to `["x"]`. */ readonly includeCompletionsWithInsertText?: boolean; /** * Unless this option is `false`, or `includeCompletionsWithInsertText` is not enabled, * member completion lists triggered with `.` will include entries on potentially-null and potentially-undefined * values, with insertion text to replace preceding `.` tokens with `?.`. 
*/ readonly includeAutomaticOptionalChainCompletions?: boolean; readonly importModuleSpecifierPreference?: "auto" | "relative" | "non-relative"; /** Determines whether we import `foo/index.ts` as "foo", "foo/index", or "foo/index.js" */ readonly importModuleSpecifierEnding?: "auto" | "minimal" | "index" | "js"; readonly allowTextChangesInNewFiles?: boolean; readonly lazyConfiguredProjectsFromExternalProject?: boolean; readonly providePrefixAndSuffixTextForRename?: boolean; readonly provideRefactorNotApplicableReason?: boolean; readonly allowRenameOfImportPath?: boolean; readonly includePackageJsonAutoImports?: "auto" | "on" | "off"; } interface CompilerOptions { allowJs?: boolean; allowSyntheticDefaultImports?: boolean; allowUnreachableCode?: boolean; allowUnusedLabels?: boolean; alwaysStrict?: boolean; baseUrl?: string; charset?: string; checkJs?: boolean; declaration?: boolean; declarationDir?: string; disableSizeLimit?: boolean; downlevelIteration?: boolean; emitBOM?: boolean; emitDecoratorMetadata?: boolean; experimentalDecorators?: boolean; forceConsistentCasingInFileNames?: boolean; importHelpers?: boolean; inlineSourceMap?: boolean; inlineSources?: boolean; isolatedModules?: boolean; jsx?: JsxEmit | ts.JsxEmit; lib?: string[]; locale?: string; mapRoot?: string; maxNodeModuleJsDepth?: number; module?: ModuleKind | ts.ModuleKind; moduleResolution?: ModuleResolutionKind | ts.ModuleResolutionKind; newLine?: NewLineKind | ts.NewLineKind; noEmit?: boolean; noEmitHelpers?: boolean; noEmitOnError?: boolean; noErrorTruncation?: boolean; noFallthroughCasesInSwitch?: boolean; noImplicitAny?: boolean; noImplicitReturns?: boolean; noImplicitThis?: boolean; noUnusedLocals?: boolean; noUnusedParameters?: boolean; noImplicitUseStrict?: boolean; noLib?: boolean; noResolve?: boolean; out?: string; outDir?: string; outFile?: string; paths?: MapLike<string[]>; plugins?: PluginImport[]; preserveConstEnums?: boolean; preserveSymlinks?: boolean; project?: string; reactNamespace?: 
string; removeComments?: boolean; references?: ProjectReference[]; rootDir?: string; rootDirs?: string[]; skipLibCheck?: boolean; skipDefaultLibCheck?: boolean; sourceMap?: boolean; sourceRoot?: string; strict?: boolean; strictNullChecks?: boolean; suppressExcessPropertyErrors?: boolean; suppressImplicitAnyIndexErrors?: boolean; useDefineForClassFields?: boolean; target?: ScriptTarget | ts.ScriptTarget; traceResolution?: boolean; resolveJsonModule?: boolean; types?: string[]; /** Paths used to compute primary types search locations */ typeRoots?: string[]; [option: string]: CompilerOptionsValue | undefined; } enum JsxEmit { None = "None", Preserve = "Preserve", ReactNative = "ReactNative", React = "React" } enum ModuleKind { None = "None", CommonJS = "CommonJS", AMD = "AMD", UMD = "UMD", System = "System", ES6 = "ES6", ES2015 = "ES2015", ESNext = "ESNext" } enum ModuleResolutionKind { Classic = "Classic", Node = "Node" } enum NewLineKind { Crlf = "Crlf", Lf = "Lf" } enum ScriptTarget { ES3 = "ES3", ES5 = "ES5", ES6 = "ES6", ES2015 = "ES2015", ES2016 = "ES2016", ES2017 = "ES2017", ES2018 = "ES2018", ES2019 = "ES2019", ES2020 = "ES2020", ESNext = "ESNext" } } declare namespace ts.server { interface ScriptInfoVersion { svc: number; text: number; } function isDynamicFileName(fileName: NormalizedPath): boolean; class ScriptInfo { private readonly host; readonly fileName: NormalizedPath; readonly scriptKind: ScriptKind; readonly hasMixedContent: boolean; readonly path: Path; /** * All projects that include this file */ readonly containingProjects: Project[]; private formatSettings; private preferences; private textStorage; constructor(host: ServerHost, fileName: NormalizedPath, scriptKind: ScriptKind, hasMixedContent: boolean, path: Path, initialVersion?: ScriptInfoVersion); isScriptOpen(): boolean; open(newText: string): void; close(fileExists?: boolean): void; getSnapshot(): IScriptSnapshot; private ensureRealPath; getFormatCodeSettings(): FormatCodeSettings |
undefined; getPreferences(): protocol.UserPreferences | undefined; attachToProject(project: Project): boolean; isAttached(project: Project): boolean; detachFromProject(project: Project): void; detachAllProjects(): void; getDefaultProject(): Project; registerFileUpdate(): void; setOptions(formatSettings: FormatCodeSettings, preferences: protocol.UserPreferences | undefined): void; getLatestVersion(): string; saveTo(fileName: string): void; reloadFromFile(tempFileName?: NormalizedPath): boolean; editContent(start: number, end: number, newText: string): void; markContainingProjectsAsDirty(): void; isOrphan(): boolean; /** * @param line 1 based index */ lineToTextSpan(line: number): TextSpan; /** * @param line 1 based index * @param offset 1 based index */ lineOffsetToPosition(line: number, offset: number): number; positionToLineOffset(position: number): protocol.Location; isJavaScript(): boolean; } } declare namespace ts.server { interface InstallPackageOptionsWithProject extends InstallPackageOptions { projectName: string; projectRootPath: Path; } interface ITypingsInstaller { isKnownTypesPackageName(name: string): boolean; installPackage(options: InstallPackageOptionsWithProject): Promise<ApplyCodeActionCommandResult>; enqueueInstallTypingsRequest(p: Project, typeAcquisition: TypeAcquisition, unresolvedImports: SortedReadonlyArray<string> | undefined): void; attach(projectService: ProjectService): void; onProjectClosed(p: Project): void; readonly globalTypingsCacheLocation: string | undefined; } const nullTypingsInstaller: ITypingsInstaller; } declare namespace ts.server { enum ProjectKind { Inferred = 0, Configured = 1, External = 2, AutoImportProvider = 3 } function allRootFilesAreJsOrDts(project: Project): boolean; function allFilesAreJsOrDts(project: Project): boolean; interface PluginCreateInfo { project: Project; languageService: LanguageService; languageServiceHost: LanguageServiceHost; serverHost: ServerHost; config: any; } interface PluginModule { 
create(createInfo: PluginCreateInfo): LanguageService; getExternalFiles?(proj: Project): string[]; onConfigurationChanged?(config: any): void; } interface PluginModuleWithName { name: string; module: PluginModule; } type PluginModuleFactory = (mod: { typescript: typeof ts; }) => PluginModule; abstract class Project implements LanguageServiceHost, ModuleResolutionHost { readonly projectName: string; readonly projectKind: ProjectKind; readonly projectService: ProjectService; private documentRegistry; private compilerOptions; compileOnSaveEnabled: boolean; protected watchOptions: WatchOptions | undefined; private rootFiles; private rootFilesMap; private program; private externalFiles; private missingFilesMap; private generatedFilesMap; private plugins; private lastFileExceededProgramSize; protected languageService: LanguageService; languageServiceEnabled: boolean; readonly trace?: (s: string) => void; readonly realpath?: (path: string) => string; private builderState; /** * Set of file names that were updated since the last call to getChangesSinceVersion. */ private updatedFileNames; /** * Set of files that were returned from the last call to getChangesSinceVersion. */ private lastReportedFileNames; /** * Last version that was reported. */ private lastReportedVersion; /** * Current project's program version. (incremented every time a new program is created that is not a complete reuse of the old one) * This property is changed in 'updateGraph' based on the set of files in program */ private projectProgramVersion; /** * Current version of the project state. It is changed when: * - a new root file was added/removed * - edits happen in some file that is currently included in the project.
* This property is different from projectStructureVersion since in most cases edits don't affect set of files in the project */ private projectStateVersion; protected isInitialLoadPending: () => boolean; private readonly cancellationToken; isNonTsProject(): boolean; isJsOnlyProject(): boolean; static resolveModule(moduleName: string, initialDir: string, host: ServerHost, log: (message: string) => void, logErrors?: (message: string) => void): {} | undefined; isKnownTypesPackageName(name: string): boolean; installPackage(options: InstallPackageOptions): Promise<ApplyCodeActionCommandResult>; private get typingsCache(); getCompilationSettings(): CompilerOptions; getCompilerOptions(): CompilerOptions; getNewLine(): string; getProjectVersion(): string; getProjectReferences(): readonly ProjectReference[] | undefined; getScriptFileNames(): string[]; private getOrCreateScriptInfoAndAttachToProject; getScriptKind(fileName: string): ScriptKind; getScriptVersion(filename: string): string; getScriptSnapshot(filename: string): IScriptSnapshot | undefined; getCancellationToken(): HostCancellationToken; getCurrentDirectory(): string; getDefaultLibFileName(): string; useCaseSensitiveFileNames(): boolean; readDirectory(path: string, extensions?: readonly string[], exclude?: readonly string[], include?: readonly string[], depth?: number): string[]; readFile(fileName: string): string | undefined; writeFile(fileName: string, content: string): void; fileExists(file: string): boolean; resolveModuleNames(moduleNames: string[], containingFile: string, reusedNames?: string[], redirectedReference?: ResolvedProjectReference): (ResolvedModuleFull | undefined)[]; getResolvedModuleWithFailedLookupLocationsFromCache(moduleName: string, containingFile: string): ResolvedModuleWithFailedLookupLocations | undefined; resolveTypeReferenceDirectives(typeDirectiveNames: string[], containingFile: string, redirectedReference?: ResolvedProjectReference): (ResolvedTypeReferenceDirective | undefined)[]; 
directoryExists(path: string): boolean; getDirectories(path: string): string[]; log(s: string): void; error(s: string): void; private setInternalCompilerOptionsForEmittingJsFiles; /** * Get the errors that don't have any file name associated */ getGlobalProjectErrors(): readonly Diagnostic[]; getAllProjectErrors(): readonly Diagnostic[]; getLanguageService(ensureSynchronized?: boolean): LanguageService; getCompileOnSaveAffectedFileList(scriptInfo: ScriptInfo): string[]; /** * Returns true if emit was conducted */ emitFile(scriptInfo: ScriptInfo, writeFile: (path: string, data: string, writeByteOrderMark?: boolean) => void): EmitResult; enableLanguageService(): void; disableLanguageService(lastFileExceededProgramSize?: string): void; getProjectName(): string; protected removeLocalTypingsFromTypeAcquisition(newTypeAcquisition: TypeAcquisition): TypeAcquisition; getExternalFiles(): SortedReadonlyArray<string>; getSourceFile(path: Path): SourceFile | undefined; close(): void; private detachScriptInfoIfNotRoot; isClosed(): boolean; hasRoots(): boolean; getRootFiles(): NormalizedPath[]; getRootScriptInfos(): ScriptInfo[]; getScriptInfos(): ScriptInfo[]; getExcludedFiles(): readonly NormalizedPath[]; getFileNames(excludeFilesFromExternalLibraries?: boolean, excludeConfigFiles?: boolean): NormalizedPath[]; hasConfigFile(configFilePath: NormalizedPath): boolean; containsScriptInfo(info: ScriptInfo): boolean; containsFile(filename: NormalizedPath, requireOpen?: boolean): boolean; isRoot(info: ScriptInfo): boolean; addRoot(info: ScriptInfo, fileName?: NormalizedPath): void; addMissingFileRoot(fileName: NormalizedPath): void; removeFile(info: ScriptInfo, fileExists: boolean, detachFromProject: boolean): void; registerFileUpdate(fileName: string): void; markAsDirty(): void; /** * Updates set of files that contribute to this project * @returns: true if set of files in the project stays the same and false - otherwise.
*/ updateGraph(): boolean; protected removeExistingTypings(include: string[]): string[]; private updateGraphWorker; private detachScriptInfoFromProject; private addMissingFileWatcher; private isWatchedMissingFile; private createGeneratedFileWatcher; private isValidGeneratedFileWatcher; private clearGeneratedFileWatch; getScriptInfoForNormalizedPath(fileName: NormalizedPath): ScriptInfo | undefined; getScriptInfo(uncheckedFileName: string): ScriptInfo | undefined; filesToString(writeProjectFileNames: boolean): string; setCompilerOptions(compilerOptions: CompilerOptions): void; setTypeAcquisition(newTypeAcquisition: TypeAcquisition | undefined): void; getTypeAcquisition(): TypeAcquisition; protected removeRoot(info: ScriptInfo): void; protected enableGlobalPlugins(options: CompilerOptions, pluginConfigOverrides: Map<any> | undefined): void; protected enablePlugin(pluginConfigEntry: PluginImport, searchPaths: string[], pluginConfigOverrides: Map<any> | undefined): void; private enableProxy; /** Starts a new check for diagnostics. Call this if some file has updated that would cause diagnostics to be changed. */ refreshDiagnostics(): void; } /** * If a file is opened and no tsconfig (or jsconfig) is found, * the file and its imports/references are put into an InferredProject. 
*/ class InferredProject extends Project { private static readonly newName; private _isJsInferredProject; toggleJsInferredProject(isJsInferredProject: boolean): void; setCompilerOptions(options?: CompilerOptions): void; /** this is canonical project root path */ readonly projectRootPath: string | undefined; addRoot(info: ScriptInfo): void; removeRoot(info: ScriptInfo): void; isProjectWithSingleRoot(): boolean; close(): void; getTypeAcquisition(): TypeAcquisition; } class AutoImportProviderProject extends Project { private hostProject; private static readonly newName; private rootFileNames; isOrphan(): boolean; updateGraph(): boolean; markAsDirty(): void; getScriptFileNames(): string[]; getLanguageService(): never; markAutoImportProviderAsDirty(): never; getModuleResolutionHostForAutoImportProvider(): never; getProjectReferences(): readonly ProjectReference[] | undefined; useSourceOfProjectReferenceRedirect(): boolean; getTypeAcquisition(): TypeAcquisition; } /** * If a file is opened, the server will look for a tsconfig (or jsconfig) * and if successful create a ConfiguredProject for it. * Otherwise it will create an InferredProject. */ class ConfiguredProject extends Project { private directoriesWatchedForWildcards; readonly canonicalConfigFilePath: NormalizedPath; /** Ref count to the project when opened from external project */ private externalProjectRefCount; private projectErrors; private projectReferences; /** * If the project has reload from disk pending, it reloads (and then updates graph as part of that) instead of just updating the graph * @returns: true if set of files in the project stays the same and false - otherwise. 
updateGraph(): boolean; getConfigFilePath(): NormalizedPath; getProjectReferences(): readonly ProjectReference[] | undefined; updateReferences(refs: readonly ProjectReference[] | undefined): void; /** * Get the errors that don't have any file name associated */ getGlobalProjectErrors(): readonly Diagnostic[]; /** * Get all the project errors */ getAllProjectErrors(): readonly Diagnostic[]; setProjectErrors(projectErrors: Diagnostic[]): void; close(): void; getEffectiveTypeRoots(): string[]; } /** * Project whose configuration is handled externally, such as in a '.csproj'. * These are created only if a host explicitly calls `openExternalProject`. */ class ExternalProject extends Project { externalProjectName: string; compileOnSaveEnabled: boolean; excludedFiles: readonly NormalizedPath[]; updateGraph(): boolean; getExcludedFiles(): readonly NormalizedPath[]; } } declare namespace ts.server { export const maxProgramSizeForNonTsFiles: number; export const ProjectsUpdatedInBackgroundEvent = "projectsUpdatedInBackground"; export const ProjectLoadingStartEvent = "projectLoadingStart"; export const ProjectLoadingFinishEvent = "projectLoadingFinish"; export const LargeFileReferencedEvent = "largeFileReferenced"; export const ConfigFileDiagEvent = "configFileDiag"; export const ProjectLanguageServiceStateEvent = "projectLanguageServiceState"; export const ProjectInfoTelemetryEvent = "projectInfo"; export const OpenFileInfoTelemetryEvent = "openFileInfo"; export interface ProjectsUpdatedInBackgroundEvent { eventName: typeof ProjectsUpdatedInBackgroundEvent; data: { openFiles: string[]; }; } export interface ProjectLoadingStartEvent { eventName: typeof ProjectLoadingStartEvent; data: { project: Project; reason: string; }; } export interface ProjectLoadingFinishEvent { eventName: typeof ProjectLoadingFinishEvent; data: { project: Project; }; } export interface LargeFileReferencedEvent { eventName: typeof LargeFileReferencedEvent; data: { file: string; fileSize: number;
maxFileSize: number; }; } export interface ConfigFileDiagEvent { eventName: typeof ConfigFileDiagEvent; data: { triggerFile: string; configFileName: string; diagnostics: readonly Diagnostic[]; }; } export interface ProjectLanguageServiceStateEvent { eventName: typeof ProjectLanguageServiceStateEvent; data: { project: Project; languageServiceEnabled: boolean; }; } /** This will be converted to the payload of a protocol.TelemetryEvent in session.defaultEventHandler. */ export interface ProjectInfoTelemetryEvent { readonly eventName: typeof ProjectInfoTelemetryEvent; readonly data: ProjectInfoTelemetryEventData; } export interface ProjectInfoTelemetryEventData { /** Cryptographically secure hash of project file location. */ readonly projectId: string; /** Count of file extensions seen in the project. */ readonly fileStats: FileStats; /** * Any compiler options that might contain paths will be taken out. * Enum compiler options will be converted to strings. */ readonly compilerOptions: CompilerOptions; readonly extends: boolean | undefined; readonly files: boolean | undefined; readonly include: boolean | undefined; readonly exclude: boolean | undefined; readonly compileOnSave: boolean; readonly typeAcquisition: ProjectInfoTypeAcquisitionData; readonly configFileName: "tsconfig.json" | "jsconfig.json" | "other"; readonly projectType: "external" | "configured"; readonly languageServiceEnabled: boolean; /** TypeScript version used by the server. */ readonly version: string; } /** * Info that we may send about a file that was just opened. * Info about a file will only be sent once per session, even if the file changes in ways that might affect the info. * Currently this is only sent for '.js' files. 
*/ export interface OpenFileInfoTelemetryEvent { readonly eventName: typeof OpenFileInfoTelemetryEvent; readonly data: OpenFileInfoTelemetryEventData; } export interface OpenFileInfoTelemetryEventData { readonly info: OpenFileInfo; } export interface ProjectInfoTypeAcquisitionData { readonly enable: boolean | undefined; readonly include: boolean; readonly exclude: boolean; } export interface FileStats { readonly js: number; readonly jsSize?: number; readonly jsx: number; readonly jsxSize?: number; readonly ts: number; readonly tsSize?: number; readonly tsx: number; readonly tsxSize?: number; readonly dts: number; readonly dtsSize?: number; readonly deferred: number; readonly deferredSize?: number; } export interface OpenFileInfo { readonly checkJs: boolean; } export type ProjectServiceEvent = LargeFileReferencedEvent | ProjectsUpdatedInBackgroundEvent | ProjectLoadingStartEvent | ProjectLoadingFinishEvent | ConfigFileDiagEvent | ProjectLanguageServiceStateEvent | ProjectInfoTelemetryEvent | OpenFileInfoTelemetryEvent; export type ProjectServiceEventHandler = (event: ProjectServiceEvent) => void; export interface SafeList { [name: string]: { match: RegExp; exclude?: (string | number)[][]; types?: string[]; }; } export interface TypesMapFile { typesMap: SafeList; simpleMap: { [libName: string]: string; }; } export function convertFormatOptions(protocolOptions: protocol.FormatCodeSettings): FormatCodeSettings; export function convertCompilerOptions(protocolOptions: protocol.ExternalProjectCompilerOptions): CompilerOptions & protocol.CompileOnSaveMixin; export function convertWatchOptions(protocolOptions: protocol.ExternalProjectCompilerOptions): WatchOptions | undefined; export function convertTypeAcquisition(protocolOptions: protocol.InferredProjectCompilerOptions): TypeAcquisition | undefined; export function tryConvertScriptKindName(scriptKindName: protocol.ScriptKindName | ScriptKind): ScriptKind; export function convertScriptKindName(scriptKindName: 
protocol.ScriptKindName): ScriptKind.Unknown | ScriptKind.JS | ScriptKind.JSX | ScriptKind.TS | ScriptKind.TSX; export interface HostConfiguration { formatCodeOptions: FormatCodeSettings; preferences: protocol.UserPreferences; hostInfo: string; extraFileExtensions?: FileExtensionInfo[]; watchOptions?: WatchOptions; } export interface OpenConfiguredProjectResult { configFileName?: NormalizedPath; configFileErrors?: readonly Diagnostic[]; } export interface ProjectServiceOptions { host: ServerHost; logger: Logger; cancellationToken: HostCancellationToken; useSingleInferredProject: boolean; useInferredProjectPerProjectRoot: boolean; typingsInstaller: ITypingsInstaller; eventHandler?: ProjectServiceEventHandler; suppressDiagnosticEvents?: boolean; throttleWaitMilliseconds?: number; globalPlugins?: readonly string[]; pluginProbeLocations?: readonly string[]; allowLocalPluginLoads?: boolean; typesMapLocation?: string; /** @deprecated use serverMode instead */ syntaxOnly?: boolean; serverMode?: LanguageServiceMode; } export class ProjectService { private readonly scriptInfoInNodeModulesWatchers; /** * Contains all the deleted script info's version information so that * it does not reset when creating script info again * (and could have potentially collided with version where contents mismatch) */ private readonly filenameToScriptInfoVersion; private readonly allJsFilesForOpenFileTelemetry; /** * maps external project file name to list of config files that were the part of this project */ private readonly externalProjectToConfiguredProjectMap; /** * external projects (configuration and list of root files is not controlled by tsserver) */ readonly externalProjects: ExternalProject[]; /** * projects built from openFileRoots */ readonly inferredProjects: InferredProject[]; /** * projects specified by a tsconfig.json file */ readonly configuredProjects: Map<ConfiguredProject>; /** * Open files: with value being project root path, and key being Path of the file that is open */ 
readonly openFiles: Map<NormalizedPath | undefined>; /** * Map of open files that are opened without complete path but have projectRoot as current directory */ private readonly openFilesWithNonRootedDiskPath; private compilerOptionsForInferredProjects; private compilerOptionsForInferredProjectsPerProjectRoot; private watchOptionsForInferredProjects; private watchOptionsForInferredProjectsPerProjectRoot; private typeAcquisitionForInferredProjects; private typeAcquisitionForInferredProjectsPerProjectRoot; /** * Project size for configured or external projects */ private readonly projectToSizeMap; /** * This is a map of config file paths existence that doesnt need query to disk * - The entry can be present because there is inferred project that needs to watch addition of config file to directory * In this case the exists could be true/false based on config file is present or not * - Or it is present if we have configured project open with config file at that location * In this case the exists property is always true */ private readonly configFileExistenceInfoCache; private readonly hostConfiguration; private safelist; private readonly legacySafelist; private pendingProjectUpdates; readonly currentDirectory: NormalizedPath; readonly toCanonicalFileName: (f: string) => string; readonly host: ServerHost; readonly logger: Logger; readonly cancellationToken: HostCancellationToken; readonly useSingleInferredProject: boolean; readonly useInferredProjectPerProjectRoot: boolean; readonly typingsInstaller: ITypingsInstaller; private readonly globalCacheLocationDirectoryPath; readonly throttleWaitMilliseconds?: number; private readonly eventHandler?; private readonly suppressDiagnosticEvents?; readonly globalPlugins: readonly string[]; readonly pluginProbeLocations: readonly string[]; readonly allowLocalPluginLoads: boolean; private currentPluginConfigOverrides; readonly typesMapLocation: string | undefined; /** @deprecated use serverMode instead */ readonly syntaxOnly: boolean; 
readonly serverMode: LanguageServiceMode; /** Tracks projects that we have already sent telemetry for. */ private readonly seenProjects; private performanceEventHandler?; constructor(opts: ProjectServiceOptions); toPath(fileName: string): Path; private loadTypesMap; updateTypingsForProject(response: SetTypings | InvalidateCachedTypings | PackageInstalledResponse): void; private delayUpdateProjectGraph; private delayUpdateProjectGraphs; setCompilerOptionsForInferredProjects(projectCompilerOptions: protocol.InferredProjectCompilerOptions, projectRootPath?: string): void; findProject(projectName: string): Project | undefined; getDefaultProjectForFile(fileName: NormalizedPath, ensureProject: boolean): Project | undefined; private doEnsureDefaultProjectForFile; getScriptInfoEnsuringProjectsUptoDate(uncheckedFileName: string): ScriptInfo | undefined; /** * Ensures the project structures are upto date * This means, * - we go through all the projects and update them if they are dirty * - if updates reflect some change in structure or there was pending request to ensure projects for open files * ensure that each open script info has project */ private ensureProjectStructuresUptoDate; getFormatCodeOptions(file: NormalizedPath): FormatCodeSettings; getPreferences(file: NormalizedPath): protocol.UserPreferences; getHostFormatCodeOptions(): FormatCodeSettings; getHostPreferences(): protocol.UserPreferences; private onSourceFileChanged; private handleSourceMapProjects; private delayUpdateSourceInfoProjects; private delayUpdateProjectsOfScriptInfoPath; private handleDeletedFile; /** * This is the callback function for the config file add/remove/change at any location * that matters to open script info but doesnt have configured project open * for the config file */ private onConfigFileChangeForOpenScriptInfo; private removeProject; private assignOrphanScriptInfosToInferredProject; /** * Remove this file from the set of open, non-configured files. 
* @param info The file that has been closed or newly configured */ private closeOpenFile; private deleteScriptInfo; private configFileExists; private setConfigFileExistenceByNewConfiguredProject; /** * Returns true if the configFileExistenceInfo is needed/impacted by open files that are root of inferred project */ private configFileExistenceImpactsRootOfInferredProject; private setConfigFileExistenceInfoByClosedConfiguredProject; private logConfigFileWatchUpdate; /** * Create the watcher for the configFileExistenceInfo */ private createConfigFileWatcherOfConfigFileExistence; /** * Close the config file watcher in the cached ConfigFileExistenceInfo * if there arent any open files that are root of inferred project */ private closeConfigFileWatcherOfConfigFileExistenceInfo; /** * This is called on file close, so that we stop watching the config file for this script info */ private stopWatchingConfigFilesForClosedScriptInfo; /** * This function tries to search for a tsconfig.json for the given file. * This is different from the method the compiler uses because * the compiler can assume it will always start searching in the * current directory (the directory in which tsc was invoked). * The server must start searching from the directory containing * the newly opened file. */ private forEachConfigFileLocation; /** * This function tries to search for a tsconfig.json for the given file. * This is different from the method the compiler uses because * the compiler can assume it will always start searching in the * current directory (the directory in which tsc was invoked). * The server must start searching from the directory containing * the newly opened file. 
* If script info is passed in, it is asserted to be open script info * otherwise just file name */ private getConfigFileNameForFile; private printProjects; private getConfiguredProjectByCanonicalConfigFilePath; private findExternalProjectByProjectName; /** Get a filename if the language service exceeds the maximum allowed program size; otherwise returns undefined. */ private getFilenameForExceededTotalSizeLimitForNonTsFiles; private createExternalProject; private addFilesToNonInferredProject; private updateNonInferredProjectFiles; private updateRootAndOptionsOfNonInferredProject; private sendConfigFileDiagEvent; private getOrCreateInferredProjectForProjectRootPathIfEnabled; private getOrCreateSingleInferredProjectIfEnabled; private getOrCreateSingleInferredWithoutProjectRoot; private createInferredProject; getScriptInfo(uncheckedFileName: string): ScriptInfo | undefined; private watchClosedScriptInfo; private watchClosedScriptInfoInNodeModules; private getModifiedTime; private refreshScriptInfo; private refreshScriptInfosInDirectory; private stopWatchingScriptInfo; private getOrCreateScriptInfoNotOpenedByClientForNormalizedPath; private getOrCreateScriptInfoOpenedByClientForNormalizedPath; getOrCreateScriptInfoForNormalizedPath(fileName: NormalizedPath, openedByClient: boolean, fileContent?: string, scriptKind?: ScriptKind, hasMixedContent?: boolean, hostToQueryFileExistsOn?: { fileExists(path: string): boolean; }): ScriptInfo | undefined; private getOrCreateScriptInfoWorker; /** * This gets the script info for the normalized path. 
If the path is not rooted disk path then the open script info with project root context is preferred */ getScriptInfoForNormalizedPath(fileName: NormalizedPath): ScriptInfo | undefined; getScriptInfoForPath(fileName: Path): ScriptInfo | undefined; private addSourceInfoToSourceMap; private addMissingSourceMapFile; setHostConfiguration(args: protocol.ConfigureRequestArguments): void; closeLog(): void; /** * This function rebuilds the project for every file opened by the client * This does not reload contents of open files from disk. But we could do that if needed */ reloadProjects(): void; private delayReloadConfiguredProjectForFiles; /** * This function goes through all the openFiles and tries to file the config file for them. * If the config file is found and it refers to existing project, it reloads it either immediately * or schedules it for reload depending on delayReload option * If the there is no existing project it just opens the configured project for the config file * reloadForInfo provides a way to filter out files to reload configured project for */ private reloadConfiguredProjectForFiles; /** * Remove the root of inferred project if script info is part of another project */ private removeRootOfInferredProjectIfNowPartOfOtherProject; /** * This function is to update the project structure for every inferred project. * It is called on the premise that all the configured projects are * up to date. 
* This will go through open files and assign them to inferred project if open file is not part of any other project * After that all the inferred project graphs are updated */ private ensureProjectForOpenFiles; /** * Open file whose contents is managed by the client * @param filename is absolute pathname * @param fileContent is a known version of the file content that is more up to date than the one on disk */ openClientFile(fileName: string, fileContent?: string, scriptKind?: ScriptKind, projectRootPath?: string): OpenConfiguredProjectResult; private findExternalProjectContainingOpenScriptInfo; private getOrCreateOpenScriptInfo; private assignProjectToOpenedScriptInfo; private createAncestorProjects; private ensureProjectChildren; private cleanupAfterOpeningFile; openClientFileWithNormalizedPath(fileName: NormalizedPath, fileContent?: string, scriptKind?: ScriptKind, hasMixedContent?: boolean, projectRootPath?: NormalizedPath): OpenConfiguredProjectResult; private removeOrphanConfiguredProjects; private removeOrphanScriptInfos; private telemetryOnOpenFile; /** * Close file whose contents is managed by the client * @param filename is absolute pathname */ closeClientFile(uncheckedFileName: string): void; private collectChanges; private closeConfiguredProjectReferencedFromExternalProject; closeExternalProject(uncheckedFileName: string): void; openExternalProjects(projects: protocol.ExternalProject[]): void; /** Makes a filename safe to insert in a RegExp */ private static readonly filenameEscapeRegexp; private static escapeFilenameForRegex; resetSafeList(): void; applySafeList(proj: protocol.ExternalProject): NormalizedPath[]; openExternalProject(proj: protocol.ExternalProject): void; hasDeferredExtension(): boolean; configurePlugin(args: protocol.ConfigurePluginRequestArguments): void; } export {}; } declare namespace ts.server { interface ServerCancellationToken extends HostCancellationToken { setRequest(requestId: number): void; resetRequest(requestId: number): 
void; } /** Cancellation token that (by name) never reports cancellation — a null-object default; declaration only, body not visible in this .d.ts. */ const nullCancellationToken: ServerCancellationToken; /** A (fileName, project) pair awaiting a deferred diagnostics pass — see Session.toPendingErrorCheck / Session.updateErrorCheck (inferred from member names). */ interface PendingErrorCheck { fileName: NormalizedPath; project: Project; } type CommandNames = protocol.CommandTypes; const CommandNames: any; /** Serializes a protocol message to its wire form using the host-supplied byteLength and newLine callbacks (signature only; implementation stripped from this declaration file). */ function formatMessage<T extends protocol.Message>(msg: T, logger: Logger, byteLength: (s: string, encoding: string) => number, newLine: string): string; type Event = <T extends object>(body: T, eventName: string) => void; /** Minimal surface for emitting protocol events; implemented by Session. */ interface EventSender { event: Event; } /** Construction options consumed by the Session constructor. */ interface SessionOptions { host: ServerHost; cancellationToken: ServerCancellationToken; useSingleInferredProject: boolean; useInferredProjectPerProjectRoot: boolean; typingsInstaller: ITypingsInstaller; byteLength: (buf: string, encoding?: string) => number; hrtime: (start?: number[]) => number[]; logger: Logger; /** * If falsy, all events are suppressed. */ canUseEvents: boolean; eventHandler?: ProjectServiceEventHandler; /** Has no effect if eventHandler is also specified. */ suppressDiagnosticEvents?: boolean; /** @deprecated use serverMode instead */ syntaxOnly?: boolean; serverMode?: LanguageServiceMode; throttleWaitMilliseconds?: number; noGetErrOnBackgroundUpdate?: boolean; globalPlugins?: readonly string[]; pluginProbeLocations?: readonly string[]; allowLocalPluginLoads?: boolean; typesMapLocation?: string; } /** One tsserver session: receives raw protocol messages (onMessage), dispatches them through the handlers table (executeCommand / addProtocolHandler), and emits responses and events back over the wire via send/event. */ class Session implements EventSender { private readonly gcTimer; protected projectService: ProjectService; private changeSeq; private performanceData; private currentRequestId; private errorCheck; protected host: ServerHost; private readonly cancellationToken; protected readonly typingsInstaller: ITypingsInstaller; protected byteLength: (buf: string, encoding?: string) => number; private hrtime; protected logger: Logger; protected canUseEvents: boolean; private suppressDiagnosticEvents?; private eventHandler; private readonly noGetErrOnBackgroundUpdate?; constructor(opts: SessionOptions); private sendRequestCompletedEvent; private addPerformanceData; private performanceEventHandler; private
defaultEventHandler; private projectsUpdatedInBackgroundEvent; logError(err: Error, cmd: string): void; private logErrorWorker; send(msg: protocol.Message): void; event<T extends object>(body: T, eventName: string): void; /** @deprecated */ output(info: any, cmdName: string, reqSeq?: number, errorMsg?: string): void; /* The remaining private members are the per-command request handlers and their helpers; their bodies are stripped in this declaration file. */ private doOutput; private semanticCheck; private syntacticCheck; private suggestionCheck; private sendDiagnosticsEvent; /** It is the caller's responsibility to verify that `!this.suppressDiagnosticEvents`. */ private updateErrorCheck; private cleanProjects; private cleanup; private getEncodedSyntacticClassifications; private getEncodedSemanticClassifications; private getProject; private getConfigFileAndProject; private getConfigFileDiagnostics; private convertToDiagnosticsWithLinePositionFromDiagnosticFile; private getCompilerOptionsDiagnostics; private convertToDiagnosticsWithLinePosition; private getDiagnosticsWorker; private getDefinition; private mapDefinitionInfoLocations; private getDefinitionAndBoundSpan; private getEmitOutput; private mapDefinitionInfo; private static mapToOriginalLocation; private toFileSpan; private toFileSpanWithContext; private getTypeDefinition; private mapImplementationLocations; private getImplementation; private getOccurrences; private getSyntacticDiagnosticsSync; private getSemanticDiagnosticsSync; private getSuggestionDiagnosticsSync; private getJsxClosingTag; private getDocumentHighlights; private setCompilerOptionsForInferredProjects; private getProjectInfo; private getProjectInfoWorker; private getRenameInfo; private getProjects; private getDefaultProject; private getRenameLocations; private mapRenameInfo; private toSpanGroups; private getReferences; /** * @param fileName is the name of the file to be opened * @param fileContent is a version of the file content that is known to be more up to date than the one on disk */ private openClientFile; private getPosition; private getPositionInFile; private
getFileAndProject; private getFileAndLanguageServiceForSyntacticOperation; private getFileAndProjectWorker; private getOutliningSpans; private getTodoComments; private getDocCommentTemplate; private getSpanOfEnclosingComment; private getIndentation; private getBreakpointStatement; private getNameOrDottedNameSpan; private isValidBraceCompletion; private getQuickInfoWorker; private getFormattingEditsForRange; private getFormattingEditsForRangeFull; private getFormattingEditsForDocumentFull; private getFormattingEditsAfterKeystrokeFull; private getFormattingEditsAfterKeystroke; private getCompletions; private getCompletionEntryDetails; private getCompileOnSaveAffectedFileList; private emitFile; private getSignatureHelpItems; private toPendingErrorCheck; private getDiagnostics; private change; private reload; private saveToTmp; private closeClientFile; private mapLocationNavigationBarItems; private getNavigationBarItems; private toLocationNavigationTree; private getNavigationTree; private getNavigateToItems; private getFullNavigateToItems; private getSupportedCodeFixes; private isLocation; private extractPositionOrRange; private getRange; private getApplicableRefactors; private getEditsForRefactor; private organizeImports; private getEditsForFileRename; private getCodeFixes; private getCombinedCodeFix; private applyCodeActionCommand; private getStartAndEndPosition; private mapCodeAction; private mapCodeFixAction; private mapTextChangesToCodeEdits; private mapTextChangeToCodeEdit; private convertTextChangeToCodeEdit; private getBraceMatching; private getDiagnosticsForProject; private configurePlugin; private getSmartSelectionRange; private toggleLineComment; private toggleMultilineComment; private commentSelection; private uncommentSelection; private mapSelectionRange; private getScriptInfoFromProjectService; private toProtocolCallHierarchyItem; private toProtocolCallHierarchyIncomingCall; private toProtocolCallHierarchyOutgoingCall; private prepareCallHierarchy;
private provideCallHierarchyIncomingCalls; private provideCallHierarchyOutgoingCalls; getCanonicalFileName(fileName: string): string; exit(): void; private notRequired; private requiredResponse; private handlers; addProtocolHandler(command: string, handler: (request: protocol.Request) => HandlerResponse): void; private setCurrentRequest; private resetCurrentRequest; executeWithRequestId<T>(requestId: number, f: () => T): T; executeCommand(request: protocol.Request): HandlerResponse; onMessage(message: string): void; private getFormatOptions; private getPreferences; private getHostFormatOptions; private getHostPreferences; } interface HandlerResponse { response?: {}; responseRequired?: boolean; } } declare namespace ts { /** @deprecated Use `factory.createNodeArray` or the factory supplied by your transformation context instead. */ const createNodeArray: <T extends Node>(elements?: readonly T[] | undefined, hasTrailingComma?: boolean | undefined) => NodeArray<T>; /** @deprecated Use `factory.createNumericLiteral` or the factory supplied by your transformation context instead. */ const createNumericLiteral: (value: string | number, numericLiteralFlags?: TokenFlags | undefined) => NumericLiteral; /** @deprecated Use `factory.createBigIntLiteral` or the factory supplied by your transformation context instead. */ const createBigIntLiteral: (value: string | PseudoBigInt) => BigIntLiteral; /** @deprecated Use `factory.createStringLiteral` or the factory supplied by your transformation context instead. */ const createStringLiteral: { (text: string, isSingleQuote?: boolean | undefined): StringLiteral; (text: string, isSingleQuote?: boolean | undefined, hasExtendedUnicodeEscape?: boolean | undefined): StringLiteral; }; /** @deprecated Use `factory.createStringLiteralFromNode` or the factory supplied by your transformation context instead. 
*/ const createStringLiteralFromNode: (sourceNode: Identifier | StringLiteral | NoSubstitutionTemplateLiteral | NumericLiteral, isSingleQuote?: boolean | undefined) => StringLiteral; /** @deprecated Use `factory.createRegularExpressionLiteral` or the factory supplied by your transformation context instead. */ const createRegularExpressionLiteral: (text: string) => RegularExpressionLiteral; /** @deprecated Use `factory.createLoopVariable` or the factory supplied by your transformation context instead. */ const createLoopVariable: () => Identifier; /** @deprecated Use `factory.createUniqueName` or the factory supplied by your transformation context instead. */ const createUniqueName: (text: string, flags?: GeneratedIdentifierFlags | undefined) => Identifier; /** @deprecated Use `factory.createPrivateIdentifier` or the factory supplied by your transformation context instead. */ const createPrivateIdentifier: (text: string) => PrivateIdentifier; /** @deprecated Use `factory.createSuper` or the factory supplied by your transformation context instead. */ const createSuper: () => SuperExpression; /** @deprecated Use `factory.createThis` or the factory supplied by your transformation context instead. */ const createThis: () => ThisExpression; /** @deprecated Use `factory.createNull` or the factory supplied by your transformation context instead. */ const createNull: () => NullLiteral; /** @deprecated Use `factory.createTrue` or the factory supplied by your transformation context instead. */ const createTrue: () => TrueLiteral; /** @deprecated Use `factory.createFalse` or the factory supplied by your transformation context instead. */ const createFalse: () => FalseLiteral; /** @deprecated Use `factory.createModifier` or the factory supplied by your transformation context instead. 
*/ const createModifier: <T extends ModifierSyntaxKind>(kind: T) => ModifierToken<T>; /** @deprecated Use `factory.createModifiersFromModifierFlags` or the factory supplied by your transformation context instead. */ const createModifiersFromModifierFlags: (flags: ModifierFlags) => Modifier[]; /** @deprecated Use `factory.createQualifiedName` or the factory supplied by your transformation context instead. */ const createQualifiedName: (left: EntityName, right: string | Identifier) => QualifiedName; /** @deprecated Use `factory.updateQualifiedName` or the factory supplied by your transformation context instead. */ const updateQualifiedName: (node: QualifiedName, left: EntityName, right: Identifier) => QualifiedName; /** @deprecated Use `factory.createComputedPropertyName` or the factory supplied by your transformation context instead. */ const createComputedPropertyName: (expression: Expression) => ComputedPropertyName; /** @deprecated Use `factory.updateComputedPropertyName` or the factory supplied by your transformation context instead. */ const updateComputedPropertyName: (node: ComputedPropertyName, expression: Expression) => ComputedPropertyName; /** @deprecated Use `factory.createTypeParameterDeclaration` or the factory supplied by your transformation context instead. */ const createTypeParameterDeclaration: (name: string | Identifier, constraint?: TypeNode | undefined, defaultType?: TypeNode | undefined) => TypeParameterDeclaration; /** @deprecated Use `factory.updateTypeParameterDeclaration` or the factory supplied by your transformation context instead. */ const updateTypeParameterDeclaration: (node: TypeParameterDeclaration, name: Identifier, constraint: TypeNode | undefined, defaultType: TypeNode | undefined) => TypeParameterDeclaration; /** @deprecated Use `factory.createParameterDeclaration` or the factory supplied by your transformation context instead. 
*/ const createParameter: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, dotDotDotToken: DotDotDotToken | undefined, name: string | Identifier | ObjectBindingPattern | ArrayBindingPattern, questionToken?: QuestionToken | undefined, type?: TypeNode | undefined, initializer?: Expression | undefined) => ParameterDeclaration; /** @deprecated Use `factory.updateParameterDeclaration` or the factory supplied by your transformation context instead. */ const updateParameter: (node: ParameterDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, dotDotDotToken: DotDotDotToken | undefined, name: string | Identifier | ObjectBindingPattern | ArrayBindingPattern, questionToken: QuestionToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined) => ParameterDeclaration; /** @deprecated Use `factory.createDecorator` or the factory supplied by your transformation context instead. */ const createDecorator: (expression: Expression) => Decorator; /** @deprecated Use `factory.updateDecorator` or the factory supplied by your transformation context instead. */ const updateDecorator: (node: Decorator, expression: Expression) => Decorator; /** @deprecated Use `factory.createPropertyDeclaration` or the factory supplied by your transformation context instead. */ const createProperty: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, questionOrExclamationToken: QuestionToken | ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined) => PropertyDeclaration; /** @deprecated Use `factory.updatePropertyDeclaration` or the factory supplied by your transformation context instead. 
*/ const updateProperty: (node: PropertyDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, questionOrExclamationToken: QuestionToken | ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined) => PropertyDeclaration; /** @deprecated Use `factory.createMethodDeclaration` or the factory supplied by your transformation context instead. */ const createMethod: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, questionToken: QuestionToken | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => MethodDeclaration; /** @deprecated Use `factory.updateMethodDeclaration` or the factory supplied by your transformation context instead. */ const updateMethod: (node: MethodDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: PropertyName, questionToken: QuestionToken | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => MethodDeclaration; /** @deprecated Use `factory.createConstructorDeclaration` or the factory supplied by your transformation context instead. 
*/ const createConstructor: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], body: Block | undefined) => ConstructorDeclaration; /** @deprecated Use `factory.updateConstructorDeclaration` or the factory supplied by your transformation context instead. */ const updateConstructor: (node: ConstructorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], body: Block | undefined) => ConstructorDeclaration; /** @deprecated Use `factory.createGetAccessorDeclaration` or the factory supplied by your transformation context instead. */ const createGetAccessor: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => GetAccessorDeclaration; /** @deprecated Use `factory.updateGetAccessorDeclaration` or the factory supplied by your transformation context instead. */ const updateGetAccessor: (node: GetAccessorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: PropertyName, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => GetAccessorDeclaration; /** @deprecated Use `factory.createSetAccessorDeclaration` or the factory supplied by your transformation context instead. 
*/ const createSetAccessor: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, parameters: readonly ParameterDeclaration[], body: Block | undefined) => SetAccessorDeclaration; /** @deprecated Use `factory.updateSetAccessorDeclaration` or the factory supplied by your transformation context instead. */ const updateSetAccessor: (node: SetAccessorDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: PropertyName, parameters: readonly ParameterDeclaration[], body: Block | undefined) => SetAccessorDeclaration; /** @deprecated Use `factory.createCallSignature` or the factory supplied by your transformation context instead. */ const createCallSignature: (typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined) => CallSignatureDeclaration; /** @deprecated Use `factory.updateCallSignature` or the factory supplied by your transformation context instead. */ const updateCallSignature: (node: CallSignatureDeclaration, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined) => CallSignatureDeclaration; /** @deprecated Use `factory.createConstructSignature` or the factory supplied by your transformation context instead. */ const createConstructSignature: (typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined) => ConstructSignatureDeclaration; /** @deprecated Use `factory.updateConstructSignature` or the factory supplied by your transformation context instead. 
*/ const updateConstructSignature: (node: ConstructSignatureDeclaration, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined) => ConstructSignatureDeclaration; /** @deprecated Use `factory.updateIndexSignature` or the factory supplied by your transformation context instead. */ const updateIndexSignature: (node: IndexSignatureDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode) => IndexSignatureDeclaration; /** @deprecated Use `factory.createKeywordTypeNode` or the factory supplied by your transformation context instead. */ const createKeywordTypeNode: <TKind extends KeywordTypeSyntaxKind>(kind: TKind) => KeywordTypeNode<TKind>; /** @deprecated Use `factory.createTypePredicateNode` or the factory supplied by your transformation context instead. */ const createTypePredicateNodeWithModifier: (assertsModifier: AssertsKeyword | undefined, parameterName: string | Identifier | ThisTypeNode, type: TypeNode | undefined) => TypePredicateNode; /** @deprecated Use `factory.updateTypePredicateNode` or the factory supplied by your transformation context instead. */ const updateTypePredicateNodeWithModifier: (node: TypePredicateNode, assertsModifier: AssertsKeyword | undefined, parameterName: Identifier | ThisTypeNode, type: TypeNode | undefined) => TypePredicateNode; /** @deprecated Use `factory.createTypeReferenceNode` or the factory supplied by your transformation context instead. */ const createTypeReferenceNode: (typeName: string | Identifier | QualifiedName, typeArguments?: readonly TypeNode[] | undefined) => TypeReferenceNode; /** @deprecated Use `factory.updateTypeReferenceNode` or the factory supplied by your transformation context instead. 
*/ const updateTypeReferenceNode: (node: TypeReferenceNode, typeName: EntityName, typeArguments: NodeArray<TypeNode> | undefined) => TypeReferenceNode; /** @deprecated Use `factory.createFunctionTypeNode` or the factory supplied by your transformation context instead. */ const createFunctionTypeNode: (typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode) => FunctionTypeNode; /** @deprecated Use `factory.updateFunctionTypeNode` or the factory supplied by your transformation context instead. */ const updateFunctionTypeNode: (node: FunctionTypeNode, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode) => FunctionTypeNode; /** @deprecated Use `factory.createConstructorTypeNode` or the factory supplied by your transformation context instead. */ const createConstructorTypeNode: (typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode) => ConstructorTypeNode; /** @deprecated Use `factory.updateConstructorTypeNode` or the factory supplied by your transformation context instead. */ const updateConstructorTypeNode: (node: ConstructorTypeNode, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode) => ConstructorTypeNode; /** @deprecated Use `factory.createTypeQueryNode` or the factory supplied by your transformation context instead. */ const createTypeQueryNode: (exprName: EntityName) => TypeQueryNode; /** @deprecated Use `factory.updateTypeQueryNode` or the factory supplied by your transformation context instead. */ const updateTypeQueryNode: (node: TypeQueryNode, exprName: EntityName) => TypeQueryNode; /** @deprecated Use `factory.createTypeLiteralNode` or the factory supplied by your transformation context instead. 
*/ const createTypeLiteralNode: (members: readonly TypeElement[] | undefined) => TypeLiteralNode; /** @deprecated Use `factory.updateTypeLiteralNode` or the factory supplied by your transformation context instead. */ const updateTypeLiteralNode: (node: TypeLiteralNode, members: NodeArray<TypeElement>) => TypeLiteralNode; /** @deprecated Use `factory.createArrayTypeNode` or the factory supplied by your transformation context instead. */ const createArrayTypeNode: (elementType: TypeNode) => ArrayTypeNode; /** @deprecated Use `factory.updateArrayTypeNode` or the factory supplied by your transformation context instead. */ const updateArrayTypeNode: (node: ArrayTypeNode, elementType: TypeNode) => ArrayTypeNode; /** @deprecated Use `factory.createTupleTypeNode` or the factory supplied by your transformation context instead. */ const createTupleTypeNode: (elements: readonly (TypeNode | NamedTupleMember)[]) => TupleTypeNode; /** @deprecated Use `factory.updateTupleTypeNode` or the factory supplied by your transformation context instead. */ const updateTupleTypeNode: (node: TupleTypeNode, elements: readonly (TypeNode | NamedTupleMember)[]) => TupleTypeNode; /** @deprecated Use `factory.createOptionalTypeNode` or the factory supplied by your transformation context instead. */ const createOptionalTypeNode: (type: TypeNode) => OptionalTypeNode; /** @deprecated Use `factory.updateOptionalTypeNode` or the factory supplied by your transformation context instead. */ const updateOptionalTypeNode: (node: OptionalTypeNode, type: TypeNode) => OptionalTypeNode; /** @deprecated Use `factory.createRestTypeNode` or the factory supplied by your transformation context instead. */ const createRestTypeNode: (type: TypeNode) => RestTypeNode; /** @deprecated Use `factory.updateRestTypeNode` or the factory supplied by your transformation context instead. 
*/ const updateRestTypeNode: (node: RestTypeNode, type: TypeNode) => RestTypeNode; /** @deprecated Use `factory.createUnionTypeNode` or the factory supplied by your transformation context instead. */ const createUnionTypeNode: (types: readonly TypeNode[]) => UnionTypeNode; /** @deprecated Use `factory.updateUnionTypeNode` or the factory supplied by your transformation context instead. */ const updateUnionTypeNode: (node: UnionTypeNode, types: NodeArray<TypeNode>) => UnionTypeNode; /** @deprecated Use `factory.createIntersectionTypeNode` or the factory supplied by your transformation context instead. */ const createIntersectionTypeNode: (types: readonly TypeNode[]) => IntersectionTypeNode; /** @deprecated Use `factory.updateIntersectionTypeNode` or the factory supplied by your transformation context instead. */ const updateIntersectionTypeNode: (node: IntersectionTypeNode, types: NodeArray<TypeNode>) => IntersectionTypeNode; /** @deprecated Use `factory.createConditionalTypeNode` or the factory supplied by your transformation context instead. */ const createConditionalTypeNode: (checkType: TypeNode, extendsType: TypeNode, trueType: TypeNode, falseType: TypeNode) => ConditionalTypeNode; /** @deprecated Use `factory.updateConditionalTypeNode` or the factory supplied by your transformation context instead. */ const updateConditionalTypeNode: (node: ConditionalTypeNode, checkType: TypeNode, extendsType: TypeNode, trueType: TypeNode, falseType: TypeNode) => ConditionalTypeNode; /** @deprecated Use `factory.createInferTypeNode` or the factory supplied by your transformation context instead. */ const createInferTypeNode: (typeParameter: TypeParameterDeclaration) => InferTypeNode; /** @deprecated Use `factory.updateInferTypeNode` or the factory supplied by your transformation context instead. 
*/ const updateInferTypeNode: (node: InferTypeNode, typeParameter: TypeParameterDeclaration) => InferTypeNode; /** @deprecated Use `factory.createImportTypeNode` or the factory supplied by your transformation context instead. */ const createImportTypeNode: (argument: TypeNode, qualifier?: Identifier | QualifiedName | undefined, typeArguments?: readonly TypeNode[] | undefined, isTypeOf?: boolean | undefined) => ImportTypeNode; /** @deprecated Use `factory.updateImportTypeNode` or the factory supplied by your transformation context instead. */ const updateImportTypeNode: (node: ImportTypeNode, argument: TypeNode, qualifier: Identifier | QualifiedName | undefined, typeArguments: readonly TypeNode[] | undefined, isTypeOf?: boolean | undefined) => ImportTypeNode; /** @deprecated Use `factory.createParenthesizedType` or the factory supplied by your transformation context instead. */ const createParenthesizedType: (type: TypeNode) => ParenthesizedTypeNode; /** @deprecated Use `factory.updateParenthesizedType` or the factory supplied by your transformation context instead. */ const updateParenthesizedType: (node: ParenthesizedTypeNode, type: TypeNode) => ParenthesizedTypeNode; /** @deprecated Use `factory.createThisTypeNode` or the factory supplied by your transformation context instead. */ const createThisTypeNode: () => ThisTypeNode; /** @deprecated Use `factory.updateTypeOperatorNode` or the factory supplied by your transformation context instead. */ const updateTypeOperatorNode: (node: TypeOperatorNode, type: TypeNode) => TypeOperatorNode; /** @deprecated Use `factory.createIndexedAccessTypeNode` or the factory supplied by your transformation context instead. */ const createIndexedAccessTypeNode: (objectType: TypeNode, indexType: TypeNode) => IndexedAccessTypeNode; /** @deprecated Use `factory.updateIndexedAccessTypeNode` or the factory supplied by your transformation context instead. 
*/ const updateIndexedAccessTypeNode: (node: IndexedAccessTypeNode, objectType: TypeNode, indexType: TypeNode) => IndexedAccessTypeNode; /** @deprecated Use `factory.createMappedTypeNode` or the factory supplied by your transformation context instead. */ const createMappedTypeNode: (readonlyToken: ReadonlyKeyword | PlusToken | MinusToken | undefined, typeParameter: TypeParameterDeclaration, nameType: TypeNode | undefined, questionToken: QuestionToken | PlusToken | MinusToken | undefined, type: TypeNode | undefined) => MappedTypeNode; /** @deprecated Use `factory.updateMappedTypeNode` or the factory supplied by your transformation context instead. */ const updateMappedTypeNode: (node: MappedTypeNode, readonlyToken: ReadonlyKeyword | PlusToken | MinusToken | undefined, typeParameter: TypeParameterDeclaration, nameType: TypeNode | undefined, questionToken: QuestionToken | PlusToken | MinusToken | undefined, type: TypeNode | undefined) => MappedTypeNode; /** @deprecated Use `factory.createLiteralTypeNode` or the factory supplied by your transformation context instead. */ const createLiteralTypeNode: (literal: LiteralExpression | TrueLiteral | FalseLiteral | PrefixUnaryExpression | NullLiteral) => LiteralTypeNode; /** @deprecated Use `factory.updateLiteralTypeNode` or the factory supplied by your transformation context instead. */ const updateLiteralTypeNode: (node: LiteralTypeNode, literal: LiteralExpression | TrueLiteral | FalseLiteral | PrefixUnaryExpression | NullLiteral) => LiteralTypeNode; /** @deprecated Use `factory.createObjectBindingPattern` or the factory supplied by your transformation context instead. */ const createObjectBindingPattern: (elements: readonly BindingElement[]) => ObjectBindingPattern; /** @deprecated Use `factory.updateObjectBindingPattern` or the factory supplied by your transformation context instead. 
*/ const updateObjectBindingPattern: (node: ObjectBindingPattern, elements: readonly BindingElement[]) => ObjectBindingPattern; /** @deprecated Use `factory.createArrayBindingPattern` or the factory supplied by your transformation context instead. */ const createArrayBindingPattern: (elements: readonly ArrayBindingElement[]) => ArrayBindingPattern; /** @deprecated Use `factory.updateArrayBindingPattern` or the factory supplied by your transformation context instead. */ const updateArrayBindingPattern: (node: ArrayBindingPattern, elements: readonly ArrayBindingElement[]) => ArrayBindingPattern; /** @deprecated Use `factory.createBindingElement` or the factory supplied by your transformation context instead. */ const createBindingElement: (dotDotDotToken: DotDotDotToken | undefined, propertyName: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier | undefined, name: string | Identifier | ObjectBindingPattern | ArrayBindingPattern, initializer?: Expression | undefined) => BindingElement; /** @deprecated Use `factory.updateBindingElement` or the factory supplied by your transformation context instead. */ const updateBindingElement: (node: BindingElement, dotDotDotToken: DotDotDotToken | undefined, propertyName: Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier | undefined, name: BindingName, initializer: Expression | undefined) => BindingElement; /** @deprecated Use `factory.createArrayLiteral` or the factory supplied by your transformation context instead. */ const createArrayLiteral: (elements?: readonly Expression[] | undefined, multiLine?: boolean | undefined) => ArrayLiteralExpression; /** @deprecated Use `factory.updateArrayLiteral` or the factory supplied by your transformation context instead. 
*/ const updateArrayLiteral: (node: ArrayLiteralExpression, elements: readonly Expression[]) => ArrayLiteralExpression; /** @deprecated Use `factory.createObjectLiteral` or the factory supplied by your transformation context instead. */ const createObjectLiteral: (properties?: readonly ObjectLiteralElementLike[] | undefined, multiLine?: boolean | undefined) => ObjectLiteralExpression; /** @deprecated Use `factory.updateObjectLiteral` or the factory supplied by your transformation context instead. */ const updateObjectLiteral: (node: ObjectLiteralExpression, properties: readonly ObjectLiteralElementLike[]) => ObjectLiteralExpression; /** @deprecated Use `factory.createPropertyAccess` or the factory supplied by your transformation context instead. */ const createPropertyAccess: (expression: Expression, name: string | Identifier | PrivateIdentifier) => PropertyAccessExpression; /** @deprecated Use `factory.updatePropertyAccess` or the factory supplied by your transformation context instead. */ const updatePropertyAccess: (node: PropertyAccessExpression, expression: Expression, name: Identifier | PrivateIdentifier) => PropertyAccessExpression; /** @deprecated Use `factory.createPropertyAccessChain` or the factory supplied by your transformation context instead. */ const createPropertyAccessChain: (expression: Expression, questionDotToken: QuestionDotToken | undefined, name: string | Identifier | PrivateIdentifier) => PropertyAccessChain; /** @deprecated Use `factory.updatePropertyAccessChain` or the factory supplied by your transformation context instead. */ const updatePropertyAccessChain: (node: PropertyAccessChain, expression: Expression, questionDotToken: QuestionDotToken | undefined, name: Identifier | PrivateIdentifier) => PropertyAccessChain; /** @deprecated Use `factory.createElementAccess` or the factory supplied by your transformation context instead. 
*/ const createElementAccess: (expression: Expression, index: number | Expression) => ElementAccessExpression; /** @deprecated Use `factory.updateElementAccess` or the factory supplied by your transformation context instead. */ const updateElementAccess: (node: ElementAccessExpression, expression: Expression, argumentExpression: Expression) => ElementAccessExpression; /** @deprecated Use `factory.createElementAccessChain` or the factory supplied by your transformation context instead. */ const createElementAccessChain: (expression: Expression, questionDotToken: QuestionDotToken | undefined, index: number | Expression) => ElementAccessChain; /** @deprecated Use `factory.updateElementAccessChain` or the factory supplied by your transformation context instead. */ const updateElementAccessChain: (node: ElementAccessChain, expression: Expression, questionDotToken: QuestionDotToken | undefined, argumentExpression: Expression) => ElementAccessChain; /** @deprecated Use `factory.createCall` or the factory supplied by your transformation context instead. */ const createCall: (expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined) => CallExpression; /** @deprecated Use `factory.updateCall` or the factory supplied by your transformation context instead. */ const updateCall: (node: CallExpression, expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[]) => CallExpression; /** @deprecated Use `factory.createCallChain` or the factory supplied by your transformation context instead. */ const createCallChain: (expression: Expression, questionDotToken: QuestionDotToken | undefined, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined) => CallChain; /** @deprecated Use `factory.updateCallChain` or the factory supplied by your transformation context instead. 
*/ const updateCallChain: (node: CallChain, expression: Expression, questionDotToken: QuestionDotToken | undefined, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[]) => CallChain; /** @deprecated Use `factory.createNew` or the factory supplied by your transformation context instead. */ const createNew: (expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined) => NewExpression; /** @deprecated Use `factory.updateNew` or the factory supplied by your transformation context instead. */ const updateNew: (node: NewExpression, expression: Expression, typeArguments: readonly TypeNode[] | undefined, argumentsArray: readonly Expression[] | undefined) => NewExpression; /** @deprecated Use `factory.createTypeAssertion` or the factory supplied by your transformation context instead. */ const createTypeAssertion: (type: TypeNode, expression: Expression) => TypeAssertion; /** @deprecated Use `factory.updateTypeAssertion` or the factory supplied by your transformation context instead. */ const updateTypeAssertion: (node: TypeAssertion, type: TypeNode, expression: Expression) => TypeAssertion; /** @deprecated Use `factory.createParen` or the factory supplied by your transformation context instead. */ const createParen: (expression: Expression) => ParenthesizedExpression; /** @deprecated Use `factory.updateParen` or the factory supplied by your transformation context instead. */ const updateParen: (node: ParenthesizedExpression, expression: Expression) => ParenthesizedExpression; /** @deprecated Use `factory.createFunctionExpression` or the factory supplied by your transformation context instead. 
*/ const createFunctionExpression: (modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[] | undefined, type: TypeNode | undefined, body: Block) => FunctionExpression; /** @deprecated Use `factory.updateFunctionExpression` or the factory supplied by your transformation context instead. */ const updateFunctionExpression: (node: FunctionExpression, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block) => FunctionExpression; /** @deprecated Use `factory.createDelete` or the factory supplied by your transformation context instead. */ const createDelete: (expression: Expression) => DeleteExpression; /** @deprecated Use `factory.updateDelete` or the factory supplied by your transformation context instead. */ const updateDelete: (node: DeleteExpression, expression: Expression) => DeleteExpression; /** @deprecated Use `factory.createTypeOf` or the factory supplied by your transformation context instead. */ const createTypeOf: (expression: Expression) => TypeOfExpression; /** @deprecated Use `factory.updateTypeOf` or the factory supplied by your transformation context instead. */ const updateTypeOf: (node: TypeOfExpression, expression: Expression) => TypeOfExpression; /** @deprecated Use `factory.createVoid` or the factory supplied by your transformation context instead. */ const createVoid: (expression: Expression) => VoidExpression; /** @deprecated Use `factory.updateVoid` or the factory supplied by your transformation context instead. 
*/ const updateVoid: (node: VoidExpression, expression: Expression) => VoidExpression; /** @deprecated Use `factory.createAwait` or the factory supplied by your transformation context instead. */ const createAwait: (expression: Expression) => AwaitExpression; /** @deprecated Use `factory.updateAwait` or the factory supplied by your transformation context instead. */ const updateAwait: (node: AwaitExpression, expression: Expression) => AwaitExpression; /** @deprecated Use `factory.createPrefix` or the factory supplied by your transformation context instead. */ const createPrefix: (operator: PrefixUnaryOperator, operand: Expression) => PrefixUnaryExpression; /** @deprecated Use `factory.updatePrefix` or the factory supplied by your transformation context instead. */ const updatePrefix: (node: PrefixUnaryExpression, operand: Expression) => PrefixUnaryExpression; /** @deprecated Use `factory.createPostfix` or the factory supplied by your transformation context instead. */ const createPostfix: (operand: Expression, operator: PostfixUnaryOperator) => PostfixUnaryExpression; /** @deprecated Use `factory.updatePostfix` or the factory supplied by your transformation context instead. */ const updatePostfix: (node: PostfixUnaryExpression, operand: Expression) => PostfixUnaryExpression; /** @deprecated Use `factory.createBinary` or the factory supplied by your transformation context instead. 
*/ const createBinary: (left: Expression, operator: SyntaxKind.CommaToken | SyntaxKind.LessThanToken | SyntaxKind.GreaterThanToken | SyntaxKind.LessThanEqualsToken | SyntaxKind.GreaterThanEqualsToken | SyntaxKind.EqualsEqualsToken | SyntaxKind.ExclamationEqualsToken | SyntaxKind.EqualsEqualsEqualsToken | SyntaxKind.ExclamationEqualsEqualsToken | SyntaxKind.PlusToken | SyntaxKind.MinusToken | SyntaxKind.AsteriskToken | SyntaxKind.AsteriskAsteriskToken | SyntaxKind.SlashToken | SyntaxKind.PercentToken | SyntaxKind.LessThanLessThanToken | SyntaxKind.GreaterThanGreaterThanToken | SyntaxKind.GreaterThanGreaterThanGreaterThanToken | SyntaxKind.AmpersandToken | SyntaxKind.BarToken | SyntaxKind.CaretToken | SyntaxKind.AmpersandAmpersandToken | SyntaxKind.BarBarToken | SyntaxKind.QuestionQuestionToken | SyntaxKind.EqualsToken | SyntaxKind.PlusEqualsToken | SyntaxKind.MinusEqualsToken | SyntaxKind.AsteriskEqualsToken | SyntaxKind.AsteriskAsteriskEqualsToken | SyntaxKind.SlashEqualsToken | SyntaxKind.PercentEqualsToken | SyntaxKind.LessThanLessThanEqualsToken | SyntaxKind.GreaterThanGreaterThanEqualsToken | SyntaxKind.GreaterThanGreaterThanGreaterThanEqualsToken | SyntaxKind.AmpersandEqualsToken | SyntaxKind.BarEqualsToken | SyntaxKind.BarBarEqualsToken | SyntaxKind.AmpersandAmpersandEqualsToken | SyntaxKind.QuestionQuestionEqualsToken | SyntaxKind.CaretEqualsToken | SyntaxKind.InKeyword | SyntaxKind.InstanceOfKeyword | BinaryOperatorToken, right: Expression) => BinaryExpression; /** @deprecated Use `factory.updateConditional` or the factory supplied by your transformation context instead. */ const updateConditional: (node: ConditionalExpression, condition: Expression, questionToken: QuestionToken, whenTrue: Expression, colonToken: ColonToken, whenFalse: Expression) => ConditionalExpression; /** @deprecated Use `factory.createTemplateExpression` or the factory supplied by your transformation context instead. 
*/ const createTemplateExpression: (head: TemplateHead, templateSpans: readonly TemplateSpan[]) => TemplateExpression; /** @deprecated Use `factory.updateTemplateExpression` or the factory supplied by your transformation context instead. */ const updateTemplateExpression: (node: TemplateExpression, head: TemplateHead, templateSpans: readonly TemplateSpan[]) => TemplateExpression; /** @deprecated Use `factory.createTemplateHead` or the factory supplied by your transformation context instead. */ const createTemplateHead: { (text: string, rawText?: string | undefined, templateFlags?: TokenFlags | undefined): TemplateHead; (text: string | undefined, rawText: string, templateFlags?: TokenFlags | undefined): TemplateHead; }; /** @deprecated Use `factory.createTemplateMiddle` or the factory supplied by your transformation context instead. */ const createTemplateMiddle: { (text: string, rawText?: string | undefined, templateFlags?: TokenFlags | undefined): TemplateMiddle; (text: string | undefined, rawText: string, templateFlags?: TokenFlags | undefined): TemplateMiddle; }; /** @deprecated Use `factory.createTemplateTail` or the factory supplied by your transformation context instead. */ const createTemplateTail: { (text: string, rawText?: string | undefined, templateFlags?: TokenFlags | undefined): TemplateTail; (text: string | undefined, rawText: string, templateFlags?: TokenFlags | undefined): TemplateTail; }; /** @deprecated Use `factory.createNoSubstitutionTemplateLiteral` or the factory supplied by your transformation context instead. */ const createNoSubstitutionTemplateLiteral: { (text: string, rawText?: string | undefined): NoSubstitutionTemplateLiteral; (text: string | undefined, rawText: string): NoSubstitutionTemplateLiteral; }; /** @deprecated Use `factory.updateYield` or the factory supplied by your transformation context instead. 
*/ const updateYield: (node: YieldExpression, asteriskToken: AsteriskToken | undefined, expression: Expression | undefined) => YieldExpression; /** @deprecated Use `factory.createSpread` or the factory supplied by your transformation context instead. */ const createSpread: (expression: Expression) => SpreadElement; /** @deprecated Use `factory.updateSpread` or the factory supplied by your transformation context instead. */ const updateSpread: (node: SpreadElement, expression: Expression) => SpreadElement; /** @deprecated Use `factory.createOmittedExpression` or the factory supplied by your transformation context instead. */ const createOmittedExpression: () => OmittedExpression; /** @deprecated Use `factory.createAsExpression` or the factory supplied by your transformation context instead. */ const createAsExpression: (expression: Expression, type: TypeNode) => AsExpression; /** @deprecated Use `factory.updateAsExpression` or the factory supplied by your transformation context instead. */ const updateAsExpression: (node: AsExpression, expression: Expression, type: TypeNode) => AsExpression; /** @deprecated Use `factory.createNonNullExpression` or the factory supplied by your transformation context instead. */ const createNonNullExpression: (expression: Expression) => NonNullExpression; /** @deprecated Use `factory.updateNonNullExpression` or the factory supplied by your transformation context instead. */ const updateNonNullExpression: (node: NonNullExpression, expression: Expression) => NonNullExpression; /** @deprecated Use `factory.createNonNullChain` or the factory supplied by your transformation context instead. */ const createNonNullChain: (expression: Expression) => NonNullChain; /** @deprecated Use `factory.updateNonNullChain` or the factory supplied by your transformation context instead. 
*/ const updateNonNullChain: (node: NonNullChain, expression: Expression) => NonNullChain; /** @deprecated Use `factory.createMetaProperty` or the factory supplied by your transformation context instead. */ const createMetaProperty: (keywordToken: SyntaxKind.ImportKeyword | SyntaxKind.NewKeyword, name: Identifier) => MetaProperty; /** @deprecated Use `factory.updateMetaProperty` or the factory supplied by your transformation context instead. */ const updateMetaProperty: (node: MetaProperty, name: Identifier) => MetaProperty; /** @deprecated Use `factory.createTemplateSpan` or the factory supplied by your transformation context instead. */ const createTemplateSpan: (expression: Expression, literal: TemplateMiddle | TemplateTail) => TemplateSpan; /** @deprecated Use `factory.updateTemplateSpan` or the factory supplied by your transformation context instead. */ const updateTemplateSpan: (node: TemplateSpan, expression: Expression, literal: TemplateMiddle | TemplateTail) => TemplateSpan; /** @deprecated Use `factory.createSemicolonClassElement` or the factory supplied by your transformation context instead. */ const createSemicolonClassElement: () => SemicolonClassElement; /** @deprecated Use `factory.createBlock` or the factory supplied by your transformation context instead. */ const createBlock: (statements: readonly Statement[], multiLine?: boolean | undefined) => Block; /** @deprecated Use `factory.updateBlock` or the factory supplied by your transformation context instead. */ const updateBlock: (node: Block, statements: readonly Statement[]) => Block; /** @deprecated Use `factory.createVariableStatement` or the factory supplied by your transformation context instead. */ const createVariableStatement: (modifiers: readonly Modifier[] | undefined, declarationList: VariableDeclarationList | readonly VariableDeclaration[]) => VariableStatement; /** @deprecated Use `factory.updateVariableStatement` or the factory supplied by your transformation context instead. 
*/ const updateVariableStatement: (node: VariableStatement, modifiers: readonly Modifier[] | undefined, declarationList: VariableDeclarationList) => VariableStatement; /** @deprecated Use `factory.createEmptyStatement` or the factory supplied by your transformation context instead. */ const createEmptyStatement: () => EmptyStatement; /** @deprecated Use `factory.createExpressionStatement` or the factory supplied by your transformation context instead. */ const createExpressionStatement: (expression: Expression) => ExpressionStatement; /** @deprecated Use `factory.updateExpressionStatement` or the factory supplied by your transformation context instead. */ const updateExpressionStatement: (node: ExpressionStatement, expression: Expression) => ExpressionStatement; /** @deprecated Use `factory.createExpressionStatement` or the factory supplied by your transformation context instead. */ const createStatement: (expression: Expression) => ExpressionStatement; /** @deprecated Use `factory.updateExpressionStatement` or the factory supplied by your transformation context instead. */ const updateStatement: (node: ExpressionStatement, expression: Expression) => ExpressionStatement; /** @deprecated Use `factory.createIf` or the factory supplied by your transformation context instead. */ const createIf: (expression: Expression, thenStatement: Statement, elseStatement?: Statement | undefined) => IfStatement; /** @deprecated Use `factory.updateIf` or the factory supplied by your transformation context instead. */ const updateIf: (node: IfStatement, expression: Expression, thenStatement: Statement, elseStatement: Statement | undefined) => IfStatement; /** @deprecated Use `factory.createDo` or the factory supplied by your transformation context instead. */ const createDo: (statement: Statement, expression: Expression) => DoStatement; /** @deprecated Use `factory.updateDo` or the factory supplied by your transformation context instead. 
*/ const updateDo: (node: DoStatement, statement: Statement, expression: Expression) => DoStatement; /** @deprecated Use `factory.createWhile` or the factory supplied by your transformation context instead. */ const createWhile: (expression: Expression, statement: Statement) => WhileStatement; /** @deprecated Use `factory.updateWhile` or the factory supplied by your transformation context instead. */ const updateWhile: (node: WhileStatement, expression: Expression, statement: Statement) => WhileStatement; /** @deprecated Use `factory.createFor` or the factory supplied by your transformation context instead. */ const createFor: (initializer: Expression | VariableDeclarationList | undefined, condition: Expression | undefined, incrementor: Expression | undefined, statement: Statement) => ForStatement; /** @deprecated Use `factory.updateFor` or the factory supplied by your transformation context instead. */ const updateFor: (node: ForStatement, initializer: Expression | VariableDeclarationList | undefined, condition: Expression | undefined, incrementor: Expression | undefined, statement: Statement) => ForStatement; /** @deprecated Use `factory.createForIn` or the factory supplied by your transformation context instead. */ const createForIn: (initializer: ForInitializer, expression: Expression, statement: Statement) => ForInStatement; /** @deprecated Use `factory.updateForIn` or the factory supplied by your transformation context instead. */ const updateForIn: (node: ForInStatement, initializer: ForInitializer, expression: Expression, statement: Statement) => ForInStatement; /** @deprecated Use `factory.createForOf` or the factory supplied by your transformation context instead. */ const createForOf: (awaitModifier: AwaitKeyword | undefined, initializer: ForInitializer, expression: Expression, statement: Statement) => ForOfStatement; /** @deprecated Use `factory.updateForOf` or the factory supplied by your transformation context instead. 
*/ const updateForOf: (node: ForOfStatement, awaitModifier: AwaitKeyword | undefined, initializer: ForInitializer, expression: Expression, statement: Statement) => ForOfStatement;
// NOTE(review): machine-generated deprecation shims for the pre-4.0 top-level
// node-factory functions; each alias forwards to the corresponding method on
// `ts.factory`. Do not hand-edit — regenerate from the compiler API instead.
/** @deprecated Use `factory.createContinue` or the factory supplied by your transformation context instead. */ const createContinue: (label?: string | Identifier | undefined) => ContinueStatement; /** @deprecated Use `factory.updateContinue` or the factory supplied by your transformation context instead. */ const updateContinue: (node: ContinueStatement, label: Identifier | undefined) => ContinueStatement; /** @deprecated Use `factory.createBreak` or the factory supplied by your transformation context instead. */ const createBreak: (label?: string | Identifier | undefined) => BreakStatement; /** @deprecated Use `factory.updateBreak` or the factory supplied by your transformation context instead. */ const updateBreak: (node: BreakStatement, label: Identifier | undefined) => BreakStatement; /** @deprecated Use `factory.createReturn` or the factory supplied by your transformation context instead. */ const createReturn: (expression?: Expression | undefined) => ReturnStatement; /** @deprecated Use `factory.updateReturn` or the factory supplied by your transformation context instead. */ const updateReturn: (node: ReturnStatement, expression: Expression | undefined) => ReturnStatement; /** @deprecated Use `factory.createWith` or the factory supplied by your transformation context instead. */ const createWith: (expression: Expression, statement: Statement) => WithStatement; /** @deprecated Use `factory.updateWith` or the factory supplied by your transformation context instead. */ const updateWith: (node: WithStatement, expression: Expression, statement: Statement) => WithStatement; /** @deprecated Use `factory.createSwitch` or the factory supplied by your transformation context instead. 
*/ const createSwitch: (expression: Expression, caseBlock: CaseBlock) => SwitchStatement; /** @deprecated Use `factory.updateSwitch` or the factory supplied by your transformation context instead. */ const updateSwitch: (node: SwitchStatement, expression: Expression, caseBlock: CaseBlock) => SwitchStatement; /** @deprecated Use `factory.createLabel` or the factory supplied by your transformation context instead. */ const createLabel: (label: string | Identifier, statement: Statement) => LabeledStatement; /** @deprecated Use `factory.updateLabel` or the factory supplied by your transformation context instead. */ const updateLabel: (node: LabeledStatement, label: Identifier, statement: Statement) => LabeledStatement; /** @deprecated Use `factory.createThrow` or the factory supplied by your transformation context instead. */ const createThrow: (expression: Expression) => ThrowStatement; /** @deprecated Use `factory.updateThrow` or the factory supplied by your transformation context instead. */ const updateThrow: (node: ThrowStatement, expression: Expression) => ThrowStatement; /** @deprecated Use `factory.createTry` or the factory supplied by your transformation context instead. */ const createTry: (tryBlock: Block, catchClause: CatchClause | undefined, finallyBlock: Block | undefined) => TryStatement; /** @deprecated Use `factory.updateTry` or the factory supplied by your transformation context instead. */ const updateTry: (node: TryStatement, tryBlock: Block, catchClause: CatchClause | undefined, finallyBlock: Block | undefined) => TryStatement; /** @deprecated Use `factory.createDebuggerStatement` or the factory supplied by your transformation context instead. */ const createDebuggerStatement: () => DebuggerStatement; /** @deprecated Use `factory.createVariableDeclarationList` or the factory supplied by your transformation context instead. 
*/ const createVariableDeclarationList: (declarations: readonly VariableDeclaration[], flags?: NodeFlags | undefined) => VariableDeclarationList; /** @deprecated Use `factory.updateVariableDeclarationList` or the factory supplied by your transformation context instead. */ const updateVariableDeclarationList: (node: VariableDeclarationList, declarations: readonly VariableDeclaration[]) => VariableDeclarationList; /** @deprecated Use `factory.createFunctionDeclaration` or the factory supplied by your transformation context instead. */ const createFunctionDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => FunctionDeclaration; /** @deprecated Use `factory.updateFunctionDeclaration` or the factory supplied by your transformation context instead. */ const updateFunctionDeclaration: (node: FunctionDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, asteriskToken: AsteriskToken | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: Block | undefined) => FunctionDeclaration; /** @deprecated Use `factory.createClassDeclaration` or the factory supplied by your transformation context instead. 
*/ const createClassDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]) => ClassDeclaration; /** @deprecated Use `factory.updateClassDeclaration` or the factory supplied by your transformation context instead. */ const updateClassDeclaration: (node: ClassDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]) => ClassDeclaration; /** @deprecated Use `factory.createInterfaceDeclaration` or the factory supplied by your transformation context instead. */ const createInterfaceDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly TypeElement[]) => InterfaceDeclaration; /** @deprecated Use `factory.updateInterfaceDeclaration` or the factory supplied by your transformation context instead. */ const updateInterfaceDeclaration: (node: InterfaceDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly TypeElement[]) => InterfaceDeclaration; /** @deprecated Use `factory.createTypeAliasDeclaration` or the factory supplied by your transformation context instead. 
*/ const createTypeAliasDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, type: TypeNode) => TypeAliasDeclaration; /** @deprecated Use `factory.updateTypeAliasDeclaration` or the factory supplied by your transformation context instead. */ const updateTypeAliasDeclaration: (node: TypeAliasDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, typeParameters: readonly TypeParameterDeclaration[] | undefined, type: TypeNode) => TypeAliasDeclaration; /** @deprecated Use `factory.createEnumDeclaration` or the factory supplied by your transformation context instead. */ const createEnumDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, members: readonly EnumMember[]) => EnumDeclaration; /** @deprecated Use `factory.updateEnumDeclaration` or the factory supplied by your transformation context instead. */ const updateEnumDeclaration: (node: EnumDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, members: readonly EnumMember[]) => EnumDeclaration; /** @deprecated Use `factory.createModuleDeclaration` or the factory supplied by your transformation context instead. */ const createModuleDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: ModuleName, body: Identifier | ModuleBlock | NamespaceDeclaration | JSDocNamespaceDeclaration | undefined, flags?: NodeFlags | undefined) => ModuleDeclaration; /** @deprecated Use `factory.updateModuleDeclaration` or the factory supplied by your transformation context instead. 
*/ const updateModuleDeclaration: (node: ModuleDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: ModuleName, body: Identifier | ModuleBlock | NamespaceDeclaration | JSDocNamespaceDeclaration | undefined) => ModuleDeclaration; /** @deprecated Use `factory.createModuleBlock` or the factory supplied by your transformation context instead. */ const createModuleBlock: (statements: readonly Statement[]) => ModuleBlock; /** @deprecated Use `factory.updateModuleBlock` or the factory supplied by your transformation context instead. */ const updateModuleBlock: (node: ModuleBlock, statements: readonly Statement[]) => ModuleBlock; /** @deprecated Use `factory.createCaseBlock` or the factory supplied by your transformation context instead. */ const createCaseBlock: (clauses: readonly CaseOrDefaultClause[]) => CaseBlock; /** @deprecated Use `factory.updateCaseBlock` or the factory supplied by your transformation context instead. */ const updateCaseBlock: (node: CaseBlock, clauses: readonly CaseOrDefaultClause[]) => CaseBlock; /** @deprecated Use `factory.createNamespaceExportDeclaration` or the factory supplied by your transformation context instead. */ const createNamespaceExportDeclaration: (name: string | Identifier) => NamespaceExportDeclaration; /** @deprecated Use `factory.updateNamespaceExportDeclaration` or the factory supplied by your transformation context instead. */ const updateNamespaceExportDeclaration: (node: NamespaceExportDeclaration, name: Identifier) => NamespaceExportDeclaration; /** @deprecated Use `factory.createImportEqualsDeclaration` or the factory supplied by your transformation context instead. 
*/ const createImportEqualsDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: string | Identifier, moduleReference: ModuleReference) => ImportEqualsDeclaration; /** @deprecated Use `factory.updateImportEqualsDeclaration` or the factory supplied by your transformation context instead. */ const updateImportEqualsDeclaration: (node: ImportEqualsDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, name: Identifier, moduleReference: ModuleReference) => ImportEqualsDeclaration; /** @deprecated Use `factory.createImportDeclaration` or the factory supplied by your transformation context instead. */ const createImportDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression) => ImportDeclaration; /** @deprecated Use `factory.updateImportDeclaration` or the factory supplied by your transformation context instead. */ const updateImportDeclaration: (node: ImportDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, importClause: ImportClause | undefined, moduleSpecifier: Expression) => ImportDeclaration; /** @deprecated Use `factory.createNamespaceImport` or the factory supplied by your transformation context instead. */ const createNamespaceImport: (name: Identifier) => NamespaceImport; /** @deprecated Use `factory.updateNamespaceImport` or the factory supplied by your transformation context instead. */ const updateNamespaceImport: (node: NamespaceImport, name: Identifier) => NamespaceImport; /** @deprecated Use `factory.createNamedImports` or the factory supplied by your transformation context instead. */ const createNamedImports: (elements: readonly ImportSpecifier[]) => NamedImports; /** @deprecated Use `factory.updateNamedImports` or the factory supplied by your transformation context instead. 
*/ const updateNamedImports: (node: NamedImports, elements: readonly ImportSpecifier[]) => NamedImports; /** @deprecated Use `factory.createImportSpecifier` or the factory supplied by your transformation context instead. */ const createImportSpecifier: (propertyName: Identifier | undefined, name: Identifier) => ImportSpecifier; /** @deprecated Use `factory.updateImportSpecifier` or the factory supplied by your transformation context instead. */ const updateImportSpecifier: (node: ImportSpecifier, propertyName: Identifier | undefined, name: Identifier) => ImportSpecifier; /** @deprecated Use `factory.createExportAssignment` or the factory supplied by your transformation context instead. */ const createExportAssignment: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, isExportEquals: boolean | undefined, expression: Expression) => ExportAssignment; /** @deprecated Use `factory.updateExportAssignment` or the factory supplied by your transformation context instead. */ const updateExportAssignment: (node: ExportAssignment, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, expression: Expression) => ExportAssignment; /** @deprecated Use `factory.createNamedExports` or the factory supplied by your transformation context instead. */ const createNamedExports: (elements: readonly ExportSpecifier[]) => NamedExports; /** @deprecated Use `factory.updateNamedExports` or the factory supplied by your transformation context instead. */ const updateNamedExports: (node: NamedExports, elements: readonly ExportSpecifier[]) => NamedExports; /** @deprecated Use `factory.createExportSpecifier` or the factory supplied by your transformation context instead. */ const createExportSpecifier: (propertyName: string | Identifier | undefined, name: string | Identifier) => ExportSpecifier; /** @deprecated Use `factory.updateExportSpecifier` or the factory supplied by your transformation context instead. 
*/ const updateExportSpecifier: (node: ExportSpecifier, propertyName: Identifier | undefined, name: Identifier) => ExportSpecifier; /** @deprecated Use `factory.createExternalModuleReference` or the factory supplied by your transformation context instead. */ const createExternalModuleReference: (expression: Expression) => ExternalModuleReference; /** @deprecated Use `factory.updateExternalModuleReference` or the factory supplied by your transformation context instead. */ const updateExternalModuleReference: (node: ExternalModuleReference, expression: Expression) => ExternalModuleReference; /** @deprecated Use `factory.createJSDocTypeExpression` or the factory supplied by your transformation context instead. */ const createJSDocTypeExpression: (type: TypeNode) => JSDocTypeExpression; /** @deprecated Use `factory.createJSDocTypeTag` or the factory supplied by your transformation context instead. */ const createJSDocTypeTag: (tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string | undefined) => JSDocTypeTag; /** @deprecated Use `factory.createJSDocReturnTag` or the factory supplied by your transformation context instead. */ const createJSDocReturnTag: (tagName: Identifier | undefined, typeExpression?: JSDocTypeExpression | undefined, comment?: string | undefined) => JSDocReturnTag; /** @deprecated Use `factory.createJSDocThisTag` or the factory supplied by your transformation context instead. */ const createJSDocThisTag: (tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string | undefined) => JSDocThisTag; /** @deprecated Use `factory.createJSDocComment` or the factory supplied by your transformation context instead. */ const createJSDocComment: (comment?: string | undefined, tags?: readonly JSDocTag[] | undefined) => JSDoc; /** @deprecated Use `factory.createJSDocParameterTag` or the factory supplied by your transformation context instead. 
*/ const createJSDocParameterTag: (tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression?: JSDocTypeExpression | undefined, isNameFirst?: boolean | undefined, comment?: string | undefined) => JSDocParameterTag; /** @deprecated Use `factory.createJSDocClassTag` or the factory supplied by your transformation context instead. */ const createJSDocClassTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocClassTag; /** @deprecated Use `factory.createJSDocAugmentsTag` or the factory supplied by your transformation context instead. */ const createJSDocAugmentsTag: (tagName: Identifier | undefined, className: ExpressionWithTypeArguments & { readonly expression: Identifier | PropertyAccessEntityNameExpression; }, comment?: string | undefined) => JSDocAugmentsTag; /** @deprecated Use `factory.createJSDocEnumTag` or the factory supplied by your transformation context instead. */ const createJSDocEnumTag: (tagName: Identifier | undefined, typeExpression: JSDocTypeExpression, comment?: string | undefined) => JSDocEnumTag; /** @deprecated Use `factory.createJSDocTemplateTag` or the factory supplied by your transformation context instead. */ const createJSDocTemplateTag: (tagName: Identifier | undefined, constraint: JSDocTypeExpression | undefined, typeParameters: readonly TypeParameterDeclaration[], comment?: string | undefined) => JSDocTemplateTag; /** @deprecated Use `factory.createJSDocTypedefTag` or the factory supplied by your transformation context instead. */ const createJSDocTypedefTag: (tagName: Identifier | undefined, typeExpression?: JSDocTypeLiteral | JSDocTypeExpression | undefined, fullName?: Identifier | JSDocNamespaceDeclaration | undefined, comment?: string | undefined) => JSDocTypedefTag; /** @deprecated Use `factory.createJSDocCallbackTag` or the factory supplied by your transformation context instead. 
*/ const createJSDocCallbackTag: (tagName: Identifier | undefined, typeExpression: JSDocSignature, fullName?: Identifier | JSDocNamespaceDeclaration | undefined, comment?: string | undefined) => JSDocCallbackTag; /** @deprecated Use `factory.createJSDocSignature` or the factory supplied by your transformation context instead. */ const createJSDocSignature: (typeParameters: readonly JSDocTemplateTag[] | undefined, parameters: readonly JSDocParameterTag[], type?: JSDocReturnTag | undefined) => JSDocSignature; /** @deprecated Use `factory.createJSDocPropertyTag` or the factory supplied by your transformation context instead. */ const createJSDocPropertyTag: (tagName: Identifier | undefined, name: EntityName, isBracketed: boolean, typeExpression?: JSDocTypeExpression | undefined, isNameFirst?: boolean | undefined, comment?: string | undefined) => JSDocPropertyTag; /** @deprecated Use `factory.createJSDocTypeLiteral` or the factory supplied by your transformation context instead. */ const createJSDocTypeLiteral: (jsDocPropertyTags?: readonly JSDocPropertyLikeTag[] | undefined, isArrayType?: boolean | undefined) => JSDocTypeLiteral; /** @deprecated Use `factory.createJSDocImplementsTag` or the factory supplied by your transformation context instead. */ const createJSDocImplementsTag: (tagName: Identifier | undefined, className: ExpressionWithTypeArguments & { readonly expression: Identifier | PropertyAccessEntityNameExpression; }, comment?: string | undefined) => JSDocImplementsTag; /** @deprecated Use `factory.createJSDocAuthorTag` or the factory supplied by your transformation context instead. */ const createJSDocAuthorTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocAuthorTag; /** @deprecated Use `factory.createJSDocPublicTag` or the factory supplied by your transformation context instead. 
*/ const createJSDocPublicTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocPublicTag; /** @deprecated Use `factory.createJSDocPrivateTag` or the factory supplied by your transformation context instead. */ const createJSDocPrivateTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocPrivateTag; /** @deprecated Use `factory.createJSDocProtectedTag` or the factory supplied by your transformation context instead. */ const createJSDocProtectedTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocProtectedTag; /** @deprecated Use `factory.createJSDocReadonlyTag` or the factory supplied by your transformation context instead. */ const createJSDocReadonlyTag: (tagName: Identifier | undefined, comment?: string | undefined) => JSDocReadonlyTag; /** @deprecated Use `factory.createJSDocUnknownTag` or the factory supplied by your transformation context instead. */ const createJSDocTag: (tagName: Identifier, comment?: string | undefined) => JSDocUnknownTag; /** @deprecated Use `factory.createJsxElement` or the factory supplied by your transformation context instead. */ const createJsxElement: (openingElement: JsxOpeningElement, children: readonly JsxChild[], closingElement: JsxClosingElement) => JsxElement; /** @deprecated Use `factory.updateJsxElement` or the factory supplied by your transformation context instead. */ const updateJsxElement: (node: JsxElement, openingElement: JsxOpeningElement, children: readonly JsxChild[], closingElement: JsxClosingElement) => JsxElement; /** @deprecated Use `factory.createJsxSelfClosingElement` or the factory supplied by your transformation context instead. */ const createJsxSelfClosingElement: (tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes) => JsxSelfClosingElement; /** @deprecated Use `factory.updateJsxSelfClosingElement` or the factory supplied by your transformation context instead. 
*/ const updateJsxSelfClosingElement: (node: JsxSelfClosingElement, tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes) => JsxSelfClosingElement; /** @deprecated Use `factory.createJsxOpeningElement` or the factory supplied by your transformation context instead. */ const createJsxOpeningElement: (tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes) => JsxOpeningElement; /** @deprecated Use `factory.updateJsxOpeningElement` or the factory supplied by your transformation context instead. */ const updateJsxOpeningElement: (node: JsxOpeningElement, tagName: JsxTagNameExpression, typeArguments: readonly TypeNode[] | undefined, attributes: JsxAttributes) => JsxOpeningElement; /** @deprecated Use `factory.createJsxClosingElement` or the factory supplied by your transformation context instead. */ const createJsxClosingElement: (tagName: JsxTagNameExpression) => JsxClosingElement; /** @deprecated Use `factory.updateJsxClosingElement` or the factory supplied by your transformation context instead. */ const updateJsxClosingElement: (node: JsxClosingElement, tagName: JsxTagNameExpression) => JsxClosingElement; /** @deprecated Use `factory.createJsxFragment` or the factory supplied by your transformation context instead. */ const createJsxFragment: (openingFragment: JsxOpeningFragment, children: readonly JsxChild[], closingFragment: JsxClosingFragment) => JsxFragment; /** @deprecated Use `factory.createJsxText` or the factory supplied by your transformation context instead. */ const createJsxText: (text: string, containsOnlyTriviaWhiteSpaces?: boolean | undefined) => JsxText; /** @deprecated Use `factory.updateJsxText` or the factory supplied by your transformation context instead. 
*/ const updateJsxText: (node: JsxText, text: string, containsOnlyTriviaWhiteSpaces?: boolean | undefined) => JsxText; /** @deprecated Use `factory.createJsxOpeningFragment` or the factory supplied by your transformation context instead. */ const createJsxOpeningFragment: () => JsxOpeningFragment; /** @deprecated Use `factory.createJsxJsxClosingFragment` or the factory supplied by your transformation context instead. */ const createJsxJsxClosingFragment: () => JsxClosingFragment; /** @deprecated Use `factory.updateJsxFragment` or the factory supplied by your transformation context instead. */ const updateJsxFragment: (node: JsxFragment, openingFragment: JsxOpeningFragment, children: readonly JsxChild[], closingFragment: JsxClosingFragment) => JsxFragment; /** @deprecated Use `factory.createJsxAttribute` or the factory supplied by your transformation context instead. */ const createJsxAttribute: (name: Identifier, initializer: StringLiteral | JsxExpression | undefined) => JsxAttribute; /** @deprecated Use `factory.updateJsxAttribute` or the factory supplied by your transformation context instead. */ const updateJsxAttribute: (node: JsxAttribute, name: Identifier, initializer: StringLiteral | JsxExpression | undefined) => JsxAttribute; /** @deprecated Use `factory.createJsxAttributes` or the factory supplied by your transformation context instead. */ const createJsxAttributes: (properties: readonly JsxAttributeLike[]) => JsxAttributes; /** @deprecated Use `factory.updateJsxAttributes` or the factory supplied by your transformation context instead. */ const updateJsxAttributes: (node: JsxAttributes, properties: readonly JsxAttributeLike[]) => JsxAttributes; /** @deprecated Use `factory.createJsxSpreadAttribute` or the factory supplied by your transformation context instead. 
*/ const createJsxSpreadAttribute: (expression: Expression) => JsxSpreadAttribute; /** @deprecated Use `factory.updateJsxSpreadAttribute` or the factory supplied by your transformation context instead. */ const updateJsxSpreadAttribute: (node: JsxSpreadAttribute, expression: Expression) => JsxSpreadAttribute; /** @deprecated Use `factory.createJsxExpression` or the factory supplied by your transformation context instead. */ const createJsxExpression: (dotDotDotToken: DotDotDotToken | undefined, expression: Expression | undefined) => JsxExpression; /** @deprecated Use `factory.updateJsxExpression` or the factory supplied by your transformation context instead. */ const updateJsxExpression: (node: JsxExpression, expression: Expression | undefined) => JsxExpression; /** @deprecated Use `factory.createCaseClause` or the factory supplied by your transformation context instead. */ const createCaseClause: (expression: Expression, statements: readonly Statement[]) => CaseClause; /** @deprecated Use `factory.updateCaseClause` or the factory supplied by your transformation context instead. */ const updateCaseClause: (node: CaseClause, expression: Expression, statements: readonly Statement[]) => CaseClause; /** @deprecated Use `factory.createDefaultClause` or the factory supplied by your transformation context instead. */ const createDefaultClause: (statements: readonly Statement[]) => DefaultClause; /** @deprecated Use `factory.updateDefaultClause` or the factory supplied by your transformation context instead. */ const updateDefaultClause: (node: DefaultClause, statements: readonly Statement[]) => DefaultClause; /** @deprecated Use `factory.createHeritageClause` or the factory supplied by your transformation context instead. 
*/ const createHeritageClause: (token: SyntaxKind.ExtendsKeyword | SyntaxKind.ImplementsKeyword, types: readonly ExpressionWithTypeArguments[]) => HeritageClause; /** @deprecated Use `factory.updateHeritageClause` or the factory supplied by your transformation context instead. */ const updateHeritageClause: (node: HeritageClause, types: readonly ExpressionWithTypeArguments[]) => HeritageClause; /** @deprecated Use `factory.createCatchClause` or the factory supplied by your transformation context instead. */ const createCatchClause: (variableDeclaration: string | VariableDeclaration | undefined, block: Block) => CatchClause; /** @deprecated Use `factory.updateCatchClause` or the factory supplied by your transformation context instead. */ const updateCatchClause: (node: CatchClause, variableDeclaration: VariableDeclaration | undefined, block: Block) => CatchClause; /** @deprecated Use `factory.createPropertyAssignment` or the factory supplied by your transformation context instead. */ const createPropertyAssignment: (name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, initializer: Expression) => PropertyAssignment; /** @deprecated Use `factory.updatePropertyAssignment` or the factory supplied by your transformation context instead. */ const updatePropertyAssignment: (node: PropertyAssignment, name: PropertyName, initializer: Expression) => PropertyAssignment; /** @deprecated Use `factory.createShorthandPropertyAssignment` or the factory supplied by your transformation context instead. */ const createShorthandPropertyAssignment: (name: string | Identifier, objectAssignmentInitializer?: Expression | undefined) => ShorthandPropertyAssignment; /** @deprecated Use `factory.updateShorthandPropertyAssignment` or the factory supplied by your transformation context instead. 
*/ const updateShorthandPropertyAssignment: (node: ShorthandPropertyAssignment, name: Identifier, objectAssignmentInitializer: Expression | undefined) => ShorthandPropertyAssignment; /** @deprecated Use `factory.createSpreadAssignment` or the factory supplied by your transformation context instead. */ const createSpreadAssignment: (expression: Expression) => SpreadAssignment; /** @deprecated Use `factory.updateSpreadAssignment` or the factory supplied by your transformation context instead. */ const updateSpreadAssignment: (node: SpreadAssignment, expression: Expression) => SpreadAssignment; /** @deprecated Use `factory.createEnumMember` or the factory supplied by your transformation context instead. */ const createEnumMember: (name: string | Identifier | StringLiteral | NumericLiteral | ComputedPropertyName | PrivateIdentifier, initializer?: Expression | undefined) => EnumMember; /** @deprecated Use `factory.updateEnumMember` or the factory supplied by your transformation context instead. */ const updateEnumMember: (node: EnumMember, name: PropertyName, initializer: Expression | undefined) => EnumMember; /** @deprecated Use `factory.updateSourceFile` or the factory supplied by your transformation context instead. */ const updateSourceFileNode: (node: SourceFile, statements: readonly Statement[], isDeclarationFile?: boolean | undefined, referencedFiles?: readonly FileReference[] | undefined, typeReferences?: readonly FileReference[] | undefined, hasNoDefaultLib?: boolean | undefined, libReferences?: readonly FileReference[] | undefined) => SourceFile; /** @deprecated Use `factory.createNotEmittedStatement` or the factory supplied by your transformation context instead. */ const createNotEmittedStatement: (original: Node) => NotEmittedStatement; /** @deprecated Use `factory.createPartiallyEmittedExpression` or the factory supplied by your transformation context instead. 
*/ const createPartiallyEmittedExpression: (expression: Expression, original?: Node | undefined) => PartiallyEmittedExpression; /** @deprecated Use `factory.updatePartiallyEmittedExpression` or the factory supplied by your transformation context instead. */ const updatePartiallyEmittedExpression: (node: PartiallyEmittedExpression, expression: Expression) => PartiallyEmittedExpression; /** @deprecated Use `factory.createCommaList` or the factory supplied by your transformation context instead. */ const createCommaList: (elements: readonly Expression[]) => CommaListExpression; /** @deprecated Use `factory.updateCommaList` or the factory supplied by your transformation context instead. */ const updateCommaList: (node: CommaListExpression, elements: readonly Expression[]) => CommaListExpression; /** @deprecated Use `factory.createBundle` or the factory supplied by your transformation context instead. */ const createBundle: (sourceFiles: readonly SourceFile[], prepends?: readonly (UnparsedSource | InputFiles)[] | undefined) => Bundle; /** @deprecated Use `factory.updateBundle` or the factory supplied by your transformation context instead. */ const updateBundle: (node: Bundle, sourceFiles: readonly SourceFile[], prepends?: readonly (UnparsedSource | InputFiles)[] | undefined) => Bundle; /** @deprecated Use `factory.createImmediatelyInvokedFunctionExpression` or the factory supplied by your transformation context instead. */ const createImmediatelyInvokedFunctionExpression: { (statements: readonly Statement[]): CallExpression; (statements: readonly Statement[], param: ParameterDeclaration, paramValue: Expression): CallExpression; }; /** @deprecated Use `factory.createImmediatelyInvokedArrowFunction` or the factory supplied by your transformation context instead. 
*/ const createImmediatelyInvokedArrowFunction: { (statements: readonly Statement[]): CallExpression; (statements: readonly Statement[], param: ParameterDeclaration, paramValue: Expression): CallExpression; }; /** @deprecated Use `factory.createVoidZero` or the factory supplied by your transformation context instead. */ const createVoidZero: () => VoidExpression; /** @deprecated Use `factory.createExportDefault` or the factory supplied by your transformation context instead. */ const createExportDefault: (expression: Expression) => ExportAssignment; /** @deprecated Use `factory.createExternalModuleExport` or the factory supplied by your transformation context instead. */ const createExternalModuleExport: (exportName: Identifier) => ExportDeclaration; /** @deprecated Use `factory.createNamespaceExport` or the factory supplied by your transformation context instead. */ const createNamespaceExport: (name: Identifier) => NamespaceExport; /** @deprecated Use `factory.updateNamespaceExport` or the factory supplied by your transformation context instead. */ const updateNamespaceExport: (node: NamespaceExport, name: Identifier) => NamespaceExport; /** @deprecated Use `factory.createToken` or the factory supplied by your transformation context instead. */ const createToken: <TKind extends SyntaxKind>(kind: TKind) => Token<TKind>; /** @deprecated Use `factory.createIdentifier` or the factory supplied by your transformation context instead. */ const createIdentifier: (text: string) => Identifier; /** @deprecated Use `factory.createTempVariable` or the factory supplied by your transformation context instead. */ const createTempVariable: (recordTempVariable: ((node: Identifier) => void) | undefined) => Identifier; /** @deprecated Use `factory.getGeneratedNameForNode` or the factory supplied by your transformation context instead. 
*/ const getGeneratedNameForNode: (node: Node | undefined) => Identifier; /** @deprecated Use `factory.createUniqueName(text, GeneratedIdentifierFlags.Optimistic)` or the factory supplied by your transformation context instead. */ const createOptimisticUniqueName: (text: string) => Identifier; /** @deprecated Use `factory.createUniqueName(text, GeneratedIdentifierFlags.Optimistic | GeneratedIdentifierFlags.FileLevel)` or the factory supplied by your transformation context instead. */ const createFileLevelUniqueName: (text: string) => Identifier; /** @deprecated Use `factory.createIndexSignature` or the factory supplied by your transformation context instead. */ const createIndexSignature: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode) => IndexSignatureDeclaration; /** @deprecated Use `factory.createTypePredicateNode` or the factory supplied by your transformation context instead. */ const createTypePredicateNode: (parameterName: Identifier | ThisTypeNode | string, type: TypeNode) => TypePredicateNode; /** @deprecated Use `factory.updateTypePredicateNode` or the factory supplied by your transformation context instead. */ const updateTypePredicateNode: (node: TypePredicateNode, parameterName: Identifier | ThisTypeNode, type: TypeNode) => TypePredicateNode; /** @deprecated Use `factory.createStringLiteral`, `factory.createStringLiteralFromNode`, `factory.createNumericLiteral`, `factory.createBigIntLiteral`, `factory.createTrue`, `factory.createFalse`, or the factory supplied by your transformation context instead. 
*/ const createLiteral: { (value: string | StringLiteral | NoSubstitutionTemplateLiteral | NumericLiteral | Identifier): StringLiteral; (value: number | PseudoBigInt): NumericLiteral; (value: boolean): BooleanLiteral; (value: string | number | PseudoBigInt | boolean): PrimaryExpression; }; /** @deprecated Use `factory.createMethodSignature` or the factory supplied by your transformation context instead. */ const createMethodSignature: (typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, name: string | PropertyName, questionToken: QuestionToken | undefined) => MethodSignature; /** @deprecated Use `factory.updateMethodSignature` or the factory supplied by your transformation context instead. */ const updateMethodSignature: (node: MethodSignature, typeParameters: NodeArray<TypeParameterDeclaration> | undefined, parameters: NodeArray<ParameterDeclaration>, type: TypeNode | undefined, name: PropertyName, questionToken: QuestionToken | undefined) => MethodSignature; /** @deprecated Use `factory.createTypeOperatorNode` or the factory supplied by your transformation context instead. */ const createTypeOperatorNode: { (type: TypeNode): TypeOperatorNode; (operator: SyntaxKind.KeyOfKeyword | SyntaxKind.UniqueKeyword | SyntaxKind.ReadonlyKeyword, type: TypeNode): TypeOperatorNode; }; /** @deprecated Use `factory.createTaggedTemplate` or the factory supplied by your transformation context instead. */ const createTaggedTemplate: { (tag: Expression, template: TemplateLiteral): TaggedTemplateExpression; (tag: Expression, typeArguments: readonly TypeNode[] | undefined, template: TemplateLiteral): TaggedTemplateExpression; }; /** @deprecated Use `factory.updateTaggedTemplate` or the factory supplied by your transformation context instead. 
*/ const updateTaggedTemplate: { (node: TaggedTemplateExpression, tag: Expression, template: TemplateLiteral): TaggedTemplateExpression; (node: TaggedTemplateExpression, tag: Expression, typeArguments: readonly TypeNode[] | undefined, template: TemplateLiteral): TaggedTemplateExpression; }; /** @deprecated Use `factory.updateBinary` or the factory supplied by your transformation context instead. */ const updateBinary: (node: BinaryExpression, left: Expression, right: Expression, operator?: BinaryOperator | BinaryOperatorToken) => BinaryExpression; /** @deprecated Use `factory.createConditional` or the factory supplied by your transformation context instead. */ const createConditional: { (condition: Expression, whenTrue: Expression, whenFalse: Expression): ConditionalExpression; (condition: Expression, questionToken: QuestionToken, whenTrue: Expression, colonToken: ColonToken, whenFalse: Expression): ConditionalExpression; }; /** @deprecated Use `factory.createYield` or the factory supplied by your transformation context instead. */ const createYield: { (expression?: Expression | undefined): YieldExpression; (asteriskToken: AsteriskToken | undefined, expression: Expression): YieldExpression; }; /** @deprecated Use `factory.createClassExpression` or the factory supplied by your transformation context instead. */ const createClassExpression: (modifiers: readonly Modifier[] | undefined, name: string | Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]) => ClassExpression; /** @deprecated Use `factory.updateClassExpression` or the factory supplied by your transformation context instead. 
*/ const updateClassExpression: (node: ClassExpression, modifiers: readonly Modifier[] | undefined, name: Identifier | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, heritageClauses: readonly HeritageClause[] | undefined, members: readonly ClassElement[]) => ClassExpression; /** @deprecated Use `factory.createPropertySignature` or the factory supplied by your transformation context instead. */ const createPropertySignature: (modifiers: readonly Modifier[] | undefined, name: PropertyName | string, questionToken: QuestionToken | undefined, type: TypeNode | undefined, initializer?: Expression | undefined) => PropertySignature; /** @deprecated Use `factory.updatePropertySignature` or the factory supplied by your transformation context instead. */ const updatePropertySignature: (node: PropertySignature, modifiers: readonly Modifier[] | undefined, name: PropertyName, questionToken: QuestionToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined) => PropertySignature; /** @deprecated Use `factory.createExpressionWithTypeArguments` or the factory supplied by your transformation context instead. */ const createExpressionWithTypeArguments: (typeArguments: readonly TypeNode[] | undefined, expression: Expression) => ExpressionWithTypeArguments; /** @deprecated Use `factory.updateExpressionWithTypeArguments` or the factory supplied by your transformation context instead. */ const updateExpressionWithTypeArguments: (node: ExpressionWithTypeArguments, typeArguments: readonly TypeNode[] | undefined, expression: Expression) => ExpressionWithTypeArguments; /** @deprecated Use `factory.createArrowFunction` or the factory supplied by your transformation context instead. 
*/ const createArrowFunction: { (modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, equalsGreaterThanToken: EqualsGreaterThanToken | undefined, body: ConciseBody): ArrowFunction; (modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: ConciseBody): ArrowFunction; }; /** @deprecated Use `factory.updateArrowFunction` or the factory supplied by your transformation context instead. */ const updateArrowFunction: { (node: ArrowFunction, modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, equalsGreaterThanToken: EqualsGreaterThanToken, body: ConciseBody): ArrowFunction; (node: ArrowFunction, modifiers: readonly Modifier[] | undefined, typeParameters: readonly TypeParameterDeclaration[] | undefined, parameters: readonly ParameterDeclaration[], type: TypeNode | undefined, body: ConciseBody): ArrowFunction; }; /** @deprecated Use `factory.createVariableDeclaration` or the factory supplied by your transformation context instead. */ const createVariableDeclaration: { (name: string | BindingName, type?: TypeNode | undefined, initializer?: Expression | undefined): VariableDeclaration; (name: string | BindingName, exclamationToken: ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): VariableDeclaration; }; /** @deprecated Use `factory.updateVariableDeclaration` or the factory supplied by your transformation context instead. 
*/ const updateVariableDeclaration: { (node: VariableDeclaration, name: BindingName, type: TypeNode | undefined, initializer: Expression | undefined): VariableDeclaration; (node: VariableDeclaration, name: BindingName, exclamationToken: ExclamationToken | undefined, type: TypeNode | undefined, initializer: Expression | undefined): VariableDeclaration; }; /** @deprecated Use `factory.createImportClause` or the factory supplied by your transformation context instead. */ const createImportClause: (name: Identifier | undefined, namedBindings: NamedImportBindings | undefined, isTypeOnly?: any) => ImportClause; /** @deprecated Use `factory.updateImportClause` or the factory supplied by your transformation context instead. */ const updateImportClause: (node: ImportClause, name: Identifier | undefined, namedBindings: NamedImportBindings | undefined, isTypeOnly: boolean) => ImportClause; /** @deprecated Use `factory.createExportDeclaration` or the factory supplied by your transformation context instead. */ const createExportDeclaration: (decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, exportClause: NamedExportBindings | undefined, moduleSpecifier?: Expression | undefined, isTypeOnly?: any) => ExportDeclaration; /** @deprecated Use `factory.updateExportDeclaration` or the factory supplied by your transformation context instead. */ const updateExportDeclaration: (node: ExportDeclaration, decorators: readonly Decorator[] | undefined, modifiers: readonly Modifier[] | undefined, exportClause: NamedExportBindings | undefined, moduleSpecifier: Expression | undefined, isTypeOnly: boolean) => ExportDeclaration; /** @deprecated Use `factory.createJSDocParameterTag` or the factory supplied by your transformation context instead. 
*/ const createJSDocParamTag: (name: EntityName, isBracketed: boolean, typeExpression?: JSDocTypeExpression | undefined, comment?: string | undefined) => JSDocParameterTag; /** @deprecated Use `factory.createComma` or the factory supplied by your transformation context instead. */ const createComma: (left: Expression, right: Expression) => Expression; /** @deprecated Use `factory.createLessThan` or the factory supplied by your transformation context instead. */ const createLessThan: (left: Expression, right: Expression) => Expression; /** @deprecated Use `factory.createAssignment` or the factory supplied by your transformation context instead. */ const createAssignment: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createStrictEquality` or the factory supplied by your transformation context instead. */ const createStrictEquality: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createStrictInequality` or the factory supplied by your transformation context instead. */ const createStrictInequality: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createAdd` or the factory supplied by your transformation context instead. */ const createAdd: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createSubtract` or the factory supplied by your transformation context instead. */ const createSubtract: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createLogicalAnd` or the factory supplied by your transformation context instead. */ const createLogicalAnd: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createLogicalOr` or the factory supplied by your transformation context instead. 
*/ const createLogicalOr: (left: Expression, right: Expression) => BinaryExpression; /** @deprecated Use `factory.createPostfixIncrement` or the factory supplied by your transformation context instead. */ const createPostfixIncrement: (operand: Expression) => PostfixUnaryExpression; /** @deprecated Use `factory.createLogicalNot` or the factory supplied by your transformation context instead. */ const createLogicalNot: (operand: Expression) => PrefixUnaryExpression; /** @deprecated Use an appropriate `factory` method instead. */ const createNode: (kind: SyntaxKind, pos?: any, end?: any) => Node; /** * Creates a shallow, memberwise clone of a node ~for mutation~ with its `pos`, `end`, and `parent` set. * * NOTE: It is unsafe to change any properties of a `Node` that relate to its AST children, as those changes won't be * captured with respect to transformations. * * @deprecated Use `factory.cloneNode` instead and use `setCommentRange` or `setSourceMapRange` and avoid setting `parent`. */ const getMutableClone: <T extends Node>(node: T) => T; /** @deprecated Use `isTypeAssertionExpression` instead. */ const isTypeAssertion: (node: Node) => node is TypeAssertion; /** * @deprecated Use `ts.ReadonlyESMap<K, V>` instead. */ interface ReadonlyMap<T> extends ReadonlyESMap<string, T> { } /** * @deprecated Use `ts.ESMap<K, V>` instead. */ interface Map<T> extends ESMap<string, T> { } } export = ts; export as namespace ts;
LastToken = 156, FirstTriviaToken = 2, LastTriviaToken = 7,
attention.py
import tensorflow as tf
tfk = tf.keras

from .shaping import move_dim


def move_ch2h(maybe_headed_tensor, channels_dim=-1, head_dim=1):
    """Move the channels axis of a rank-4 (multi-headed) tensor to the head axis.

    Rank-4 inputs have `channels_dim` moved to `head_dim`; tensors of any
    other rank are returned unchanged (presumably already head-free — the
    per-head scale/bias tensors handled here are optional per caller).
    """
    if maybe_headed_tensor.shape.rank == 4:
        return move_dim(maybe_headed_tensor, from_dim=channels_dim, to_dim=head_dim)
    return maybe_headed_tensor


def merge_attention_heads(merge_type, headed_tensor):
    """Reduce the head axis (axis 1) of `headed_tensor`.

    Args:
        merge_type: one of 'mean', 'max', 'sum', 'prod'.
        headed_tensor: tensor whose axis 1 is the head axis.

    Raises:
        ValueError: if `merge_type` is not a recognized reduction.
    """
    if merge_type == 'mean':
        return tf.reduce_mean(headed_tensor, axis=1)
    elif merge_type == 'max':
        return tf.reduce_max(headed_tensor, axis=1)
    elif merge_type == 'sum':
        return tf.reduce_sum(headed_tensor, axis=1)
    elif merge_type == 'prod':
        return tf.reduce_prod(headed_tensor, axis=1)
    else:
        raise ValueError(f'Unknown merge type "{merge_type}"')


def dot_product_attention(query, key, value,
                          mask=None,
                          attn_mask=None,
                          scale_factor=None,
                          bias=None,
                          scale_logits=True,
                          clip_logits_value=None,
                          causal=False,
                          pad=False,
                          merge_heads=None,
                          attn_scale_factor=None,
                          return_logits=False,
                          return_matrix=False,
                          big_number=1e9,
                          scale_degree=False,
                          ):
    """Scaled dot-product attention with optional masking, clipping and head merging.

    NOTE(review): `scale_factor` was stranded outside the parameter list in the
    scrambled source even though the body reads it (which would raise
    NameError); it is restored here between `attn_mask` and `bias`, matching
    the order in which the body applies it.

    Args:
        query, key, value: attention inputs; assumed (batch, heads, seq, dim)
            when rank 4 — TODO confirm against callers.
        mask: key-side validity mask; 1 keeps a position, 0 suppresses it.
        attn_mask: full attention mask, moved channels->heads via move_ch2h.
        scale_factor: optional multiplicative factor on the logits.
        bias: optional additive bias on the logits.
        scale_logits: if True, scale logits by dim**-0.5.
        clip_logits_value: scalar c (clip to [-c, c]), (lo, hi) pair, or
            [lo, hi, after] where a truthy `after` clips post scale/bias.
        causal: if True, apply a lower-triangular causal mask.
        pad: if True, prepend one zero position to key and value.
        merge_heads: None, or a merge_attention_heads merge_type.
        attn_scale_factor: optional factor applied to the softmaxed matrix.
        return_logits / return_matrix: append extras to the output tuple.
        big_number: magnitude used to push masked logits toward -inf.
        scale_degree: with attn_scale_factor, rescale output by log(1+degree).

    Returns:
        The attention output, or a tuple (output[, logits][, matrix]).
    """
    query_shape = query.shape
    key_shape = key.shape
    value_shape = value.shape
    input_rank = query_shape.rank
    attention_dim = query_shape[-1]

    if pad:
        # Prepend a single zero-valued position along the sequence axis.
        paddings = [(0, 0)] * (input_rank - 2) + [(1, 0), (0, 0)]
        key = tf.pad(key, paddings)
        value = tf.pad(value, paddings)

    # Create preliminary logits
    attention_logits = tf.matmul(query, key, transpose_b=True)

    # Scaling for dot product
    if scale_logits:
        attention_logits = attention_logits * (attention_dim ** -.5)

    # Normalize clip_logits_value to a 3-element [lo, hi, after] list.
    if clip_logits_value is not None:
        if not isinstance(clip_logits_value, list):
            if isinstance(clip_logits_value, tuple):
                clip_logits_value = list(clip_logits_value)
            else:
                clip_logits_value = [-clip_logits_value, clip_logits_value, 0]
        if len(clip_logits_value) == 2:
            clip_logits_value.append(0)
        if len(clip_logits_value) < 3:
            raise ValueError('clip_logits_value must provide (lo, hi[, after])')

    # Clip before scale/bias (when the "after" flag is falsy)
    if clip_logits_value is not None and (not clip_logits_value[2]):
        attention_logits = tf.clip_by_value(attention_logits, *clip_logits_value[:2])

    # Scale factor and bias
    if scale_factor is not None:
        scale_factor = move_ch2h(scale_factor)
        attention_logits = attention_logits * scale_factor
    if bias is not None:
        bias = move_ch2h(bias)
        attention_logits = attention_logits + bias

    # Save for returning the logits (pre-mask, post scale/bias)
    logits_matrix = attention_logits

    # Clip after scale/bias
    if clip_logits_value is not None and clip_logits_value[2]:
        attention_logits = tf.clip_by_value(attention_logits, *clip_logits_value[:2])

    # Masking: broadcast the mask up to the logits rank, then push masked
    # entries down by big_number so softmax assigns them ~0 weight.
    if mask is not None:
        mask_rank = mask.shape.rank
        mask_slice = [Ellipsis] + [None] * (input_rank - mask_rank) + [slice(None)]
        mask = mask[mask_slice]
        if mask.dtype is not attention_logits.dtype:
            mask = tf.cast(mask, attention_logits.dtype)
        attention_logits = attention_logits + (mask - 1) * big_number

    if attn_mask is not None:
        attn_mask = move_ch2h(attn_mask)
        if attn_mask.dtype is not attention_logits.dtype:
            attn_mask = tf.cast(attn_mask, attention_logits.dtype)
        attention_logits = attention_logits + (attn_mask - 1) * big_number

    if causal:
        causal_mask_shape = [query.shape[-2], key.shape[-2]]
        if None in causal_mask_shape:
            # Static shape unknown; fall back to the runtime logits shape.
            causal_mask_shape = tf.shape(attention_logits)[-2:]
        causal_mask = tf.ones(causal_mask_shape, dtype=attention_logits.dtype)
        causal_mask = tf.linalg.band_part(causal_mask, -1, 0)
        attention_logits = attention_logits + (causal_mask - 1) * big_number

    # Softmax attention
    attention_matrix = tf.nn.softmax(attention_logits, axis=-1)

    # Merge heads
    if merge_heads is not None:
        attention_matrix = merge_attention_heads(merge_type=merge_heads,
                                                 headed_tensor=attention_matrix)

    # Scale attention matrix
    if attn_scale_factor is not None:
        attn_scale_factor = move_ch2h(attn_scale_factor)
        attention_matrix = attention_matrix * attn_scale_factor

    output = tf.matmul(attention_matrix, value)

    if (attn_scale_factor is not None) and scale_degree:
        # Degree = total (masked) scale mass per query; log(1+degree) rescales.
        if mask is None:
            degree = tf.reduce_sum(attn_scale_factor, axis=-1, keepdims=True)
        else:
            degree = tf.reduce_sum(attn_scale_factor * mask, axis=-1, keepdims=True)
        output = output * tf.math.log(1 + degree)

    # Restore static shape info lost through matmul/merging.
    if merge_heads is None:
        output.set_shape(query_shape[:-1] + value_shape[-1:])
    else:
        output.set_shape(query_shape[0:1] + query_shape[2:-1] + value_shape[-1:])

    # Format outputs
    outputs = output
    if return_logits or return_matrix:
        outputs = (outputs,)
    if return_logits:
        logits = move_dim(logits_matrix, from_dim=1, to_dim=4)
        outputs = outputs + (logits,)
    if return_matrix:
        outputs = outputs + (attention_matrix,)
    return outputs
_response.py
import json
import random
import math
import os

from crawling._twitter import twitter_crawling


class Response(object):
    """Slack bot command dispatcher for the CodingMonkey hackathon bot.

    Parses RTM events and answers the commands: hi, print, help, show, select.

    NOTE(review): in the scrambled source the handler method's name
    (`response`) was detached from its `def` line; it is reassembled here as
    `def response(self, data, channel, sc, user)`.
    """

    def __init__(self, token):
        # Crawl target, set by the "show <name>" command and read by "select".
        self.name = ""
        # Slack API token used for all api_call/api_read requests.
        self.token = token
        # Greeting templates; "{}" is filled with the user's display name.
        self.greetingList = [
            'Hello {}, welcome to the Equifax Hackathon channel! Have fun :). You can type help for more details!',
            'Nice to see you here, {} ! What can I do for you (please type help!)',
            'I am willing to do anything for you {} ! Type help so I can help you!',
        ]
        # chat.postMessage payload for the "help" command.
        self.help_msg = {
            "text": 'Don\'t Worry {} ! I will show you how to communicate with me :).',
            "attachments": [
                {"pretext": "Command line:", "color": "#36a64f",
                 "text": "hi: Say hello to me, so that I know you are here!"},
                {"color": "#36a64f",
                 "text": "print message: I will grab all detailed ID message for you, such as channel id or user id :)"},
                {"color": "#e2ffb6",
                 "text": "help: I can show you all commands I can understand :)"},
                {"color": "#415677",
                 "text": "show name or nameID: I can know that your target ID"},
                {"color": "#b27485",
                 "text": "select dataLocation: I can know where I can grab data for you"},
            ],
        }
        # chat.postMessage payload listing data sources for the "show" command.
        self.select_msg = {
            "text": "Where do you want to grab personal information for {} ?",
            "attachments": [
                {"pretext": "You can choose:", "color": "#36a64f", "text": "Facebook + limits"},
                {"color": "#36a64f", "text": "Twitter + limits"},
                {"color": "#415677", "text": "Craigslist"},
            ],
        }

    def _post_attachments(self, sc, channel, text, attachments):
        # All bot replies go out as "codingmonkey" via chat.postMessage.
        sc.api_call("chat.postMessage", token=self.token, channel=channel,
                    username="codingmonkey", text=text, attachments=attachments)

    def _load_local_json(self, relative_path):
        # Read a canned JSON fixture relative to the working directory and
        # return it re-serialized as a string for the attachments field.
        abs_path = os.path.join(os.getcwd(), relative_path)
        with open(abs_path) as data_file:
            return json.dumps(json.load(data_file))

    def response(self, data, channel, sc, user):
        """Handle one RTM event.

        Args:
            data: the raw Slack RTM event dict (must contain "type").
            channel: channel id to reply into.
            sc: Slack client exposing api_call/api_read/rtm_send_message.
            user: user id whose messages this handler should answer.
        """
        event_type = data["type"]  # renamed from `type` to avoid shadowing the builtin
        user_info = sc.api_read("users.info", token=self.token, user=user)
        username = user_info["user"]["name"]
        if event_type == "hello":
            # random.choice replaces int(math.floor(random.random()*3)):
            # same uniform pick, no hard-coded list length.
            sc.rtm_send_message(channel, random.choice(self.greetingList).format(username))
        # Only react to messages authored by the configured user.
        if "user" in data.keys() and data["user"] == user:
            if event_type == "message":
                text = data["text"].lower()
                if text.startswith("hi"):
                    sc.rtm_send_message(channel, "I am CodingMonkey Bot. Nice to meet you here {0}!".format(username))
                if text.startswith("print"):
                    # Echo the raw event field named after the word following "print".
                    sc.rtm_send_message(channel, data[text[5:].strip()])
                if text.startswith("help"):
                    self._post_attachments(sc, channel,
                                           self.help_msg["text"].format(username),
                                           self.help_msg["attachments"])
                if text.startswith("show"):
                    command_msg = text.split(' ')
                    self.name = command_msg[1]
                    self._post_attachments(sc, channel,
                                           self.select_msg["text"].format(username),
                                           self.select_msg["attachments"])
                if text.startswith("select"):
                    self._handle_select(text, channel, sc)

    def _handle_select(self, text, channel, sc):
        # Dispatch "select <source> [limits]" to the requested data source.
        command_msg = text.split(' ')
        source = command_msg[1].lower()
        if source == "twitter":
            twi = twitter_crawling()
            limits = 5  # default number of items when no limit is given
            if len(command_msg) == 3:
                limits = int(command_msg[2])
            twitter_info = json.dumps(twi.spiderInfo(self.name, limits))
            self._post_attachments(sc, channel, "Here are the results in Twitter:", twitter_info)
        elif source == "facebook":
            facebook_info = self._load_local_json("slackclient/data/facebookY.json")
            self._post_attachments(sc, channel, "Here are the results in Facebook:", facebook_info)
        elif source == "craigslist":
            craigslist_info = self._load_local_json("slackclient/data/craigslist.json")
            # Single quotes break the attachments payload; escape them as in the original.
            craigslist_info = craigslist_info.replace("'", "%100")
            self._post_attachments(sc, channel, "Here are the results in Craigslist:", craigslist_info)