prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>filter_linux.go<|end_file_name|><|fim▁begin|>package netlink import ( "bytes" "encoding/binary" "errors" "fmt" "syscall" "unsafe" "github.com/vishvananda/netlink/nl" "golang.org/x/sys/unix" ) // Constants used in TcU32Sel.Flags. const ( TC_U32_TERMINAL = nl.TC_U32_TERMINAL TC_U32_OFFSET = nl.TC_U32_OFFSET TC_U32_VAROFFSET = nl.TC_U32_VAROFFSET TC_U32_EAT = nl.TC_U32_EAT ) // Fw filter filters on firewall marks // NOTE: this is in filter_linux because it refers to nl.TcPolice which // is defined in nl/tc_linux.go type Fw struct { FilterAttrs ClassId uint32 // TODO remove nl type from interface Police nl.TcPolice InDev string // TODO Action Mask uint32 AvRate uint32 Rtab [256]uint32 Ptab [256]uint32 } func NewFw(attrs FilterAttrs, fattrs FilterFwAttrs) (*Fw, error) { var rtab [256]uint32 var ptab [256]uint32 rcellLog := -1 pcellLog := -1 avrate := fattrs.AvRate / 8 police := nl.TcPolice{} police.Rate.Rate = fattrs.Rate / 8 police.PeakRate.Rate = fattrs.PeakRate / 8 buffer := fattrs.Buffer linklayer := nl.LINKLAYER_ETHERNET if fattrs.LinkLayer != nl.LINKLAYER_UNSPEC { linklayer = fattrs.LinkLayer } police.Action = int32(fattrs.Action) if police.Rate.Rate != 0 { police.Rate.Mpu = fattrs.Mpu police.Rate.Overhead = fattrs.Overhead if CalcRtable(&police.Rate, rtab[:], rcellLog, fattrs.Mtu, linklayer) < 0 { return nil, errors.New("TBF: failed to calculate rate table") } police.Burst = uint32(Xmittime(uint64(police.Rate.Rate), uint32(buffer))) } police.Mtu = fattrs.Mtu if police.PeakRate.Rate != 0 { police.PeakRate.Mpu = fattrs.Mpu police.PeakRate.Overhead = fattrs.Overhead if CalcRtable(&police.PeakRate, ptab[:], pcellLog, fattrs.Mtu, linklayer) < 0 { return nil, errors.New("POLICE: failed to calculate peak rate table") } } return &Fw{ FilterAttrs: attrs, ClassId: fattrs.ClassId, InDev: fattrs.InDev, Mask: fattrs.Mask, Police: police, AvRate: avrate, Rtab: rtab, Ptab: ptab, }, nil } func (filter *Fw) Attrs() *FilterAttrs { return &filter.FilterAttrs } func 
(filter *Fw) Type() string { return "fw" } // FilterDel will delete a filter from the system. // Equivalent to: `tc filter del $filter` func FilterDel(filter Filter) error { return pkgHandle.FilterDel(filter) } // FilterDel will delete a filter from the system. // Equivalent to: `tc filter del $filter` func (h *Handle) FilterDel(filter Filter) error { req := h.newNetlinkRequest(unix.RTM_DELTFILTER, unix.NLM_F_ACK) base := filter.Attrs() msg := &nl.TcMsg{ Family: nl.FAMILY_ALL, Ifindex: int32(base.LinkIndex), Handle: base.Handle, Parent: base.Parent, Info: MakeHandle(base.Priority, nl.Swap16(base.Protocol)), } req.AddData(msg) _, err := req.Execute(unix.NETLINK_ROUTE, 0) return err } // FilterAdd will add a filter to the system. // Equivalent to: `tc filter add $filter` func FilterAdd(filter Filter) error { return pkgHandle.FilterAdd(filter) } // FilterAdd will add a filter to the system. // Equivalent to: `tc filter add $filter` func (h *Handle) FilterAdd(filter Filter) error { native = nl.NativeEndian() req := h.newNetlinkRequest(unix.RTM_NEWTFILTER, unix.NLM_F_CREATE|unix.NLM_F_EXCL|unix.NLM_F_ACK) base := filter.Attrs() msg := &nl.TcMsg{ Family: nl.FAMILY_ALL, Ifindex: int32(base.LinkIndex), Handle: base.Handle, Parent: base.Parent, Info: MakeHandle(base.Priority, nl.Swap16(base.Protocol)), } req.AddData(msg) req.AddData(nl.NewRtAttr(nl.TCA_KIND, nl.ZeroTerminated(filter.Type()))) options := nl.NewRtAttr(nl.TCA_OPTIONS, nil) switch filter := filter.(type) { case *U32: // Convert TcU32Sel into nl.TcU32Sel as it is without copy. sel := (*nl.TcU32Sel)(unsafe.Pointer(filter.Sel)) if sel == nil { // match all sel = &nl.TcU32Sel{ Nkeys: 1, Flags: nl.TC_U32_TERMINAL, } sel.Keys = append(sel.Keys, nl.TcU32Key{}) } if native != networkOrder { // Copy TcU32Sel. 
cSel := *sel keys := make([]nl.TcU32Key, cap(sel.Keys)) copy(keys, sel.Keys) cSel.Keys = keys sel = &cSel // Handle the endianness of attributes sel.Offmask = native.Uint16(htons(sel.Offmask)) sel.Hmask = native.Uint32(htonl(sel.Hmask)) for i, key := range sel.Keys { sel.Keys[i].Mask = native.Uint32(htonl(key.Mask)) sel.Keys[i].Val = native.Uint32(htonl(key.Val)) } } sel.Nkeys = uint8(len(sel.Keys)) options.AddRtAttr(nl.TCA_U32_SEL, sel.Serialize()) if filter.ClassId != 0 { options.AddRtAttr(nl.TCA_U32_CLASSID, nl.Uint32Attr(filter.ClassId)) } if filter.Divisor != 0 { if (filter.Divisor-1)&filter.Divisor != 0 { return fmt.Errorf("illegal divisor %d. Must be a power of 2", filter.Divisor) } options.AddRtAttr(nl.TCA_U32_DIVISOR, nl.Uint32Attr(filter.Divisor)) } if filter.Hash != 0 { options.AddRtAttr(nl.TCA_U32_HASH, nl.Uint32Attr(filter.Hash)) } actionsAttr := options.AddRtAttr(nl.TCA_U32_ACT, nil) // backwards compatibility if filter.RedirIndex != 0 { filter.Actions = append([]Action{NewMirredAction(filter.RedirIndex)}, filter.Actions...) 
} if err := EncodeActions(actionsAttr, filter.Actions); err != nil { return err } case *Fw: if filter.Mask != 0 { b := make([]byte, 4) native.PutUint32(b, filter.Mask) options.AddRtAttr(nl.TCA_FW_MASK, b) } if filter.InDev != "" { options.AddRtAttr(nl.TCA_FW_INDEV, nl.ZeroTerminated(filter.InDev)) } if (filter.Police != nl.TcPolice{}) { police := options.AddRtAttr(nl.TCA_FW_POLICE, nil) police.AddRtAttr(nl.TCA_POLICE_TBF, filter.Police.Serialize()) if (filter.Police.Rate != nl.TcRateSpec{}) { payload := SerializeRtab(filter.Rtab) police.AddRtAttr(nl.TCA_POLICE_RATE, payload) } if (filter.Police.PeakRate != nl.TcRateSpec{}) { payload := SerializeRtab(filter.Ptab) police.AddRtAttr(nl.TCA_POLICE_PEAKRATE, payload) } } if filter.ClassId != 0 { b := make([]byte, 4) native.PutUint32(b, filter.ClassId) options.AddRtAttr(nl.TCA_FW_CLASSID, b) } case *BpfFilter: var bpfFlags uint32 if filter.ClassId != 0 { options.AddRtAttr(nl.TCA_BPF_CLASSID, nl.Uint32Attr(filter.ClassId)) } if filter.Fd >= 0 { options.AddRtAttr(nl.TCA_BPF_FD, nl.Uint32Attr((uint32(filter.Fd)))) } if filter.Name != "" { options.AddRtAttr(nl.TCA_BPF_NAME, nl.ZeroTerminated(filter.Name)) } if filter.DirectAction { bpfFlags |= nl.TCA_BPF_FLAG_ACT_DIRECT } options.AddRtAttr(nl.TCA_BPF_FLAGS, nl.Uint32Attr(bpfFlags)) case *MatchAll: actionsAttr := options.AddRtAttr(nl.TCA_MATCHALL_ACT, nil) if err := EncodeActions(actionsAttr, filter.Actions); err != nil { return err } if filter.ClassId != 0 { options.AddRtAttr(nl.TCA_MATCHALL_CLASSID, nl.Uint32Attr(filter.ClassId)) } } req.AddData(options) _, err := req.Execute(unix.NETLINK_ROUTE, 0) return err } // FilterList gets a list of filters in the system. // Equivalent to: `tc filter show`. // Generally returns nothing if link and parent are not specified. func FilterList(link Link, parent uint32) ([]Filter, error) { return pkgHandle.FilterList(link, parent) } // FilterList gets a list of filters in the system. // Equivalent to: `tc filter show`. 
// Generally returns nothing if link and parent are not specified. func (h *Handle) FilterList(link Link, parent uint32) ([]Filter, error) { req := h.newNetlinkRequest(unix.RTM_GETTFILTER, unix.NLM_F_DUMP) msg := &nl.TcMsg{ Family: nl.FAMILY_ALL, Parent: parent, } if link != nil { base := link.Attrs() h.ensureIndex(base) msg.Ifindex = int32(base.Index) } req.AddData(msg) msgs, err := req.Execute(unix.NETLINK_ROUTE, unix.RTM_NEWTFILTER) if err != nil { return nil, err } var res []Filter for _, m := range msgs { msg := nl.DeserializeTcMsg(m) attrs, err := nl.ParseRouteAttr(m[msg.Len():]) if err != nil { return nil, err } base := FilterAttrs{ LinkIndex: int(msg.Ifindex), Handle: msg.Handle, Parent: msg.Parent, } base.Priority, base.Protocol = MajorMinor(msg.Info) base.Protocol = nl.Swap16(base.Protocol) var filter Filter filterType := "" detailed := false for _, attr := range attrs { switch attr.Attr.Type { case nl.TCA_KIND: filterType = string(attr.Value[:len(attr.Value)-1]) switch filterType { case "u32": filter = &U32{} case "fw": filter = &Fw{} case "bpf": filter = &BpfFilter{} case "matchall": filter = &MatchAll{} default: filter = &GenericFilter{FilterType: filterType} } case nl.TCA_OPTIONS: data, err := nl.ParseRouteAttr(attr.Value) if err != nil { return nil, err } switch filterType { case "u32": detailed, err = parseU32Data(filter, data) if err != nil { return nil, err } case "fw": detailed, err = parseFwData(filter, data) if err != nil { return nil, err } case "bpf": detailed, err = parseBpfData(filter, data) if err != nil { return nil, err } case "matchall": detailed, err = parseMatchAllData(filter, data) if err != nil { return nil, err } default: detailed = true } } } // only return the detailed version of the filter if detailed { *filter.Attrs() = base res = append(res, filter) } } return res, nil } func toTcGen(attrs *ActionAttrs, tcgen *nl.TcGen) { tcgen.Index = uint32(attrs.Index) tcgen.Capab = uint32(attrs.Capab) tcgen.Action = int32(attrs.Action) 
tcgen.Refcnt = int32(attrs.Refcnt) tcgen.Bindcnt = int32(attrs.Bindcnt) } func toAttrs(tcgen *nl.TcGen, attrs *ActionAttrs) { attrs.Index = int(tcgen.Index) attrs.Capab = int(tcgen.Capab) attrs.Action = TcAct(tcgen.Action) attrs.Refcnt = int(tcgen.Refcnt) attrs.Bindcnt = int(tcgen.Bindcnt) } func EncodeActions(attr *nl.RtAttr, actions []Action) error { tabIndex := int(nl.TCA_ACT_TAB) for _, action := range actions { switch action := action.(type) { default: return fmt.Errorf("unknown action type %s", action.Type()) case *MirredAction: table := attr.AddRtAttr(tabIndex, nil) tabIndex++ table.AddRtAttr(nl.TCA_ACT_KIND, nl.ZeroTerminated("mirred")) aopts := table.AddRtAttr(nl.TCA_ACT_OPTIONS, nil) mirred := nl.TcMirred{ Eaction: int32(action.MirredAction), Ifindex: uint32(action.Ifindex), } toTcGen(action.Attrs(), &mirred.TcGen) aopts.AddRtAttr(nl.TCA_MIRRED_PARMS, mirred.Serialize()) case *BpfAction: table := attr.AddRtAttr(tabIndex, nil) tabIndex++ table.AddRtAttr(nl.TCA_ACT_KIND, nl.ZeroTerminated("bpf")) aopts := table.AddRtAttr(nl.TCA_ACT_OPTIONS, nil) gen := nl.TcGen{} toTcGen(action.Attrs(), &gen) aopts.AddRtAttr(nl.TCA_ACT_BPF_PARMS, gen.Serialize()) aopts.AddRtAttr(nl.TCA_ACT_BPF_FD, nl.Uint32Attr(uint32(action.Fd))) aopts.AddRtAttr(nl.TCA_ACT_BPF_NAME, nl.ZeroTerminated(action.Name)) case *GenericAction: table := attr.AddRtAttr(tabIndex, nil) tabIndex++ table.AddRtAttr(nl.TCA_ACT_KIND, nl.ZeroTerminated("gact")) aopts := table.AddRtAttr(nl.TCA_ACT_OPTIONS, nil) gen := nl.TcGen{} toTcGen(action.Attrs(), &gen) aopts.AddRtAttr(nl.TCA_GACT_PARMS, gen.Serialize()) } } return nil } func parseActions(tables []syscall.NetlinkRouteAttr) ([]Action, error) { var actions []Action for _, table := range tables { var action Action var actionType string aattrs, err := nl.ParseRouteAttr(table.Value) if err != nil {<|fim▁hole|> return nil, err } nextattr: for _, aattr := range aattrs { switch aattr.Attr.Type { case nl.TCA_KIND: actionType = 
string(aattr.Value[:len(aattr.Value)-1]) // only parse if the action is mirred or bpf switch actionType { case "mirred": action = &MirredAction{} case "bpf": action = &BpfAction{} case "gact": action = &GenericAction{} default: break nextattr } case nl.TCA_OPTIONS: adata, err := nl.ParseRouteAttr(aattr.Value) if err != nil { return nil, err } for _, adatum := range adata { switch actionType { case "mirred": switch adatum.Attr.Type { case nl.TCA_MIRRED_PARMS: mirred := *nl.DeserializeTcMirred(adatum.Value) toAttrs(&mirred.TcGen, action.Attrs()) action.(*MirredAction).ActionAttrs = ActionAttrs{} action.(*MirredAction).Ifindex = int(mirred.Ifindex) action.(*MirredAction).MirredAction = MirredAct(mirred.Eaction) } case "bpf": switch adatum.Attr.Type { case nl.TCA_ACT_BPF_PARMS: gen := *nl.DeserializeTcGen(adatum.Value) toAttrs(&gen, action.Attrs()) case nl.TCA_ACT_BPF_FD: action.(*BpfAction).Fd = int(native.Uint32(adatum.Value[0:4])) case nl.TCA_ACT_BPF_NAME: action.(*BpfAction).Name = string(adatum.Value[:len(adatum.Value)-1]) } case "gact": switch adatum.Attr.Type { case nl.TCA_GACT_PARMS: gen := *nl.DeserializeTcGen(adatum.Value) toAttrs(&gen, action.Attrs()) } } } } } actions = append(actions, action) } return actions, nil } func parseU32Data(filter Filter, data []syscall.NetlinkRouteAttr) (bool, error) { native = nl.NativeEndian() u32 := filter.(*U32) detailed := false for _, datum := range data { switch datum.Attr.Type { case nl.TCA_U32_SEL: detailed = true sel := nl.DeserializeTcU32Sel(datum.Value) u32.Sel = (*TcU32Sel)(unsafe.Pointer(sel)) if native != networkOrder { // Handle the endianness of attributes u32.Sel.Offmask = native.Uint16(htons(sel.Offmask)) u32.Sel.Hmask = native.Uint32(htonl(sel.Hmask)) for i, key := range u32.Sel.Keys { u32.Sel.Keys[i].Mask = native.Uint32(htonl(key.Mask)) u32.Sel.Keys[i].Val = native.Uint32(htonl(key.Val)) } } case nl.TCA_U32_ACT: tables, err := nl.ParseRouteAttr(datum.Value) if err != nil { return detailed, err } 
u32.Actions, err = parseActions(tables) if err != nil { return detailed, err } for _, action := range u32.Actions { if action, ok := action.(*MirredAction); ok { u32.RedirIndex = int(action.Ifindex) } } case nl.TCA_U32_CLASSID: u32.ClassId = native.Uint32(datum.Value) case nl.TCA_U32_DIVISOR: u32.Divisor = native.Uint32(datum.Value) case nl.TCA_U32_HASH: u32.Hash = native.Uint32(datum.Value) } } return detailed, nil } func parseFwData(filter Filter, data []syscall.NetlinkRouteAttr) (bool, error) { native = nl.NativeEndian() fw := filter.(*Fw) detailed := true for _, datum := range data { switch datum.Attr.Type { case nl.TCA_FW_MASK: fw.Mask = native.Uint32(datum.Value[0:4]) case nl.TCA_FW_CLASSID: fw.ClassId = native.Uint32(datum.Value[0:4]) case nl.TCA_FW_INDEV: fw.InDev = string(datum.Value[:len(datum.Value)-1]) case nl.TCA_FW_POLICE: adata, _ := nl.ParseRouteAttr(datum.Value) for _, aattr := range adata { switch aattr.Attr.Type { case nl.TCA_POLICE_TBF: fw.Police = *nl.DeserializeTcPolice(aattr.Value) case nl.TCA_POLICE_RATE: fw.Rtab = DeserializeRtab(aattr.Value) case nl.TCA_POLICE_PEAKRATE: fw.Ptab = DeserializeRtab(aattr.Value) } } } } return detailed, nil } func parseBpfData(filter Filter, data []syscall.NetlinkRouteAttr) (bool, error) { native = nl.NativeEndian() bpf := filter.(*BpfFilter) detailed := true for _, datum := range data { switch datum.Attr.Type { case nl.TCA_BPF_FD: bpf.Fd = int(native.Uint32(datum.Value[0:4])) case nl.TCA_BPF_NAME: bpf.Name = string(datum.Value[:len(datum.Value)-1]) case nl.TCA_BPF_CLASSID: bpf.ClassId = native.Uint32(datum.Value[0:4]) case nl.TCA_BPF_FLAGS: flags := native.Uint32(datum.Value[0:4]) if (flags & nl.TCA_BPF_FLAG_ACT_DIRECT) != 0 { bpf.DirectAction = true } } } return detailed, nil } func parseMatchAllData(filter Filter, data []syscall.NetlinkRouteAttr) (bool, error) { native = nl.NativeEndian() matchall := filter.(*MatchAll) detailed := true for _, datum := range data { switch datum.Attr.Type { case 
nl.TCA_MATCHALL_CLASSID: matchall.ClassId = native.Uint32(datum.Value[0:4]) case nl.TCA_MATCHALL_ACT: tables, err := nl.ParseRouteAttr(datum.Value) if err != nil { return detailed, err } matchall.Actions, err = parseActions(tables) if err != nil { return detailed, err } } } return detailed, nil } func AlignToAtm(size uint) uint { var linksize, cells int cells = int(size / nl.ATM_CELL_PAYLOAD) if (size % nl.ATM_CELL_PAYLOAD) > 0 { cells++ } linksize = cells * nl.ATM_CELL_SIZE return uint(linksize) } func AdjustSize(sz uint, mpu uint, linklayer int) uint { if sz < mpu { sz = mpu } switch linklayer { case nl.LINKLAYER_ATM: return AlignToAtm(sz) default: return sz } } func CalcRtable(rate *nl.TcRateSpec, rtab []uint32, cellLog int, mtu uint32, linklayer int) int { bps := rate.Rate mpu := rate.Mpu var sz uint if mtu == 0 { mtu = 2047 } if cellLog < 0 { cellLog = 0 for (mtu >> uint(cellLog)) > 255 { cellLog++ } } for i := 0; i < 256; i++ { sz = AdjustSize(uint((i+1)<<uint32(cellLog)), uint(mpu), linklayer) rtab[i] = uint32(Xmittime(uint64(bps), uint32(sz))) } rate.CellAlign = -1 rate.CellLog = uint8(cellLog) rate.Linklayer = uint8(linklayer & nl.TC_LINKLAYER_MASK) return cellLog } func DeserializeRtab(b []byte) [256]uint32 { var rtab [256]uint32 native := nl.NativeEndian() r := bytes.NewReader(b) _ = binary.Read(r, native, &rtab) return rtab } func SerializeRtab(rtab [256]uint32) []byte { native := nl.NativeEndian() var w bytes.Buffer _ = binary.Write(&w, native, rtab) return w.Bytes() }<|fim▁end|>
<|file_name|>batch.py<|end_file_name|><|fim▁begin|>""" batch.py Batch simulation for M1 model using NetPyNE """ from netpyne.batch import Batch import numpy as np def runBatch(b, label, setup='mpi_bulletin'): b.batchLabel = label b.saveFolder = 'data/'+b.batchLabel b.method = 'grid' if setup == 'mpi_bulletin': b.runCfg = {'type': 'mpi_bulletin', 'script': 'init.py', 'skip': True} elif setup == 'hpc_slurm_comet': b.runCfg = {'type': 'hpc_slurm', 'allocation': 'csd403', 'walltime': '6:00:00', 'nodes': 1, 'coresPerNode': 24, 'email': '[email protected]', 'folder': '/home/salvadord/netpyne/examples/batchCell', # startup folder 'script': 'init.py', 'mpiCommand': 'ibrun'} # specific command for Comet b.run() # run batch def runBatchComet(b, label): b.batchLabel = label b.saveFolder = 'data/'+b.batchLabel b.method = 'grid' b.runCfg = {'type': 'mpi_bulletin', 'script': 'init.py', 'skip': True} b.run() # run batch def batchNa(): params = {'dendNa': [0.025, 0.03, 0.035, 0.4], ('IClamp1', 'amp'): list(np.arange(-2.0, 8.0, 0.5)/10.0)} initCfg = {'duration': 1.1, 'tau1NMDA': 15} b = Batch(params=params, initCfg=initCfg) runBatch(b, 'batchNa', setup='mpi_bulletin') <|fim▁hole|> ('NetStim1', 'weight'): list(np.arange(1.0, 10.0, 1.0)/1e4)} initCfg = {'duration': 1.1} b = Batch(params=params, initCfg=initCfg) runBatch(b, 'batchNMDA', setup='mpi_bulletin') def batchNMDAMapping(): params = {'tau1NMDA': [10, 15, 20, 25]} initCfg = {'duration': 1100} from cfg import cfg from netParams import netParams b = Batch(params=params, initCfg=initCfg, cfg=cfg, netParams=netParams) runBatch(b, 'batchNMDAMapping', setup='mpi_bulletin') # Main code if __name__ == '__main__': # batchNa() batchNMDAMapping()<|fim▁end|>
def batchNMDA(): params = {'tau1NMDA': [10, 15, 20, 25],
<|file_name|>tsxSpreadAttributesResolution5.tsx<|end_file_name|><|fim▁begin|>// @filename: file.tsx // @jsx: preserve // @noLib: true // @skipLibCheck: true // @libFiles: react.d.ts,lib.d.ts import React = require('react'); interface PoisonedProp { x: string; y: 2; } class Poisoned extends React.Component<PoisonedProp, {}> { render() { return <div>Hello</div>; } } let obj = { x: "hello world", y: 2 }; // Error as "obj" has type { x: string; y: number } let p = <Poisoned {...obj} />; class EmptyProp extends React.Component<{}, {}> { render() { return <div>Default hi</div>; } greeting: string; } let o = {<|fim▁hole|>} // Ok let e = <EmptyProp {...o} />;<|fim▁end|>
prop1: false
<|file_name|>reference.py<|end_file_name|><|fim▁begin|>from six import string_types from pypif.obj.common.display_item import DisplayItem from pypif.obj.common.name import Name from pypif.obj.common.pages import Pages from pypif.obj.common.pio import Pio class Reference(Pio): """ Information about a referenced publication. """ def __init__(self, doi=None, isbn=None, issn=None, url=None, title=None, publisher=None, journal=None, volume=None, issue=None, year=None, figure=None, table=None, pages=None, authors=None, editors=None, affiliations=None, acknowledgements=None, references=None, tags=None, **kwargs): """ Constructor. :param doi: String with DOI of the published work :param isbn: String with ISBN of the published work :param issn: String with ISSN of the published work :param url: String with URL to the published work :param title: String with title of the published work. :param publisher: String with publisher of the work. :param journal: String with the journal in which the work was published. :param volume: String with the volume in which the work was published. :param issue: String with the issue in which the work was published. :param year: String with the year in which the work was published. :param figure: Dictionary or :class:`.DisplayItem` object with the figure to reference. :param table: Dictionary or :class:`.DisplayItem` object with the table to reference. :param pages: String, integer, dictionary, or :class:`.Pages` object with the starting and ending pages for the published work. :param authors: List of strings, dictionaries, or :class:`.Name` objects with information about the authors. :param editors: List of strings, dictionaries, or :class:`.Name` objects with information about the editors. :param affiliations: List of strings with affiliations. :param acknowledgements: List of strings with acknowledgements. :param references: List of dictionaries or :class:`.Reference` objects with works cited by this published work. 
:param tags: List of strings or numbers that are tags for this object. :param kwargs: Dictionary of fields that are not supported. """ super(Reference, self).__init__(tags=tags, **kwargs) self._doi = None self.doi = doi self._isbn = None self.isbn = isbn self._issn = None self.issn = issn self._url = None self.url = url self._title = None self.title = title self._publisher = None self.publisher = publisher self._journal = None self.journal = journal self._volume = None self.volume = volume self._issue = None self.issue = issue self._year = None self.year = year self._figure = None self.figure = figure self._table = None self.table = table self._pages = None self.pages = pages self._authors = None self.authors = authors self._editors = None self.editors = editors self._affiliations = None self.affiliations = affiliations self._acknowledgements = None self.acknowledgements = acknowledgements self._references = None self.references = references @property def doi(self): return self._doi @doi.setter def doi(self, doi): self._validate_type('doi', doi, string_types) self._doi = doi @doi.deleter def doi(self): self._doi = None @property def isbn(self): return self._isbn @isbn.setter def isbn(self, isbn): self._validate_type('isbn', isbn, string_types) self._isbn = isbn @isbn.deleter def isbn(self): self._isbn = None @property def issn(self): return self.issn @issn.setter def issn(self, issn): self._validate_type('issn', issn, string_types) self._issn = issn @issn.deleter def issn(self): self._issn = None @property def url(self): return self._url @url.setter def url(self, url): self._validate_type('url', url, string_types) self._url = url @url.deleter def url(self): self._url = None @property def title(self): return self._title @title.setter def title(self, title):<|fim▁hole|> @title.deleter def title(self): self._title = None @property def publisher(self): return self._publisher @publisher.setter def publisher(self, publisher): self._validate_type('publisher', publisher, 
string_types) self._publisher = publisher @publisher.deleter def publisher(self): self._publisher = None @property def journal(self): return self._journal @journal.setter def journal(self, journal): self._validate_type('journal', journal, string_types) self._journal = journal @journal.deleter def journal(self): self._journal = None @property def volume(self): return self._volume @volume.setter def volume(self, volume): self._validate_type('volume', volume, string_types) self._volume = volume @volume.deleter def volume(self): self._volume = None @property def issue(self): return self._issue @issue.setter def issue(self, issue): self._validate_type('issue', issue, string_types) self._issue = issue @issue.deleter def issue(self): self._issue = None @property def year(self): return self._year @year.setter def year(self, year): self._validate_type('year', year, string_types) self._year = year @year.deleter def year(self): self._year = None @property def figure(self): return self._figure @figure.setter def figure(self, figure): self._validate_type('figure', figure, dict, DisplayItem) self._figure = self._get_object(DisplayItem, figure) @figure.deleter def figure(self): self._figure = None @property def table(self): return self._table @table.setter def table(self, table): self._validate_type('table', table, dict, DisplayItem) self._table = self._get_object(DisplayItem, table) @table.deleter def table(self): self._table = None @property def pages(self): return self._pages @pages.setter def pages(self, pages): self._validate_type('pages', pages, string_types, int, dict, Pages) self._pages = self._get_object(Pages, pages) @pages.deleter def pages(self): self._pages = None @property def authors(self): return self._authors @authors.setter def authors(self, authors): self._validate_list_type('authors', authors, string_types, dict, Name) self._authors = self._get_object(Name, authors) @authors.deleter def authors(self): self._authors = None @property def editors(self): return 
self._editors @editors.setter def editors(self, editors): self._validate_list_type('editors', editors, string_types, dict, Name) self._editors = self._get_object(Name, editors) @editors.deleter def editors(self): self._editors = None @property def affiliations(self): return self._affiliations @affiliations.setter def affiliations(self, affiliations): self._validate_list_type('affiliations', affiliations, string_types) self._affiliations = affiliations @affiliations.deleter def affiliations(self): self._affiliations = None @property def acknowledgements(self): return self._acknowledgements @acknowledgements.setter def acknowledgements(self, acknowledgements): self._validate_list_type('acknowledgements', acknowledgements, string_types) self._acknowledgements = acknowledgements @acknowledgements.deleter def acknowledgements(self): self._acknowledgements = None @property def references(self): return self._references @references.setter def references(self, references): self._validate_list_type('references', references, dict, Reference) self._references = self._get_object(Reference, references)<|fim▁end|>
self._validate_type('title', title, string_types) self._title = title
<|file_name|>rtdeps.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! This module contains the linkage attributes to all runtime dependencies of //! the standard library This varies per-platform, but these libraries are //! necessary for running libstd. // All platforms need to link to rustrt #[link(name = "rustrt", kind = "static")] extern {} // LLVM implements the `frem` instruction as a call to `fmod`, which lives in // libm. Hence, we must explicitly link to it. // // On linux librt and libdl are indirect dependencies via rustrt, // and binutils 2.22+ won't add them automatically #[cfg(target_os = "linux")] #[link(name = "dl")] #[link(name = "pthread")] extern {} #[cfg(target_os = "android")] #[link(name = "dl")] #[link(name = "log")]<|fim▁hole|> #[cfg(target_os = "freebsd")] #[link(name = "execinfo")] #[link(name = "pthread")] extern {} #[cfg(target_os = "macos")] #[link(name = "System")] extern {}<|fim▁end|>
extern {}
<|file_name|>bitcoin_el_GR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="el_GR" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Number7</source> <translation>Σχετικά με το Number7</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Number7&lt;/b&gt; version</source> <translation>Έκδοση Number7</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. 
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Πνευματική ιδιοκτησία </translation> </message> <message> <location line="+0"/> <source>The Number7 developers</source> <translation>Οι Number7 προγραμματιστές </translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Βιβλίο Διευθύνσεων</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Διπλό-κλικ για επεξεργασία της διεύθυνσης ή της ετικέτας</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Δημιούργησε νέα διεύθυνση</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Αντέγραψε την επιλεγμένη διεύθυνση στο πρόχειρο του συστήματος</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Νέα διεύθυνση</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Number7 addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Αυτές είναι οι Number7 διευθύνσεις σας για να λαμβάνετε πληρωμές. 
Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Δείξε &amp;QR κωδικα</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Number7 address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Number7</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Υπέγραψε το μήνυμα</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Αντιγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Number7 address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Number7</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Διαγραφή</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Number7 addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation>Αυτές είναι οι Number7 διευθύνσεις σας για να στέλνετε πληρωμές. Ελέγχετε πάντα το ποσό και τη διεύθυνση παραλαβής πριν στείλετε νομίσματα.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Αντιγραφή &amp;επιγραφής</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Επεξεργασία</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Εξαγωγή Δεδομενων Βιβλίου Διευθύνσεων</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Σφάλμα εξαγωγής</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Ετικέτα</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(χωρίς ετικέτα)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Φράση πρόσβασης </translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Βάλτε κωδικό πρόσβασης</translation> </message> <message> <location 
line="+14"/> <source>New passphrase</source> <translation>Νέος κωδικός πρόσβασης</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Επανέλαβε τον νέο κωδικό πρόσβασης</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Εισάγετε τον νέο κωδικό πρόσβασης στον πορτοφόλι &lt;br/&gt; Παρακαλώ χρησιμοποιείστε ένα κωδικό με &lt;b&gt; 10 ή περισσότερους τυχαίους χαρακτήρες&lt;/b&gt; ή &lt;b&gt; οχτώ ή παραπάνω λέξεις&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Αυτη η ενεργεία χρειάζεται τον κωδικό του πορτοφολιού για να ξεκλειδώσει το πορτοφόλι.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Ξεκλειδωσε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Αυτη η ενεργεια χρειάζεται τον κωδικο του πορτοφολιου για να αποκρυπτογραφησειι το πορτοφολι.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Αποκρυπτογράφησε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Άλλαξε κωδικο πρόσβασης</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Εισάγετε τον παλιό και τον νεο κωδικο στο πορτοφολι.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet 
encryption</source> <translation>Επιβεβαίωσε την κρυπτογραφηση του πορτοφολιού</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR NUMBER7S&lt;/b&gt;!</source> <translation>Προσοχη: Εαν κρυπτογραφησεις το πορτοφολι σου και χάσεις τον κωδικο σου θα χάσεις &lt;b&gt; ΟΛΑ ΣΟΥ ΤΑ NUMBER7S&lt;/b&gt;! Είσαι σίγουρος ότι θέλεις να κρυπτογραφησεις το πορτοφολι;</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Είστε σίγουροι ότι θέλετε να κρυπτογραφήσετε το πορτοφόλι σας;</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>ΣΗΜΑΝΤΙΚΟ: Τα προηγούμενα αντίγραφα ασφαλείας που έχετε κάνει από το αρχείο του πορτοφόλιου σας θα πρέπει να αντικατασταθουν με το νέο που δημιουργείται, κρυπτογραφημένο αρχείο πορτοφόλιου. Για λόγους ασφαλείας, τα προηγούμενα αντίγραφα ασφαλείας του μη κρυπτογραφημένου αρχείου πορτοφόλιου θα καταστουν άχρηστα μόλις αρχίσετε να χρησιμοποιείτε το νέο κρυπτογραφημένο πορτοφόλι. </translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Προσοχη: το πλήκτρο Caps Lock είναι ενεργο.</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Κρυπτογραφημενο πορτοφολι</translation> </message> <message> <location line="-56"/> <source>Number7 will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your number7s from being stolen by malware infecting your computer.</source> <translation>Το Number7 θα κλεισει τώρα για να τελειώσει την διαδικασία κρυπτογραφησης. Θυμησου ότι κρυπτογραφώντας το πορτοφολι σου δεν μπορείς να προστατέψεις πλήρως τα number7s σου από κλοπή στην περίπτωση όπου μολυνθεί ο υπολογιστής σου με κακόβουλο λογισμικο.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Η κρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Η κρυπτογράφηση του πορτοφολιού απέτυχε λογω εσωτερικού σφάλματος. Το πορτοφολι δεν κρυπτογραφηθηκε.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Οι εισαχθέντες κωδικοί δεν ταιριάζουν.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Ο κωδικος που εισήχθη για την αποκρυπτογραφηση του πορτοφολιού ήταν λαθος.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Η αποκρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Ο κωδικος του πορτοφολιού άλλαξε με επιτυχία.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign 
&amp;message...</source> <translation>Υπογραφή &amp;Μηνύματος...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Συγχρονισμός με το δίκτυο...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Επισκόπηση</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Εμφάνισε γενική εικονα του πορτοφολιού</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Συναλλαγές</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Περιήγηση στο ιστορικο συνναλαγων</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Εξεργασια της λιστας των αποθηκευμενων διευθύνσεων και ετικετων</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Εμφάνισε την λίστα των διευθύνσεων για την παραλαβή πληρωμων</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>Έ&amp;ξοδος</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Εξοδος από την εφαρμογή</translation> </message> <message> <location line="+4"/> <source>Show information about Number7</source> <translation>Εμφάνισε πληροφορίες σχετικά με το Number7</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Σχετικά με &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Εμφάνισε πληροφορίες σχετικά με Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Επιλογές...</translation> </message> <message> <location line="+6"/> 
<source>&amp;Encrypt Wallet...</source> <translation>&amp;Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Άλλαξε κωδικο πρόσβασης</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Εισαγωγή μπλοκ από τον σκληρο δίσκο ... </translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Φόρτωση ευρετηρίου μπλοκ στον σκληρο δισκο...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Number7 address</source> <translation>Στείλε νομισματα σε μια διεύθυνση number7</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Number7</source> <translation>Επεργασία ρυθμισεων επιλογών για το Number7</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Δημιουργία αντιγράφου ασφαλείας πορτοφολιού σε άλλη τοποθεσία</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Αλλαγή του κωδικού κρυπτογράφησης του πορτοφολιού</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Άνοιγμα κονσόλας αποσφαλμάτωσης και διαγνωστικών</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Number7</source> <translation>Number7</translation> 
</message> <message> <location line="-530"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Αποστολή</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Παραλαβή </translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Διεύθυνσεις</translation> </message> <message> <location line="+22"/> <source>&amp;About Number7</source> <translation>&amp;Σχετικα:Number7</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Εμφάνισε/Κρύψε</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Εμφάνιση ή αποκρύψη του κεντρικου παράθυρου </translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Κρυπτογραφήστε τα ιδιωτικά κλειδιά που ανήκουν στο πορτοφόλι σας </translation> </message> <message> <location line="+7"/> <source>Sign messages with your Number7 addresses to prove you own them</source> <translation>Υπογράψτε ένα μήνυμα για να βεβαιώσετε πως είστε ο κάτοχος αυτής της διεύθυνσης</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Number7 addresses</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Number7</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Αρχείο</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Ρυθμίσεις</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Βοήθεια</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Εργαλειοθήκη 
καρτελών</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Number7 client</source> <translation>Πελάτης Number7</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Number7 network</source> <translation><numerusform>%n ενεργή σύνδεση στο δίκτυο Number7</numerusform><numerusform>%n ενεργές συνδέσεις στο δίκτυο Number7</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Η πηγή του μπλοκ δεν ειναι διαθέσιμη... </translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Μεταποιημένα %1 από %2 (κατ &apos;εκτίμηση) μπλοκ της ιστορίας της συναλλαγής. </translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Έγινε λήψη %1 μπλοκ ιστορικού συναλλαγών</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n ώρα</numerusform><numerusform>%n ώρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n ημέρα</numerusform><numerusform>%n ημέρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n εβδομάδα</numerusform><numerusform>%n εβδομαδες</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 πίσω</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Το τελευταίο μπλοκ που ελήφθη δημιουργήθηκε %1 πριν.</translation> </message> <message> <location 
line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Οι συναλλαγές μετά από αυτό δεν θα είναι ακόμη ορατες.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Η συναλλαγή ξεπερνάει το όριο. Μπορεί να ολοκληρωθεί με μια αμοιβή των %1, η οποία αποδίδεται στους κόμβους που επεξεργάζονται τις συναλλαγές και βοηθούν στην υποστήριξη του δικτύου. Θέλετε να συνεχίσετε;</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Ενημερωμένο</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Ενημέρωση...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Επιβεβαίωση αμοιβής συναλλαγής</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Η συναλλαγή απεστάλη</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Εισερχόμενη συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Ημερομηνία: %1 Ποσό: %2 Τύπος: %3 Διεύθυνση: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Χειρισμός URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not 
be parsed! This can be caused by an invalid Number7 address or malformed URI parameters.</source> <translation>Το URI δεν μπορεί να αναλυθεί! Αυτό μπορεί να προκληθεί από μια μη έγκυρη διεύθυνση Number7 ή ακατάλληλη παραμέτρο URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;ξεκλείδωτο&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;κλειδωμένο&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Number7 can no longer continue safely and will quit.</source> <translation>Παρουσιάστηκε ανεπανόρθωτο σφάλμα. Το Number7 δεν μπορεί πλέον να συνεχίσει με ασφάλεια και θα τερματισθει.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Ειδοποίηση Δικτύου</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Επεξεργασία Διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Η επιγραφή που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Διεύθυνση</translation> </message> <message> <location line="+10"/> <source>The address associated with this 
address book entry. This can only be modified for sending addresses.</source> <translation>Η διεύθυνση που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων. Μπορεί να τροποποιηθεί μόνο για τις διευθύνσεις αποστολής.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Νέα διεύθυνση λήψης</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Νέα διεύθυνση αποστολής</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Επεξεργασία διεύθυνσης λήψης</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Επεξεργασία διεύθυνσης αποστολής</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Η διεύθυνση &quot;%1&quot; βρίσκεται ήδη στο βιβλίο διευθύνσεων.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Number7 address.</source> <translation>Η διεύθυνση &quot;%1&quot; δεν είναι έγκυρη Number7 διεύθυνση.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Δεν είναι δυνατό το ξεκλείδωμα του πορτοφολιού.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Η δημιουργία νέου κλειδιού απέτυχε.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Number7-Qt</source> <translation>number7-qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>έκδοση</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> 
<message> <location line="+1"/> <source>command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>επιλογές UI</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Όρισε γλώσσα, για παράδειγμα &quot;de_DE&quot;(προεπιλογή:τοπικές ρυθμίσεις)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Έναρξη ελαχιστοποιημένο</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Εμφάνισε την οθόνη εκκίνησης κατά την εκκίνηση(προεπιλογή:1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Ρυθμίσεις</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Κύριο</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Η προαιρετική αμοιβή για κάθε kB επισπεύδει την επεξεργασία των συναλλαγών σας. Οι περισσότερες συναλλαγές είναι 1 kB. 
</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Αμοιβή &amp;συναλλαγής</translation> </message> <message> <location line="+31"/> <source>Automatically start Number7 after logging in to the system.</source> <translation>Αυτόματη εκκίνηση του Number7 μετά την εισαγωγή στο σύστημα</translation> </message> <message> <location line="+3"/> <source>&amp;Start Number7 on system login</source> <translation>&amp;Έναρξη του Number7 κατά την εκκίνηση του συστήματος</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Επαναφορα όλων των επιλογων του πελάτη σε default.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Επαναφορά ρυθμίσεων</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Δίκτυο</translation> </message> <message> <location line="+6"/> <source>Automatically open the Number7 client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Αυτόματο άνοιγμα των θυρών Number7 στον δρομολογητή. Λειτουργεί μόνο αν ο δρομολογητής σας υποστηρίζει τη λειτουργία UPnP.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Απόδοση θυρών με χρήση &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Number7 network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Σύνδεση στο Number7 δίκτυο μέσω διαμεσολαβητή SOCKS (π.χ. 
για σύνδεση μέσω Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Σύνδεση μέσω διαμεσολαβητή SOCKS</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP διαμεσολαβητή:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Διεύθυνση IP του διαμεσολαβητή (π.χ. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Θύρα:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Θύρα διαμεσολαβητή</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Έκδοση:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>SOCKS εκδοση του διαμεσολαβητη (e.g. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Παράθυρο</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Εμφάνιση μόνο εικονιδίου στην περιοχή ειδοποιήσεων κατά την ελαχιστοποίηση</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Ελαχιστοποίηση στην περιοχή ειδοποιήσεων αντί της γραμμής εργασιών</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Ελαχιστοποίηση αντί για έξοδο κατά το κλείσιμο του παραθύρου. Όταν αυτή η επιλογή είναι ενεργοποιημένη, η εφαρμογή θα κλείνει μόνο αφού επιλεγεί η Έξοδος από το μενού.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Ε&amp;λαχιστοποίηση κατά το κλείσιμο</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Απεικόνιση</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Γλώσσα περιβάλλοντος εργασίας: </translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Number7.</source> <translation>Εδώ μπορεί να ρυθμιστεί η γλώσσα διεπαφής χρήστη. Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Number7.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Μονάδα μέτρησης:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Διαλέξτε την προεπιλεγμένη υποδιαίρεση που θα εμφανίζεται όταν στέλνετε νομίσματα.</translation> </message> <message> <location line="+9"/> <source>Whether to show Number7 addresses in the transaction list or not.</source> <translation>Επιλέξτε αν θέλετε να εμφανίζονται οι διευθύνσεις Number7 στη λίστα συναλλαγών.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Εμφάνιση διευθύνσεων στη λίστα συναλλαγών</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;ΟΚ</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Ακύρωση</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> 
<translation>&amp;Εφαρμογή</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>προεπιλογή</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Επιβεβαιώση των επιλογων επαναφοράς </translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Για ορισμένες ρυθμίσεις πρεπει η επανεκκίνηση να τεθεί σε ισχύ.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Θέλετε να προχωρήσετε;</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Number7.</source> <translation>Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Number7.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Number7 network after a connection is established, but this process has not completed yet.</source> <translation>Οι πληροφορίες που εμφανίζονται μπορεί να είναι ξεπερασμένες. Το πορτοφόλι σας συγχρονίζεται αυτόματα με το δίκτυο Number7 μετά από μια σύνδεση, αλλά αυτή η διαδικασία δεν έχει ακόμη ολοκληρωθεί. 
</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Ανεπιβεβαίωτες</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Ανώριμος</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Εξορυγμενο υπόλοιπο που δεν έχει ακόμα ωριμάσει </translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Πρόσφατες συναλλαγές&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Το τρέχον υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Το άθροισμα των συναλλαγών που δεν έχουν ακόμα επιβεβαιωθεί και δεν προσμετρώνται στο τρέχον υπόλοιπό σας</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>εκτός συγχρονισμού</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start number7: click-to-pay handler</source> <translation>Δεν είναι δυνατή η εκκίνηση του Number7: click-to-pay handler</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Κώδικας QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Αίτηση πληρωμής</translation> </message> 
<message> <location line="+56"/> <source>Amount:</source> <translation>Ποσό:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Επιγραφή:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Μήνυμα:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Αποθήκευση ως...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Σφάλμα κατά την κωδικοποίηση του URI σε κώδικα QR</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Το αναγραφόμενο ποσό δεν είναι έγκυρο, παρακαλούμε να το ελέγξετε.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Το αποτέλεσμα της διεύθυνσης είναι πολύ μεγάλο. 
Μειώστε το μέγεθος για το κείμενο της ετικέτας/ μηνύματος.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Αποθήκευση κώδικα QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Εικόνες PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Όνομα Πελάτη</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>Μη διαθέσιμο</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Έκδοση Πελάτη</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Πληροφορία</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Χρησιμοποίηση της έκδοσης OpenSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Χρόνος εκκίνησης</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Δίκτυο</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Αριθμός συνδέσεων</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Στο testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Αλυσίδα μπλοκ</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Τρέχων αριθμός μπλοκ</translation> </message> <message> <location line="+23"/> 
<source>Estimated total blocks</source> <translation>Κατ&apos; εκτίμηση συνολικά μπλοκς</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Χρόνος τελευταίου μπλοκ</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Άνοιγμα</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+7"/> <source>Show the Number7-Qt help message to get a list with possible Number7 command-line options.</source> <translation>Εμφανιση του Number7-Qt μήνυματος βοήθειας για να πάρετε μια λίστα με τις πιθανές επιλογές Number7 γραμμής εντολών.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Εμφάνιση</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Κονσόλα</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Ημερομηνία κατασκευής</translation> </message> <message> <location line="-104"/> <source>Number7 - Debug window</source> <translation>Number7 - Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+25"/> <source>Number7 Core</source> <translation>Number7 Core</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Αρχείο καταγραφής εντοπισμού σφαλμάτων </translation> </message> <message> <location line="+7"/> <source>Open the Number7 debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Ανοίξτε το αρχείο καταγραφής εντοπισμού σφαλμάτων από τον τρέχοντα κατάλογο δεδομένων. Αυτό μπορεί να πάρει μερικά δευτερόλεπτα για τα μεγάλα αρχεία καταγραφής. 
</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Καθαρισμός κονσόλας</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Number7 RPC console.</source> <translation>Καλώς ήρθατε στην Number7 RPC κονσόλα.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Χρησιμοποιήστε το πάνω και κάτω βέλος για να περιηγηθείτε στο ιστορικο, και &lt;b&gt;Ctrl-L&lt;/b&gt; για εκκαθαριση οθονης.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Γράψτε &lt;b&gt;βοήθεια&lt;/b&gt; για μια επισκόπηση των διαθέσιμων εντολών</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Αποστολή σε πολλούς αποδέκτες ταυτόχρονα</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Προσθήκη αποδέκτη</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Διαγραφή όλων των πεδίων συναλλαγής</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Υπόλοιπο:</translation> </message> <message> <location line="+10"/> <source>123.456 
BTC</source> <translation>123,456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Επιβεβαίωση αποστολής</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>Αποστολη</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; σε %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Επιβεβαίωση αποστολής νομισμάτων</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Είστε βέβαιοι για την αποστολή %1;</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>και</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Η διεύθυνση του αποδέκτη δεν είναι σωστή. Παρακαλώ ελέγξτε ξανά.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Το ποσό πληρωμής πρέπει να είναι μεγαλύτερο από 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Το ποσό ξεπερνάει το διαθέσιμο υπόλοιπο</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Το σύνολο υπερβαίνει το υπόλοιπό σας όταν συμπεριληφθεί και η αμοιβή %1</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Βρέθηκε η ίδια διεύθυνση δύο φορές. 
Επιτρέπεται μία μόνο εγγραφή για κάθε διεύθυνση, σε κάθε διαδικασία αποστολής.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Σφάλμα: Η δημιουργία της συναλλαγής απέτυχε</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απερρίφθη. Αυτό ενδέχεται να συμβαίνει αν κάποια από τα νομίσματα έχουν ήδη ξοδευθεί, όπως αν χρησιμοποιήσατε αντίγραφο του wallet.dat και τα νομίσματα ξοδεύθηκαν εκεί.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Ποσό:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Πληρωμή &amp;σε:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Διεύθυνση αποστολής της πληρωμής (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Εισάγετε μια επιγραφή για αυτή τη διεύθυνση ώστε να καταχωρηθεί στο βιβλίο διευθύνσεων</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Αφαίρεση αποδέκτη</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Number7 address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Number7 (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Υπογραφές - Είσοδος / Επαλήθευση μήνυματος </translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Υπογραφή Μηνύματος</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. 
Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Μπορείτε να υπογράφετε μηνύματα με τις διευθύνσεις σας, ώστε ν&apos; αποδεικνύετε πως αυτές σας ανήκουν. Αποφεύγετε να υπογράφετε κάτι αόριστο καθώς ενδέχεται να εξαπατηθείτε. Υπογράφετε μόνο πλήρης δηλώσεις με τις οποίες συμφωνείτε.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Number7 (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Εισάγετε εδώ το μήνυμα που θέλετε να υπογράψετε</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Υπογραφή</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Αντέγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Number7 address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Number7</translation> 
</message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Υπογραφη μήνυματος</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Επαναφορά όλων των πεδίων μήνυματος</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Πληκτρολογήστε την υπογραφή διεύθυνσης, μήνυμα (βεβαιωθείτε ότι έχετε αντιγράψει τις αλλαγές γραμμής, κενά, tabs, κ.λπ. ακριβώς) και την υπογραφή παρακάτω, για να ελέγξει το μήνυμα. Να είστε προσεκτικοί για να μην διαβάσετε περισσότερα στην υπογραφή ό, τι είναι στην υπογραφή ίδιο το μήνυμα , για να μην εξαπατηθούν από έναν άνθρωπο -in - the-middle επίθεση.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Number7 (π.χ. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Number7 address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως υπογραφθηκε απο μια συγκεκριμένη διεύθυνση Number7</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Επαναφορά όλων επαλήθευμενων πεδίων μήνυματος </translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Number7 address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Εισάγετε μια διεύθυνση Number7 (π.χ. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Κάντε κλικ στο &quot;Υπογραφή Μηνύματος&quot; για να λάβετε την υπογραφή</translation> </message> <message> <location line="+3"/> <source>Enter Number7 signature</source> <translation>Εισαγωγή υπογραφής Number7</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Η διεύθυνση που εισήχθη είναι λάθος.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Παρακαλούμε ελέγξτε την διεύθυνση και δοκιμάστε ξανά.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>Η διεύθυνση που έχει εισαχθεί δεν αναφέρεται σε ένα πλήκτρο.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> 
<translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Το προσωπικό κλειδί εισαγμενης διευθυνσης δεν είναι διαθέσιμο.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Η υπογραφή του μηνύματος απέτυχε.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Μήνυμα υπεγράφη.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Η υπογραφή δεν μπόρεσε να αποκρυπτογραφηθεί.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Παρακαλούμε ελέγξτε την υπογραφή και δοκιμάστε ξανά.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Η υπογραφή δεν ταιριάζει με το μήνυμα. 
</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Η επιβεβαίωση του μηνύματος απέτυχε</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Μήνυμα επιβεβαιώθηκε.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Number7 developers</source> <translation>Οι Number7 προγραμματιστές </translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/χωρίς σύνδεση;</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/χωρίς επιβεβαίωση</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 επιβεβαιώσεις</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Κατάσταση</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Πηγή</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Δημιουργία </translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Από</translation> </message> <message> <location 
line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Προς</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation> δική σας διεύθυνση </translation> </message> <message> <location line="-2"/> <source>label</source> <translation>eπιγραφή</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Πίστωση </translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>μη αποδεκτό</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Debit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Τέλος συναλλαγής </translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Καθαρό ποσό</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Μήνυμα</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Σχόλιο:</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID Συναλλαγής:</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 7 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Πρέπει να περιμένετε 7 μπλοκ πριν μπορέσετε να χρησιμοποιήσετε τα νομίσματα που έχετε δημιουργήσει. Το μπλοκ που δημιουργήσατε μεταδόθηκε στο δίκτυο για να συμπεριληφθεί στην αλυσίδα των μπλοκ. Αν δεν μπει σε αυτή θα μετατραπεί σε &quot;μη αποδεκτό&quot; και δε θα μπορεί να καταναλωθεί. Αυτό συμβαίνει σπάνια όταν κάποιος άλλος κόμβος δημιουργήσει ένα μπλοκ λίγα δευτερόλεπτα πριν από εσάς.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Πληροφορίες αποσφαλμάτωσης</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Συναλλαγή</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>εισροές </translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>αληθής</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>αναληθής </translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, δεν έχει ακόμα μεταδοθεί μ&apos; επιτυχία</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>άγνωστο</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Λεπτομέρειες συναλλαγής</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the 
transaction</source> <translation>Αυτό το παράθυρο δείχνει μια λεπτομερή περιγραφή της συναλλαγής</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Τύπος</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Χωρίς σύνδεση (%1 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Χωρίς επιβεβαίωση (%1 από %2 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Επικυρωμένη (%1 επικυρώσεις)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Αυτό το μπλοκ δεν έχει παραληφθεί από κανέναν άλλο κόμβο και κατά πάσα 
πιθανότητα θα απορριφθεί!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Δημιουργήθηκε αλλά απορρίφθηκε</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Παραλαβή με</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Ελήφθη από</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Αποστολή προς</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Πληρωμή προς εσάς</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Εξόρυξη</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(δ/α)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Κατάσταση συναλλαγής. 
Πηγαίνετε το ποντίκι πάνω από αυτό το πεδίο για να δείτε τον αριθμό των επικυρώσεων</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Ημερομηνία κι ώρα λήψης της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Είδος συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Διεύθυνση αποστολής της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Ποσό που αφαιρέθηκε ή προστέθηκε στο υπόλοιπο.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Όλα</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Σήμερα</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Αυτή την εβδομάδα</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Αυτόν τον μήνα</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Τον προηγούμενο μήνα</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Αυτό το έτος</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Έκταση...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Ελήφθη με</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Απεστάλη προς</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Προς εσάς</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>Εξόρυξη</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Άλλο</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Αναζήτηση με βάση τη διεύθυνση ή την επιγραφή</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Ελάχιστο ποσό</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Αντιγραφή επιγραφής</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Αντιγραφή ποσού</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Αντιγραφη του ID Συναλλαγής</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Επεξεργασία επιγραφής</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Εμφάνιση λεπτομερειών συναλλαγής</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Εξαγωγή Στοιχείων Συναλλαγών</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Επικυρωμένες</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Τύπος</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Επιγραφή</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> 
<message> <location line="+1"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Σφάλμα εξαγωγής</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Έκταση:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>έως</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Αρχεία δεδομένων πορτοφολιού (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Αποτυχία κατά τη δημιουργία αντιγράφου</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση των δεδομένων πορτοφολιού στη νέα τοποθεσία.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Η δημιουργια αντιγραφου ασφαλειας 
πετυχε</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Τα δεδομένα πορτοφόλιου αποθηκεύτηκαν με επιτυχία στη νέα θέση. </translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Number7 version</source> <translation>Έκδοση Number7</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or number7d</source> <translation>Αποστολή εντολής στον εξυπηρετητή ή στο number7d</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Λίστα εντολών</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Επεξήγηση εντολής</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Επιλογές:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: number7.conf)</source><|fim▁hole|> </message> <message> <location line="+3"/> <source>Specify pid file (default: number7d.pid)</source> <translation>Ορίστε αρχείο pid (προεπιλογή: number7d.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Ορισμός φακέλου δεδομένων</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Όρισε το μέγεθος της βάσης προσωρινής αποθήκευσης σε megabytes(προεπιλογή:25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 6093 or testnet: 16093)</source> <translation>Εισερχόμενες συνδέσεις στη θύρα &lt;port&gt; (προεπιλογή: 6093 ή στο testnet: 16093)</translation> </message> <message> <location line="+5"/> 
<source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Μέγιστες αριθμός συνδέσεων με τους peers &lt;n&gt; (προεπιλογή: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Σύνδεση σε έναν κόμβο για την ανάκτηση διευθύνσεων από ομοτίμους, και αποσυνδέσh</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Διευκρινίστε τη δικιά σας δημόσια διεύθυνση.</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Όριο αποσύνδεσης προβληματικών peers (προεπιλογή: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Δευτερόλεπτα πριν επιτραπεί ξανά η σύνδεση των προβληματικών peers (προεπιλογή: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η πόρτα RPC %u για αναμονή IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 6094 or testnet: 16094)</source> <translation>Εισερχόμενες συνδέσεις JSON-RPC στη θύρα &lt;port&gt; (προεπιλογή: 6094 or testnet: 16094)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Αποδοχή εντολών κονσόλας και JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Εκτέλεση στο παρασκήνιο κι αποδοχή εντολών</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Χρήση του 
δοκιμαστικού δικτύου</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Να δέχεσαι συνδέσεις από έξω(προεπιλογή:1)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=number7rpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Number7 Alert&quot; [email protected] </source> <translation>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=number7rpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Number7 Alert&quot; [email protected] </translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η υποδοχη RPC %u για αναμονη του IPv6, επεσε πισω στο IPv4:%s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Αποθηκευση σε συγκεκριμένη διεύθυνση. Χρησιμοποιήστε τα πλήκτρα [Host] : συμβολισμός θύρα για IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. 
Number7 is probably already running.</source> <translation>Αδυναμία κλειδώματος του φακέλου δεδομένων %s. Πιθανώς το Number7 να είναι ήδη ενεργό.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απορρίφθηκε. Αυτό ίσως οφείλεται στο ότι τα νομίσματά σας έχουν ήδη ξοδευτεί, π.χ. με την αντιγραφή του wallet.dat σε άλλο σύστημα και την χρήση τους εκεί, χωρίς η συναλλαγή να έχει καταγραφεί στο παρόν σύστημα.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Σφάλμα: Αυτή η συναλλαγή απαιτεί αμοιβή συναλλαγής τουλάχιστον %s λόγω του μεγέθους, πολυπλοκότητας ή της χρήσης πρόσφατης παραλαβής κεφαλαίου</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Εκτέλεση της εντολής όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Ορίστε το μέγιστο μέγεθος των high-priority/low-fee συναλλαγων σε bytes (προεπιλογή: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for 
mining or merchant applications</source> <translation>Αυτό είναι ένα προ-τεστ κυκλοφορίας - χρησιμοποιήστε το με δική σας ευθύνη - δεν χρησιμοποιείτε για εξόρυξη ή για αλλες εφαρμογές</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Προειδοποίηση: Η παράμετρος -paytxfee είναι πολύ υψηλή. Πρόκειται για την αμοιβή που θα πληρώνετε για κάθε συναλλαγή που θα στέλνετε.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Προειδοποίηση: Εμφανίσεις συναλλαγων δεν μπορεί να είναι σωστες! Μπορεί να χρειαστεί να αναβαθμίσετε, ή άλλοι κόμβοι μπορεί να χρειαστεί να αναβαθμίστουν. </translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Number7 will not work properly.</source> <translation>Προειδοποίηση: Παρακαλώ βεβαιωθείτε πως η ημερομηνία κι ώρα του συστήματός σας είναι σωστές. Αν το ρολόι του υπολογιστή σας πάει λάθος, ενδέχεται να μη λειτουργεί σωστά το Number7.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Προειδοποίηση : Σφάλμα wallet.dat κατα την ανάγνωση ! Όλα τα κλειδιά αναγνωρισθηκαν σωστά, αλλά τα δεδομένα των συναλλαγών ή καταχωρήσεις στο βιβλίο διευθύνσεων μπορεί να είναι ελλιπείς ή λανθασμένα. </translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Προειδοποίηση : το αρχειο wallet.dat ειναι διεφθαρμένο, τα δεδομένα σώζονται ! Original wallet.dat αποθηκεύονται ως πορτοφόλι { timestamp } bak στο % s ? . . Αν το υπόλοιπο του ή τις συναλλαγές σας, είναι λάθος θα πρέπει να επαναφέρετε από ένα αντίγραφο ασφαλείας</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Προσπάθεια για ανακτησει ιδιωτικων κλειδιων από ενα διεφθαρμένο αρχειο wallet.dat </translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Αποκλεισμός επιλογων δημιουργίας: </translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Σύνδεση μόνο με ορισμένους κόμβους</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Εντοπισθηκε διεφθαρμενη βαση δεδομενων των μπλοκ</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Ανακαλύψτε την δικη σας IP διεύθυνση (προεπιλογή: 1 όταν ακούει και δεν - externalip) </translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Θελετε να δημιουργηθει τωρα η βαση δεδομενων του μπλοκ? 
</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων μπλοκ</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων πορτοφόλιου %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Σφάλμα φορτωσης της βασης δεδομενων των μπλοκ</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Προειδοποίηση: Χαμηλός χώρος στο δίσκο </translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Σφάλμα: το πορτοφόλι είναι κλειδωμένο, δεν μπορεί να δημιουργηθεί συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Λάθος: λάθος συστήματος:</translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>ταλαιπωρηθειτε για να ακούσετε σε οποιαδήποτε θύρα. 
Χρήση - ακούστε = 0 , αν θέλετε αυτό.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Αποτυχία αναγνωσης των block πληροφοριων</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Η αναγνωση του μπλοκ απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ο συγχρονισμος του μπλοκ ευρετηριου απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Η δημιουργια του μπλοκ ευρετηριου απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Η δημιουργια των μπλοκ πληροφοριων απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Η δημιουργια του μπλοκ απετυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Αδυναμία εγγραφής πληροφοριων αρχειου</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Αποτυχία εγγραφής στη βάση δεδομένων νομίσματος</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Αποτυχία εγγραφής δείκτη συναλλαγών </translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Αποτυχία εγγραφής αναίρεσης δεδομένων </translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Βρες ομότιμους υπολογιστές χρησιμοποιώντας αναζήτηση DNS(προεπιλογή:1)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Δημιουργία νομισμάτων (προκαθορισμος: 0)</translation> </message> <message> <location line="+2"/> 
<source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Πόσα μπλοκ να ελέγχθουν κατά την εκκίνηση (προεπιλογή:288,0=όλα)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Πόσο εξονυχιστική να είναι η επιβεβαίωση του μπλοκ(0-4, προεπιλογή:3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation>Δεν ειναι αρκετες περιγραφες αρχείων διαθέσιμες.</translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Ορίσμος του αριθμόυ θεματων στην υπηρεσία κλήσεων RPC (προεπιλογή: 4) </translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Επαλήθευση των μπλοκ... </translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Επαλήθευση πορτοφολιου... 
</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Ορίσμος του αριθμό των νημάτων ελέγχου σεναρίου (μέχρι 16, 0 = auto, &lt;0 = αφήνουν τους πολλους πυρήνες δωρεάν, default: 0)</translation> </message> <message> <location line="+77"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Διατηρήση ένος πλήρες ευρετήριου συναλλαγών (προεπιλογή: 0) </translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Μέγιστος buffer λήψης ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Μέγιστος buffer αποστολής ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 1000)</translation> </message> <message> <location line="+2"/> <source>Only 
accept block chain matching built-in checkpoints (default: 1)</source> <translation>Μονο αποδοχη αλυσίδας μπλοκ που ταιριάζει με τα ενσωματωμένα σημεία ελέγχου (προεπιλογή: 1) </translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation> Συνδέση μόνο σε κόμβους του δικτύου &lt;net&gt; (IPv4, IPv6 ή Tor) </translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Χρονοσφραγίδα πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Number7 Wiki for SSL setup instructions)</source> <translation>Ρυθμίσεις SSL: (ανατρέξτε στο Number7 Wiki για οδηγίες ρυθμίσεων SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Επιλέξτε την έκδοση του διαμεσολαβητη για να χρησιμοποιήσετε (4-5 , προεπιλογή: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στην κονσόλα αντί του αρχείου debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στον debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Ορίσμος του μέγιστου μέγεθος block σε bytes 
(προεπιλογή: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Ορίστε το μέγιστο μέγεθος block σε bytes (προεπιλογή: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Συρρίκνωση του αρχείο debug.log κατα την εκκίνηση του πελάτη (προεπιλογή: 1 όταν δεν-debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Η υπογραφή συναλλαγής απέτυχε </translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Ορισμός λήξης χρονικού ορίου σε χιλιοστά του δευτερολέπτου(προεπιλογή:5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Λάθος Συστήματος:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation>Το ποσό της συναλλαγής είναι πολύ μικρο </translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Τα ποσά των συναλλαγών πρέπει να είναι θετικα</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Η συναλλαγή ειναι πολύ μεγάλη </translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:1)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Χρήση 
διακομιστή μεσολάβησης για την επίτευξη των Tor κρυμμένων υπηρεσιων (προεπιλογή: ίδιο με το-proxy) </translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Όνομα χρήστη για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Προειδοποίηση: Αυτή η έκδοση είναι ξεπερασμένη, απαιτείται αναβάθμιση </translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Θα πρέπει να ξαναχτίστουν οι βάσεις δεδομένων που χρησιμοποιούντε-Αναδημιουργία αλλάγων-txindex </translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>Το αρχειο wallet.dat ειναι διεφθαρμένο, η διάσωση απέτυχε</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Κωδικός για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Αποδοχή συνδέσεων JSON-RPC από συγκεκριμένη διεύθυνση IP</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Αποστολή εντολών στον κόμβο &lt;ip&gt; (προεπιλογή: 127.0.0.1)</translation> </message> <message> <location line="-7"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Αναβάθμισε το 
πορτοφόλι στην τελευταία έκδοση</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Όριο πλήθους κλειδιών pool &lt;n&gt; (προεπιλογή: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Επανέλεγχος της αλυσίδας μπλοκ για απούσες συναλλαγές</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Χρήση του OpenSSL (https) για συνδέσεις JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Αρχείο πιστοποιητικού του διακομιστή (προεπιλογή: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Προσωπικό κλειδί του διακομιστή (προεπιλογή: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Αποδεκτά κρυπτογραφήματα (προεπιλογή: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Αυτό το κείμενο βοήθειας</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή (bind returned error %d, %s) </translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Σύνδεση μέσω διαμεσολαβητή socks</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Να επιτρέπονται οι έλεγχοι DNS για προσθήκη και σύνδεση κόμβων</translation> </message> 
<message> <location line="+55"/> <source>Loading addresses...</source> <translation>Φόρτωση διευθύνσεων...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Σφάλμα φόρτωσης wallet.dat: Κατεστραμμένο Πορτοφόλι</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Number7</source> <translation>Σφάλμα φόρτωσης wallet.dat: Το Πορτοφόλι απαιτεί μια νεότερη έκδοση του Number7</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Number7 to complete</source> <translation>Απαιτείται η επανεγγραφή του Πορτοφολιού, η οποία θα ολοκληρωθεί στην επανεκκίνηση του Number7</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Σφάλμα φόρτωσης αρχείου wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Άγνωστo δίκτυο ορίζεται σε onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Άγνωστo δίκτυο ορίζεται: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη 
έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Λάθος ποσότητα</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Ανεπαρκές κεφάλαιο</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Φόρτωση ευρετηρίου μπλοκ...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Προσέθεσε ένα κόμβο για σύνδεση και προσπάθησε να κρατήσεις την σύνδεση ανοιχτή</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Number7 is probably already running.</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή. Το Number7 είναι πιθανώς ήδη ενεργό.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Αμοιβή ανά KB που θα προστίθεται στις συναλλαγές που στέλνεις</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Φόρτωση πορτοφολιού...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Δεν μπορώ να υποβαθμίσω το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Ανίχνευση...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Η φόρτωση ολοκληρώθηκε</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Χρήση της %s επιλογής</translation> </message> <message> <location 
line="-74"/> <source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Πρέπει να βάλεις ένα κωδικό στο αρχείο παραμέτρων: %s Εάν το αρχείο δεν υπάρχει, δημιούργησε το με δικαιώματα μόνο για ανάγνωση από τον δημιουργό</translation> </message> </context> </TS><|fim▁end|>
<translation>Ορίστε αρχείο ρυθμίσεων (προεπιλογή: number7.conf)</translation>
<|file_name|>table.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap; use rustc_serialize::json::Json; #[derive(Debug)] pub enum TableError { TableDoesNotExist, TableAlreadyExists, KeyAlreadyPresent, KeyDoesNotExist, }<|fim▁hole|> data: HashMap<String, HashMap<String, Json>> } impl Tables { pub fn new() -> Tables { Tables { data: HashMap::new() } } pub fn create_table(&mut self, table: &str) -> Result<(), TableError> { if (self.data.contains_key(table)) { Err(TableError::TableAlreadyExists) } else { self.data.insert(table.to_string(), HashMap::new()); Ok(()) } } fn get_table(&self, table: &str) -> Result<&HashMap<String, Json>, TableError> { match self.data.get(table) { Some(table) => Ok(table), None => Err(TableError::TableDoesNotExist), } } fn get_mut_table(&mut self, table: &str) -> Result<&mut HashMap<String, Json>, TableError> { match self.data.get_mut(table) { Some(table) => Ok(table), None => Err(TableError::TableDoesNotExist), } } pub fn put(&mut self, table: &str, key: &str, data: Json) -> Result<(), TableError> { let table = try!(self.get_mut_table(table)); if (table.contains_key(key)) { Err(TableError::KeyAlreadyPresent) } else { table.insert(key.to_string(), data); Ok(()) } } pub fn get(&self, table: &str, key: &str) -> Result<Json, TableError> { let table = try!(self.get_table(table)); match table.get(key) { Some(json) => Ok(json.clone()), None => Err(TableError::KeyDoesNotExist), } } pub fn delete_key(&mut self, table: &str, key: &str) -> Result<(), TableError> { let table = try!(self.get_mut_table(table)); match table.remove(key) { Some(_) => Ok(()), None => Err(TableError::KeyDoesNotExist), } } pub fn delete_table(&mut self, table: &str) -> Result<(), TableError> { match self.data.remove(table) { Some(_) => Ok(()), None => Err(TableError::TableDoesNotExist), } } }<|fim▁end|>
pub struct Tables {
<|file_name|>isis3dataset.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************** * * Project: ISIS Version 3 Driver * Purpose: Implementation of ISIS3Dataset * Author: Trent Hare ([email protected]) * Frank Warmerdam ([email protected]) * Even Rouault (even.rouault at spatialys.com) * * NOTE: Original code authored by Trent and placed in the public domain as * per US government policy. I have (within my rights) appropriated it and * placed it under the following license. This is not intended to diminish * Trents contribution. ****************************************************************************** * Copyright (c) 2007, Frank Warmerdam <[email protected]> * Copyright (c) 2009-2010, Even Rouault <even.rouault at spatialys.com> * Copyright (c) 2017 Hobu Inc * Copyright (c) 2017, Dmitry Baryshnikov <[email protected]> * Copyright (c) 2017, NextGIS <[email protected]> * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
****************************************************************************/ #include "cpl_json.h" #include "cpl_string.h" #include "cpl_time.h" #include "cpl_vsi_error.h" #include "gdal_frmts.h" #include "gdal_proxy.h" #include "nasakeywordhandler.h" #include "ogrgeojsonreader.h" #include "ogr_spatialref.h" #include "rawdataset.h" #include "vrtdataset.h" #include "cpl_safemaths.hpp" // For gethostname() #ifdef _WIN32 #include <winsock2.h> #else #include <unistd.h> #endif #include <algorithm> #include <map> #include <utility> // pair #include <vector> // Constants coming from ISIS3 source code // in isis/src/base/objs/SpecialPixel/SpecialPixel.h //There are several types of special pixels // * Isis::Null Pixel has no data available // * Isis::Lis Pixel was saturated on the instrument // * Isis::His Pixel was saturated on the instrument // * Isis::Lrs Pixel was saturated during a computation // * Isis::Hrs Pixel was saturated during a computation // 1-byte special pixel values const unsigned char NULL1 = 0; const unsigned char LOW_REPR_SAT1 = 0; const unsigned char LOW_INSTR_SAT1 = 0; const unsigned char HIGH_INSTR_SAT1 = 255; const unsigned char HIGH_REPR_SAT1 = 255; // 2-byte unsigned special pixel values const unsigned short NULLU2 = 0; const unsigned short LOW_REPR_SATU2 = 1; const unsigned short LOW_INSTR_SATU2 = 2; const unsigned short HIGH_INSTR_SATU2 = 65534; const unsigned short HIGH_REPR_SATU2 = 65535; // 2-byte signed special pixel values const short NULL2 = -32768; const short LOW_REPR_SAT2 = -32767; const short LOW_INSTR_SAT2 = -32766; const short HIGH_INSTR_SAT2 = -32765; const short HIGH_REPR_SAT2 = -32764; // Define 4-byte special pixel values for IEEE floating point const float NULL4 = -3.4028226550889045e+38f; // 0xFF7FFFFB; const float LOW_REPR_SAT4 = -3.4028228579130005e+38f; // 0xFF7FFFFC; const float LOW_INSTR_SAT4 = -3.4028230607370965e+38f; // 0xFF7FFFFD; const float HIGH_INSTR_SAT4 = -3.4028232635611926e+38f; // 0xFF7FFFFE; const float 
HIGH_REPR_SAT4 = -3.4028234663852886e+38f; // 0xFF7FFFFF; // Must be large enough to hold an integer static const char* const pszSTARTBYTE_PLACEHOLDER = "!*^STARTBYTE^*!"; // Must be large enough to hold an integer static const char* const pszLABEL_BYTES_PLACEHOLDER = "!*^LABEL_BYTES^*!"; // Must be large enough to hold an integer static const char* const pszHISTORY_STARTBYTE_PLACEHOLDER = "!*^HISTORY_STARTBYTE^*!"; CPL_CVSID("$Id: isis3dataset.cpp c04e0ea92cfb521061e84b9c3ba75b0e30345ffd 2020-07-02 22:27:16 +0200 Even Rouault $") /************************************************************************/ /* ==================================================================== */ /* ISISDataset */ /* ==================================================================== */ /************************************************************************/ class ISIS3Dataset final: public RawDataset { friend class ISIS3RawRasterBand; friend class ISISTiledBand; friend class ISIS3WrapperRasterBand; class NonPixelSection { public: CPLString osSrcFilename; CPLString osDstFilename; // empty for same file vsi_l_offset nSrcOffset; vsi_l_offset nSize; CPLString osPlaceHolder; // empty if not same file }; VSILFILE *m_fpLabel; // label file (only used for writing) VSILFILE *m_fpImage; // image data file. 
May be == fpLabel GDALDataset *m_poExternalDS; // external dataset (GeoTIFF) bool m_bGeoTIFFAsRegularExternal; // creation only bool m_bGeoTIFFInitDone; // creation only CPLString m_osExternalFilename; bool m_bIsLabelWritten; // creation only bool m_bIsTiled; bool m_bInitToNodata; // creation only NASAKeywordHandler m_oKeywords; bool m_bGotTransform; double m_adfGeoTransform[6]; bool m_bHasSrcNoData; // creation only double m_dfSrcNoData; // creation only OGRSpatialReference m_oSRS; // creation only variables CPLString m_osComment; CPLString m_osLatitudeType; CPLString m_osLongitudeDirection; CPLString m_osTargetName; bool m_bForce360; bool m_bWriteBoundingDegrees; CPLString m_osBoundingDegrees; CPLJSONObject m_oJSonLabel; CPLString m_osHistory; // creation only bool m_bUseSrcLabel; // creation only bool m_bUseSrcMapping; // creation only bool m_bUseSrcHistory; // creation only bool m_bAddGDALHistory; // creation only CPLString m_osGDALHistory; // creation only std::vector<NonPixelSection> m_aoNonPixelSections; // creation only CPLJSONObject m_oSrcJSonLabel; // creation only CPLStringList m_aosISIS3MD; CPLStringList m_aosAdditionalFiles; CPLString m_osFromFilename; // creation only RawBinaryLayout m_sLayout{}; const char *GetKeyword( const char *pszPath, const char *pszDefault = ""); double FixLong( double dfLong ); void BuildLabel(); void BuildHistory(); void WriteLabel(); void InvalidateLabel(); static CPLString SerializeAsPDL( const CPLJSONObject& oObj ); static void SerializeAsPDL( VSILFILE* fp, const CPLJSONObject& oObj, int nDepth = 0 ); public: ISIS3Dataset(); virtual ~ISIS3Dataset(); virtual int CloseDependentDatasets() override; virtual CPLErr GetGeoTransform( double * padfTransform ) override; virtual CPLErr SetGeoTransform( double * padfTransform ) override; const OGRSpatialReference* GetSpatialRef() const override; CPLErr SetSpatialRef(const OGRSpatialReference* poSRS) override; virtual char **GetFileList() override; virtual char 
**GetMetadataDomainList() override; virtual char **GetMetadata( const char* pszDomain = "" ) override; virtual CPLErr SetMetadata( char** papszMD, const char* pszDomain = "" ) override; bool GetRawBinaryLayout(GDALDataset::RawBinaryLayout&) override; static int Identify( GDALOpenInfo * ); static GDALDataset *Open( GDALOpenInfo * ); static GDALDataset *Create( const char * pszFilename, int nXSize, int nYSize, int nBands, GDALDataType eType, char ** papszOptions ); static GDALDataset* CreateCopy( const char *pszFilename, GDALDataset *poSrcDS, int bStrict, char ** papszOptions, GDALProgressFunc pfnProgress, void * pProgressData ); }; /************************************************************************/ /* ==================================================================== */ /* ISISTiledBand */ /* ==================================================================== */ /************************************************************************/ class ISISTiledBand final: public GDALPamRasterBand { friend class ISIS3Dataset; VSILFILE *m_fpVSIL; GIntBig m_nFirstTileOffset; GIntBig m_nXTileOffset; GIntBig m_nYTileOffset; int m_bNativeOrder; bool m_bHasOffset; bool m_bHasScale; double m_dfOffset; double m_dfScale; double m_dfNoData; public: ISISTiledBand( GDALDataset *poDS, VSILFILE *fpVSIL, int nBand, GDALDataType eDT, int nTileXSize, int nTileYSize, GIntBig nFirstTileOffset, GIntBig nXTileOffset, GIntBig nYTileOffset, int bNativeOrder ); virtual ~ISISTiledBand() {} virtual CPLErr IReadBlock( int, int, void * ) override; virtual CPLErr IWriteBlock( int, int, void * ) override; virtual double GetOffset( int *pbSuccess = nullptr ) override; virtual double GetScale( int *pbSuccess = nullptr ) override; virtual CPLErr SetOffset( double dfNewOffset ) override; virtual CPLErr SetScale( double dfNewScale ) override; virtual double GetNoDataValue( int *pbSuccess = nullptr ) override; virtual CPLErr SetNoDataValue( double dfNewNoData ) override; void 
SetMaskBand(GDALRasterBand* poMaskBand); }; /************************************************************************/ /* ==================================================================== */ /* ISIS3RawRasterBand */ /* ==================================================================== */ /************************************************************************/ class ISIS3RawRasterBand final: public RawRasterBand { friend class ISIS3Dataset; bool m_bHasOffset; bool m_bHasScale; double m_dfOffset; double m_dfScale; double m_dfNoData; public: ISIS3RawRasterBand( GDALDataset *l_poDS, int l_nBand, VSILFILE * l_fpRaw, vsi_l_offset l_nImgOffset, int l_nPixelOffset, int l_nLineOffset, GDALDataType l_eDataType, int l_bNativeOrder ); virtual ~ISIS3RawRasterBand() {} virtual CPLErr IReadBlock( int, int, void * ) override; virtual CPLErr IWriteBlock( int, int, void * ) override; virtual CPLErr IRasterIO( GDALRWFlag, int, int, int, int, void *, int, int, GDALDataType, GSpacing nPixelSpace, GSpacing nLineSpace, GDALRasterIOExtraArg* psExtraArg ) override; virtual double GetOffset( int *pbSuccess = nullptr ) override; virtual double GetScale( int *pbSuccess = nullptr ) override; virtual CPLErr SetOffset( double dfNewOffset ) override; virtual CPLErr SetScale( double dfNewScale ) override; virtual double GetNoDataValue( int *pbSuccess = nullptr ) override; virtual CPLErr SetNoDataValue( double dfNewNoData ) override; void SetMaskBand(GDALRasterBand* poMaskBand); }; /************************************************************************/ /* ==================================================================== */ /* ISIS3WrapperRasterBand */ /* */ /* proxy for bands stored in other formats. 
*/ /* ==================================================================== */ /************************************************************************/ class ISIS3WrapperRasterBand final: public GDALProxyRasterBand { friend class ISIS3Dataset; GDALRasterBand* m_poBaseBand; bool m_bHasOffset; bool m_bHasScale; double m_dfOffset; double m_dfScale; double m_dfNoData; protected: virtual GDALRasterBand* RefUnderlyingRasterBand() override { return m_poBaseBand; } public: explicit ISIS3WrapperRasterBand( GDALRasterBand* poBaseBandIn ); ~ISIS3WrapperRasterBand() {} void InitFile(); virtual CPLErr Fill(double dfRealValue, double dfImaginaryValue = 0) override; virtual CPLErr IWriteBlock( int, int, void * ) override; virtual CPLErr IRasterIO( GDALRWFlag, int, int, int, int, void *, int, int, GDALDataType, GSpacing nPixelSpace, GSpacing nLineSpace, GDALRasterIOExtraArg* psExtraArg ) override; virtual double GetOffset( int *pbSuccess = nullptr ) override; virtual double GetScale( int *pbSuccess = nullptr ) override; virtual CPLErr SetOffset( double dfNewOffset ) override; virtual CPLErr SetScale( double dfNewScale ) override; virtual double GetNoDataValue( int *pbSuccess = nullptr ) override; virtual CPLErr SetNoDataValue( double dfNewNoData ) override; int GetMaskFlags() override { return nMaskFlags; } GDALRasterBand* GetMaskBand() override { return poMask; } void SetMaskBand(GDALRasterBand* poMaskBand); }; /************************************************************************/ /* ==================================================================== */ /* ISISMaskBand */ /* ==================================================================== */ class ISISMaskBand final: public GDALRasterBand { GDALRasterBand *m_poBaseBand; void *m_pBuffer; public: explicit ISISMaskBand( GDALRasterBand* poBaseBand ); ~ISISMaskBand(); virtual CPLErr IReadBlock( int, int, void * ) override; }; /************************************************************************/ /* ISISTiledBand() */ 
/************************************************************************/ ISISTiledBand::ISISTiledBand( GDALDataset *poDSIn, VSILFILE *fpVSILIn, int nBandIn, GDALDataType eDT, int nTileXSize, int nTileYSize, GIntBig nFirstTileOffsetIn, GIntBig nXTileOffsetIn, GIntBig nYTileOffsetIn, int bNativeOrderIn ) : m_fpVSIL(fpVSILIn), m_nFirstTileOffset(0), m_nXTileOffset(nXTileOffsetIn), m_nYTileOffset(nYTileOffsetIn), m_bNativeOrder(bNativeOrderIn), m_bHasOffset(false), m_bHasScale(false), m_dfOffset(0.0), m_dfScale(1.0), m_dfNoData(0.0) { poDS = poDSIn; nBand = nBandIn; eDataType = eDT; nBlockXSize = nTileXSize; nBlockYSize = nTileYSize; nRasterXSize = poDSIn->GetRasterXSize(); nRasterYSize = poDSIn->GetRasterYSize(); const int l_nBlocksPerRow = DIV_ROUND_UP(nRasterXSize, nBlockXSize); const int l_nBlocksPerColumn = DIV_ROUND_UP(nRasterYSize, nBlockYSize); if( m_nXTileOffset == 0 && m_nYTileOffset == 0 ) { m_nXTileOffset = static_cast<GIntBig>(GDALGetDataTypeSizeBytes(eDT)) * nTileXSize; if( m_nXTileOffset > GINTBIG_MAX / nTileYSize ) { CPLError(CE_Failure, CPLE_AppDefined, "Integer overflow"); return; } m_nXTileOffset *= nTileYSize; if( m_nXTileOffset > GINTBIG_MAX / l_nBlocksPerRow ) { CPLError(CE_Failure, CPLE_AppDefined, "Integer overflow"); return; } m_nYTileOffset = m_nXTileOffset * l_nBlocksPerRow; } m_nFirstTileOffset = nFirstTileOffsetIn; if( nBand > 1 ) { if( m_nYTileOffset > GINTBIG_MAX / (nBand - 1) || (nBand-1) * m_nYTileOffset > GINTBIG_MAX / l_nBlocksPerColumn || m_nFirstTileOffset > GINTBIG_MAX - (nBand-1) * m_nYTileOffset * l_nBlocksPerColumn ) { CPLError(CE_Failure, CPLE_AppDefined, "Integer overflow"); return; } m_nFirstTileOffset += (nBand-1) * m_nYTileOffset * l_nBlocksPerColumn; } } /************************************************************************/ /* IReadBlock() */ /************************************************************************/ CPLErr ISISTiledBand::IReadBlock( int nXBlock, int nYBlock, void *pImage ) { ISIS3Dataset* poGDS = 
reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_osExternalFilename.empty() ) { if( !poGDS->m_bIsLabelWritten ) poGDS->WriteLabel(); } const GIntBig nOffset = m_nFirstTileOffset + nXBlock * m_nXTileOffset + nYBlock * m_nYTileOffset; const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); const size_t nBlockSize = static_cast<size_t>(nDTSize) * nBlockXSize * nBlockYSize; if( VSIFSeekL( m_fpVSIL, nOffset, SEEK_SET ) != 0 ) { CPLError( CE_Failure, CPLE_FileIO, "Failed to seek to offset %d to read tile %d,%d.", static_cast<int>( nOffset ), nXBlock, nYBlock ); return CE_Failure; } if( VSIFReadL( pImage, 1, nBlockSize, m_fpVSIL ) != nBlockSize ) { CPLError( CE_Failure, CPLE_FileIO, "Failed to read %d bytes for tile %d,%d.", static_cast<int>( nBlockSize ), nXBlock, nYBlock ); return CE_Failure; } if( !m_bNativeOrder && eDataType != GDT_Byte ) GDALSwapWords( pImage, nDTSize, nBlockXSize*nBlockYSize, nDTSize ); return CE_None; } /************************************************************************/ /* RemapNoDataT() */ /************************************************************************/ template<class T> static void RemapNoDataT( T* pBuffer, int nItems, T srcNoData, T dstNoData ) { for( int i = 0; i < nItems; i++ ) { if( pBuffer[i] == srcNoData ) pBuffer[i] = dstNoData; } } /************************************************************************/ /* RemapNoData() */ /************************************************************************/ static void RemapNoData( GDALDataType eDataType, void* pBuffer, int nItems, double dfSrcNoData, double dfDstNoData ) { if( eDataType == GDT_Byte ) { RemapNoDataT( reinterpret_cast<GByte*>(pBuffer), nItems, static_cast<GByte>(dfSrcNoData), static_cast<GByte>(dfDstNoData) ); } else if( eDataType == GDT_UInt16 ) { RemapNoDataT( reinterpret_cast<GUInt16*>(pBuffer), nItems, static_cast<GUInt16>(dfSrcNoData), static_cast<GUInt16>(dfDstNoData) ); } else if( eDataType == GDT_Int16) { RemapNoDataT( 
reinterpret_cast<GInt16*>(pBuffer), nItems, static_cast<GInt16>(dfSrcNoData), static_cast<GInt16>(dfDstNoData) ); } else { CPLAssert( eDataType == GDT_Float32 ); RemapNoDataT( reinterpret_cast<float*>(pBuffer), nItems, static_cast<float>(dfSrcNoData), static_cast<float>(dfDstNoData) ); } } /** * Get or create CPLJSONObject. * @param oParent Parent CPLJSONObject. * @param osKey Key name. * @return CPLJSONObject class instance. */ static CPLJSONObject GetOrCreateJSONObject(CPLJSONObject &oParent, const std::string &osKey) { CPLJSONObject oChild = oParent[osKey]; if( oChild.IsValid() && oChild.GetType() != CPLJSONObject::Type::Object ) { oParent.Delete( osKey ); oChild.Deinit(); } if( !oChild.IsValid() ) { oChild = CPLJSONObject(); oParent.Add( osKey, oChild ); } return oChild; } /************************************************************************/ /* IReadBlock() */ /************************************************************************/ CPLErr ISISTiledBand::IWriteBlock( int nXBlock, int nYBlock, void *pImage ) { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_osExternalFilename.empty() ) { if( !poGDS->m_bIsLabelWritten ) poGDS->WriteLabel(); } if( poGDS->m_bHasSrcNoData && poGDS->m_dfSrcNoData != m_dfNoData ) { RemapNoData( eDataType, pImage, nBlockXSize * nBlockYSize, poGDS->m_dfSrcNoData, m_dfNoData ); } const GIntBig nOffset = m_nFirstTileOffset + nXBlock * m_nXTileOffset + nYBlock * m_nYTileOffset; const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); const size_t nBlockSize = static_cast<size_t>(nDTSize) * nBlockXSize * nBlockYSize; const int l_nBlocksPerRow = DIV_ROUND_UP(nRasterXSize, nBlockXSize); const int l_nBlocksPerColumn = DIV_ROUND_UP(nRasterYSize, nBlockYSize); // Pad partial blocks to nodata value if( nXBlock == l_nBlocksPerRow - 1 && (nRasterXSize % nBlockXSize) != 0 ) { GByte* pabyImage = static_cast<GByte*>(pImage); int nXStart = nRasterXSize % nBlockXSize; for( int iY = 0; iY < nBlockYSize; iY++ ) { 
GDALCopyWords( &m_dfNoData, GDT_Float64, 0, pabyImage + (iY * nBlockXSize + nXStart) * nDTSize, eDataType, nDTSize, nBlockXSize - nXStart ); } } if( nYBlock == l_nBlocksPerColumn - 1 && (nRasterYSize % nBlockYSize) != 0 ) { GByte* pabyImage = static_cast<GByte*>(pImage); for( int iY = nRasterYSize % nBlockYSize; iY < nBlockYSize; iY++ ) { GDALCopyWords( &m_dfNoData, GDT_Float64, 0, pabyImage + iY * nBlockXSize * nDTSize, eDataType, nDTSize, nBlockXSize ); } } if( VSIFSeekL( m_fpVSIL, nOffset, SEEK_SET ) != 0 ) { CPLError( CE_Failure, CPLE_FileIO, "Failed to seek to offset %d to read tile %d,%d.", static_cast<int>( nOffset ), nXBlock, nYBlock ); return CE_Failure; } if( !m_bNativeOrder && eDataType != GDT_Byte ) GDALSwapWords( pImage, nDTSize, nBlockXSize*nBlockYSize, nDTSize ); if( VSIFWriteL( pImage, 1, nBlockSize, m_fpVSIL ) != nBlockSize ) { CPLError( CE_Failure, CPLE_FileIO, "Failed to write %d bytes for tile %d,%d.", static_cast<int>( nBlockSize ), nXBlock, nYBlock ); return CE_Failure; } if( !m_bNativeOrder && eDataType != GDT_Byte ) GDALSwapWords( pImage, nDTSize, nBlockXSize*nBlockYSize, nDTSize ); return CE_None; } /************************************************************************/ /* SetMaskBand() */ /************************************************************************/ void ISISTiledBand::SetMaskBand(GDALRasterBand* poMaskBand) { bOwnMask = true; poMask = poMaskBand; nMaskFlags = 0; } /************************************************************************/ /* GetOffset() */ /************************************************************************/ double ISISTiledBand::GetOffset( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasOffset; return m_dfOffset; } /************************************************************************/ /* GetScale() */ /************************************************************************/ double ISISTiledBand::GetScale( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasScale; return m_dfScale; } 
/************************************************************************/ /* SetOffset() */ /************************************************************************/ CPLErr ISISTiledBand::SetOffset( double dfNewOffset ) { m_dfOffset = dfNewOffset; m_bHasOffset = true; return CE_None; } /************************************************************************/ /* SetScale() */ /************************************************************************/ CPLErr ISISTiledBand::SetScale( double dfNewScale ) { m_dfScale = dfNewScale; m_bHasScale = true; return CE_None; } /************************************************************************/ /* GetNoDataValue() */ /************************************************************************/ double ISISTiledBand::GetNoDataValue( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = true; return m_dfNoData; } /************************************************************************/ /* SetNoDataValue() */ /************************************************************************/ CPLErr ISISTiledBand::SetNoDataValue( double dfNewNoData ) { m_dfNoData = dfNewNoData; return CE_None; } /************************************************************************/ /* ISIS3RawRasterBand() */ /************************************************************************/ ISIS3RawRasterBand::ISIS3RawRasterBand( GDALDataset *l_poDS, int l_nBand, VSILFILE * l_fpRaw, vsi_l_offset l_nImgOffset, int l_nPixelOffset, int l_nLineOffset, GDALDataType l_eDataType, int l_bNativeOrder ) : RawRasterBand(l_poDS, l_nBand, l_fpRaw, l_nImgOffset, l_nPixelOffset, l_nLineOffset, l_eDataType, l_bNativeOrder, RawRasterBand::OwnFP::NO), m_bHasOffset(false), m_bHasScale(false), m_dfOffset(0.0), m_dfScale(1.0), m_dfNoData(0.0) { } /************************************************************************/ /* IReadBlock() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::IReadBlock( int nXBlock, int nYBlock, void 
*pImage ) { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_osExternalFilename.empty() ) { if( !poGDS->m_bIsLabelWritten ) poGDS->WriteLabel(); } return RawRasterBand::IReadBlock( nXBlock, nYBlock, pImage ); } /************************************************************************/ /* IWriteBlock() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::IWriteBlock( int nXBlock, int nYBlock, void *pImage ) { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_osExternalFilename.empty() ) { if( !poGDS->m_bIsLabelWritten ) poGDS->WriteLabel(); } if( poGDS->m_bHasSrcNoData && poGDS->m_dfSrcNoData != m_dfNoData ) { RemapNoData( eDataType, pImage, nBlockXSize * nBlockYSize, poGDS->m_dfSrcNoData, m_dfNoData ); } return RawRasterBand::IWriteBlock( nXBlock, nYBlock, pImage ); } /************************************************************************/ /* IRasterIO() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::IRasterIO( GDALRWFlag eRWFlag, int nXOff, int nYOff, int nXSize, int nYSize, void * pData, int nBufXSize, int nBufYSize, GDALDataType eBufType, GSpacing nPixelSpace, GSpacing nLineSpace, GDALRasterIOExtraArg* psExtraArg ) { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_osExternalFilename.empty() ) { if( !poGDS->m_bIsLabelWritten ) poGDS->WriteLabel(); } if( eRWFlag == GF_Write && poGDS->m_bHasSrcNoData && poGDS->m_dfSrcNoData != m_dfNoData ) { const int nDTSize = GDALGetDataTypeSizeBytes(eDataType); if( eBufType == eDataType && nPixelSpace == nDTSize && nLineSpace == nPixelSpace * nBufXSize ) { RemapNoData( eDataType, pData, nBufXSize * nBufYSize, poGDS->m_dfSrcNoData, m_dfNoData ); } else { const GByte* pabySrc = reinterpret_cast<GByte*>(pData); GByte* pabyTemp = reinterpret_cast<GByte*>( VSI_MALLOC3_VERBOSE(nDTSize, nBufXSize, nBufYSize)); for( int i = 0; i < nBufYSize; i++ ) { 
GDALCopyWords( pabySrc + i * nLineSpace, eBufType, static_cast<int>(nPixelSpace), pabyTemp + i * nBufXSize * nDTSize, eDataType, nDTSize, nBufXSize ); } RemapNoData( eDataType, pabyTemp, nBufXSize * nBufYSize, poGDS->m_dfSrcNoData, m_dfNoData ); CPLErr eErr = RawRasterBand::IRasterIO( eRWFlag, nXOff, nYOff, nXSize, nYSize, pabyTemp, nBufXSize, nBufYSize, eDataType, nDTSize, nDTSize*nBufXSize, psExtraArg ); VSIFree(pabyTemp); return eErr; } } return RawRasterBand::IRasterIO( eRWFlag, nXOff, nYOff, nXSize, nYSize, pData, nBufXSize, nBufYSize, eBufType, nPixelSpace, nLineSpace, psExtraArg ); } /************************************************************************/ /* SetMaskBand() */ /************************************************************************/ void ISIS3RawRasterBand::SetMaskBand(GDALRasterBand* poMaskBand) { bOwnMask = true; poMask = poMaskBand; nMaskFlags = 0; } /************************************************************************/ /* GetOffset() */ /************************************************************************/ double ISIS3RawRasterBand::GetOffset( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasOffset; return m_dfOffset; } /************************************************************************/ /* GetScale() */ /************************************************************************/ double ISIS3RawRasterBand::GetScale( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasScale; return m_dfScale; } /************************************************************************/ /* SetOffset() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::SetOffset( double dfNewOffset ) { m_dfOffset = dfNewOffset; m_bHasOffset = true; return CE_None; } /************************************************************************/ /* SetScale() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::SetScale( double dfNewScale ) { m_dfScale = 
dfNewScale; m_bHasScale = true; return CE_None; } /************************************************************************/ /* GetNoDataValue() */ /************************************************************************/ double ISIS3RawRasterBand::GetNoDataValue( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = true; return m_dfNoData; } /************************************************************************/ /* SetNoDataValue() */ /************************************************************************/ CPLErr ISIS3RawRasterBand::SetNoDataValue( double dfNewNoData ) { m_dfNoData = dfNewNoData; return CE_None; } /************************************************************************/ /* ISIS3WrapperRasterBand() */ /************************************************************************/ ISIS3WrapperRasterBand::ISIS3WrapperRasterBand( GDALRasterBand* poBaseBandIn ) : m_poBaseBand(poBaseBandIn), m_bHasOffset(false), m_bHasScale(false), m_dfOffset(0.0), m_dfScale(1.0), m_dfNoData(0.0) { eDataType = m_poBaseBand->GetRasterDataType(); m_poBaseBand->GetBlockSize(&nBlockXSize, &nBlockYSize); } /************************************************************************/ /* SetMaskBand() */ /************************************************************************/ void ISIS3WrapperRasterBand::SetMaskBand(GDALRasterBand* poMaskBand) { bOwnMask = true; poMask = poMaskBand; nMaskFlags = 0; } /************************************************************************/ /* GetOffset() */ /************************************************************************/ double ISIS3WrapperRasterBand::GetOffset( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasOffset; return m_dfOffset; } /************************************************************************/ /* GetScale() */ /************************************************************************/ double ISIS3WrapperRasterBand::GetScale( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = m_bHasScale; return m_dfScale; } 
/************************************************************************/ /* SetOffset() */ /************************************************************************/ CPLErr ISIS3WrapperRasterBand::SetOffset( double dfNewOffset ) { m_dfOffset = dfNewOffset; m_bHasOffset = true; ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_poExternalDS && eAccess == GA_Update ) poGDS->m_poExternalDS->GetRasterBand(nBand)->SetOffset(dfNewOffset); return CE_None; } /************************************************************************/ /* SetScale() */ /************************************************************************/ CPLErr ISIS3WrapperRasterBand::SetScale( double dfNewScale ) { m_dfScale = dfNewScale; m_bHasScale = true; ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_poExternalDS && eAccess == GA_Update ) poGDS->m_poExternalDS->GetRasterBand(nBand)->SetScale(dfNewScale); return CE_None; } /************************************************************************/ /* GetNoDataValue() */ /************************************************************************/ double ISIS3WrapperRasterBand::GetNoDataValue( int *pbSuccess ) { if( pbSuccess ) *pbSuccess = true; return m_dfNoData; } /************************************************************************/ /* SetNoDataValue() */ /************************************************************************/ CPLErr ISIS3WrapperRasterBand::SetNoDataValue( double dfNewNoData ) { m_dfNoData = dfNewNoData; ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_poExternalDS && eAccess == GA_Update ) poGDS->m_poExternalDS->GetRasterBand(nBand)->SetNoDataValue(dfNewNoData); return CE_None; } /************************************************************************/ /* InitFile() */ /************************************************************************/ void ISIS3WrapperRasterBand::InitFile() { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( 
poGDS->m_bGeoTIFFAsRegularExternal && !poGDS->m_bGeoTIFFInitDone ) { poGDS->m_bGeoTIFFInitDone = true; const int nBands = poGDS->GetRasterCount(); // We need to make sure that blocks are written in the right order for( int i = 0; i < nBands; i++ ) { poGDS->m_poExternalDS->GetRasterBand(i+1)->Fill(m_dfNoData); } poGDS->m_poExternalDS->FlushCache(); // Check that blocks are effectively written in expected order. const int nBlockSizeBytes = nBlockXSize * nBlockYSize * GDALGetDataTypeSizeBytes(eDataType); GIntBig nLastOffset = 0; bool bGoOn = true; const int l_nBlocksPerRow = DIV_ROUND_UP(nRasterXSize, nBlockXSize); const int l_nBlocksPerColumn = DIV_ROUND_UP(nRasterYSize, nBlockYSize); for( int i = 0; i < nBands && bGoOn; i++ ) { for( int y = 0; y < l_nBlocksPerColumn && bGoOn; y++ ) { for( int x = 0; x < l_nBlocksPerRow && bGoOn; x++ ) { const char* pszBlockOffset = poGDS->m_poExternalDS-> GetRasterBand(i+1)->GetMetadataItem( CPLSPrintf("BLOCK_OFFSET_%d_%d", x, y), "TIFF"); if( pszBlockOffset ) { GIntBig nOffset = CPLAtoGIntBig(pszBlockOffset); if( i != 0 || x != 0 || y != 0 ) { if( nOffset != nLastOffset + nBlockSizeBytes ) { CPLError(CE_Warning, CPLE_AppDefined, "Block %d,%d band %d not at expected " "offset", x, y, i+1); bGoOn = false; poGDS->m_bGeoTIFFAsRegularExternal = false; } } nLastOffset = nOffset; } else { CPLError(CE_Warning, CPLE_AppDefined, "Block %d,%d band %d not at expected " "offset", x, y, i+1); bGoOn = false; poGDS->m_bGeoTIFFAsRegularExternal = false; } } } } } } /************************************************************************/ /* Fill() */ /************************************************************************/ CPLErr ISIS3WrapperRasterBand::Fill(double dfRealValue, double dfImaginaryValue) { ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS); if( poGDS->m_bHasSrcNoData && poGDS->m_dfSrcNoData == dfRealValue ) { dfRealValue = m_dfNoData; } if( poGDS->m_bGeoTIFFAsRegularExternal && !poGDS->m_bGeoTIFFInitDone ) { InitFile(); } 
    return GDALProxyRasterBand::Fill( dfRealValue, dfImaginaryValue );
}

/************************************************************************/
/*                           IWriteBlock()                              */
/************************************************************************/

// IWriteBlock() override: remaps source-nodata pixels to the ISIS3 nodata
// value in the caller's block buffer (in place — NOTE(review): this mutates
// pImage, which block-cache callers appear to tolerate; confirm), then
// lazily initializes the external GeoTIFF before delegating the write.
CPLErr ISIS3WrapperRasterBand::IWriteBlock( int nXBlock, int nYBlock,
                                            void *pImage )
{
    ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS);
    if( poGDS->m_bHasSrcNoData && poGDS->m_dfSrcNoData != m_dfNoData )
    {
        RemapNoData( eDataType, pImage, nBlockXSize * nBlockYSize,
                     poGDS->m_dfSrcNoData, m_dfNoData );
    }
    if( poGDS->m_bGeoTIFFAsRegularExternal && !poGDS->m_bGeoTIFFInitDone )
    {
        InitFile();
    }

    return GDALProxyRasterBand::IWriteBlock( nXBlock, nYBlock, pImage );
}

/************************************************************************/
/*                             IRasterIO()                              */
/************************************************************************/

// IRasterIO() override: on writes, remaps source-nodata pixels to the ISIS3
// nodata value. If the user buffer is contiguous and already in the band's
// data type, the remap is done in place; otherwise the data is repacked into
// a temporary contiguous buffer first.
CPLErr ISIS3WrapperRasterBand::IRasterIO( GDALRWFlag eRWFlag,
                                 int nXOff, int nYOff, int nXSize, int nYSize,
                                 void * pData, int nBufXSize, int nBufYSize,
                                 GDALDataType eBufType,
                                 GSpacing nPixelSpace, GSpacing nLineSpace,
                                 GDALRasterIOExtraArg* psExtraArg )
{
    ISIS3Dataset* poGDS = reinterpret_cast<ISIS3Dataset*>(poDS);
    if( eRWFlag == GF_Write && poGDS->m_bGeoTIFFAsRegularExternal &&
        !poGDS->m_bGeoTIFFInitDone )
    {
        InitFile();
    }
    if( eRWFlag == GF_Write && poGDS->m_bHasSrcNoData &&
        poGDS->m_dfSrcNoData != m_dfNoData )
    {
        const int nDTSize = GDALGetDataTypeSizeBytes(eDataType);
        // Fast path: buffer layout matches the band exactly, remap in place.
        if( eBufType == eDataType && nPixelSpace == nDTSize &&
            nLineSpace == nPixelSpace * nBufXSize )
        {
            RemapNoData( eDataType, pData, nBufXSize * nBufYSize,
                         poGDS->m_dfSrcNoData, m_dfNoData );
        }
        else
        {
            // Slow path: repack into a contiguous temp buffer of the band's
            // native type, remap there, and write that instead.
            // NOTE(review): pabyTemp is not checked for nullptr before use;
            // VSI_MALLOC3_VERBOSE can fail — confirm whether a guard is needed.
            const GByte* pabySrc = reinterpret_cast<GByte*>(pData);
            GByte* pabyTemp = reinterpret_cast<GByte*>(
                VSI_MALLOC3_VERBOSE(nDTSize, nBufXSize, nBufYSize));
            for( int i = 0; i < nBufYSize; i++ )
            {
                GDALCopyWords( pabySrc + i * nLineSpace, eBufType,
                               static_cast<int>(nPixelSpace),
                               pabyTemp + i * nBufXSize * nDTSize,
                               eDataType, nDTSize,
                               nBufXSize );
            }
            RemapNoData( eDataType, pabyTemp, nBufXSize * nBufYSize,
                         poGDS->m_dfSrcNoData, m_dfNoData );
            // Delegate with the temp buffer: note the pixel/line spacings are
            // those of the contiguous temp buffer, not the caller's.
            CPLErr eErr = GDALProxyRasterBand::IRasterIO( eRWFlag,
                                 nXOff, nYOff, nXSize, nYSize,
                                 pabyTemp, nBufXSize, nBufYSize,
                                 eDataType,
                                 nDTSize, nDTSize*nBufXSize,
                                 psExtraArg );
            VSIFree(pabyTemp);
            return eErr;
        }
    }

    // Default path: no remapping needed, pass straight through.
    return GDALProxyRasterBand::IRasterIO( eRWFlag,
                                 nXOff, nYOff, nXSize, nYSize,
                                 pData, nBufXSize, nBufYSize,
                                 eBufType,
                                 nPixelSpace, nLineSpace,
                                 psExtraArg );
}

/************************************************************************/
/*                           ISISMaskBand()                             */
/************************************************************************/

// Mask band constructor: mirrors the base band's dimensions and block
// layout; pixel type of a mask band is always Byte. The scratch buffer is
// allocated lazily in IReadBlock().
ISISMaskBand::ISISMaskBand( GDALRasterBand* poBaseBand )
    : m_poBaseBand(poBaseBand)
    , m_pBuffer(nullptr)
{
    eDataType = GDT_Byte;
    poBaseBand->GetBlockSize(&nBlockXSize, &nBlockYSize);
    nRasterXSize = poBaseBand->GetXSize();
    nRasterYSize = poBaseBand->GetYSize();
}

/************************************************************************/
/*                           ~ISISMaskBand()                            */
/************************************************************************/

// Releases the lazily-allocated scratch buffer (VSIFree accepts nullptr).
ISISMaskBand::~ISISMaskBand()
{
    VSIFree(m_pBuffer);
}

/************************************************************************/
/*                             FillMask()                               */
/************************************************************************/

// Computes a validity mask for one block: pixels equal to any of the five
// ISIS3 special values (null, low/high representation saturation, low/high
// instrument saturation) become 0 (invalid); all others become 255 (valid).
// pvBuffer holds nReqXSize x nReqYSize samples of type T laid out with a
// row stride of nBlockXSize samples; pabyDst uses the same stride.
template<class T>
static void FillMask      (void* pvBuffer,
                           GByte* pabyDst,
                           int nReqXSize, int nReqYSize,
                           int nBlockXSize,
                           T NULL_VAL, T LOW_REPR_SAT, T LOW_INSTR_SAT,
                           T HIGH_INSTR_SAT, T HIGH_REPR_SAT)
{
    const T* pSrc = static_cast<T*>(pvBuffer);
    for( int y = 0; y < nReqYSize; y++ )
    {
        for( int x = 0; x < nReqXSize; x++ )
        {
            const T nSrc = pSrc[y * nBlockXSize + x];
            if( nSrc == NULL_VAL ||
                nSrc == LOW_REPR_SAT || nSrc == LOW_INSTR_SAT ||
                nSrc == HIGH_INSTR_SAT || nSrc == HIGH_REPR_SAT )
            {
                pabyDst[y * nBlockXSize + x] = 0;
            }
            else
            {
                pabyDst[y * nBlockXSize + x] = 255;
            }
        }
    }
}

/************************************************************************/
/*                            IReadBlock()                              */
/************************************************************************/

// Reads one block of the base band into a scratch buffer and derives the
// 0/255 validity mask from the ISIS3 special pixel values, dispatching on
// the base band's data type (Byte, UInt16, Int16, Float32).
CPLErr ISISMaskBand::IReadBlock( int nXBlock, int nYBlock, void *pImage )

{
    const GDALDataType eSrcDT = m_poBaseBand->GetRasterDataType();
    const int nSrcDTSize = GDALGetDataTypeSizeBytes(eSrcDT);
    // Lazily allocate the scratch buffer sized for a full block.
    if( m_pBuffer == nullptr )
    {
        m_pBuffer = VSI_MALLOC3_VERBOSE(nBlockXSize, nBlockYSize, nSrcDTSize);
        if( m_pBuffer == nullptr )
            return CE_Failure;
    }

    // Clamp the request to the raster extent for edge blocks.
    int nXOff = nXBlock * nBlockXSize;
    int nReqXSize = nBlockXSize;
    if( nXOff + nReqXSize > nRasterXSize )
        nReqXSize = nRasterXSize - nXOff;
    int nYOff = nYBlock * nBlockYSize;
    int nReqYSize = nBlockYSize;
    if( nYOff + nReqYSize > nRasterYSize )
        nReqYSize = nRasterYSize - nYOff;

    // Line spacing is a full block row, so partial edge reads keep the same
    // stride that FillMask() assumes.
    if( m_poBaseBand->RasterIO( GF_Read,
                                nXOff, nYOff, nReqXSize, nReqYSize,
                                m_pBuffer,
                                nReqXSize, nReqYSize,
                                eSrcDT,
                                nSrcDTSize,
                                nSrcDTSize * nBlockXSize,
                                nullptr ) != CE_None )
    {
        return CE_Failure;
    }

    GByte* pabyDst = static_cast<GByte*>(pImage);
    if( eSrcDT == GDT_Byte )
    {
        FillMask<GByte>(m_pBuffer, pabyDst, nReqXSize, nReqYSize, nBlockXSize,
                        NULL1, LOW_REPR_SAT1, LOW_INSTR_SAT1,
                        HIGH_INSTR_SAT1, HIGH_REPR_SAT1);
    }
    else if( eSrcDT == GDT_UInt16 )
    {
        FillMask<GUInt16>(m_pBuffer, pabyDst, nReqXSize, nReqYSize, nBlockXSize,
                          NULLU2, LOW_REPR_SATU2, LOW_INSTR_SATU2,
                          HIGH_INSTR_SATU2, HIGH_REPR_SATU2);
    }
    else if( eSrcDT == GDT_Int16 )
    {
        FillMask<GInt16>(m_pBuffer, pabyDst, nReqXSize, nReqYSize, nBlockXSize,
                         NULL2, LOW_REPR_SAT2, LOW_INSTR_SAT2,
                         HIGH_INSTR_SAT2, HIGH_REPR_SAT2);
    }
    else
    {
        // Only four pixel types exist in ISIS3; anything else is a bug.
        CPLAssert( eSrcDT == GDT_Float32 );
        FillMask<float>(m_pBuffer, pabyDst, nReqXSize, nReqYSize, nBlockXSize,
                        NULL4, LOW_REPR_SAT4, LOW_INSTR_SAT4,
                        HIGH_INSTR_SAT4, HIGH_REPR_SAT4);
    }

    return CE_None;
}

/************************************************************************/
/*                            ISIS3Dataset()                            */
/************************************************************************/

// Default-constructs the dataset with an identity geotransform and all
// label/JSON state uninitialized (continued on the next init-list lines).
ISIS3Dataset::ISIS3Dataset() :
    m_fpLabel(nullptr),
    m_fpImage(nullptr),
    m_poExternalDS(nullptr),
    m_bGeoTIFFAsRegularExternal(false),
    m_bGeoTIFFInitDone(true),
    m_bIsLabelWritten(true),
    m_bIsTiled(false),
    m_bInitToNodata(false),
    m_bGotTransform(false),
    m_bHasSrcNoData(false),
    m_dfSrcNoData(0.0),
    m_bForce360(false),
    m_bWriteBoundingDegrees(true),
    m_bUseSrcLabel(true),
    m_bUseSrcMapping(false),
    m_bUseSrcHistory(true),
    m_bAddGDALHistory(true)
{
    m_oKeywords.SetStripSurroundingQuotes(true);
    // Identity geotransform by default.
    m_adfGeoTransform[0] = 0.0;
    m_adfGeoTransform[1] = 1.0;
    m_adfGeoTransform[2] = 0.0;
    m_adfGeoTransform[3] = 0.0;
    m_adfGeoTransform[4] = 0.0;
    m_adfGeoTransform[5] = 1.0;

    // Deinit JSON objects
    m_oJSonLabel.Deinit();
    m_oSrcJSonLabel.Deinit();
}

/************************************************************************/
/*                           ~ISIS3Dataset()                            */
/************************************************************************/

// Flushes any pending label/pixel data and closes all file handles.
// Non-virtual qualified calls (ISIS3Dataset::FlushCache, etc.) are used on
// purpose: virtual dispatch in a destructor would not reach subclasses.
ISIS3Dataset::~ISIS3Dataset()
{
    if( !m_bIsLabelWritten )
        WriteLabel();
    // If the external GeoTIFF was never touched by a write, initialize it
    // now so it exists with the expected block layout.
    if( m_poExternalDS && m_bGeoTIFFAsRegularExternal && !m_bGeoTIFFInitDone )
    {
        reinterpret_cast<ISIS3WrapperRasterBand*>(GetRasterBand(1))->
            InitFile();
    }
    ISIS3Dataset::FlushCache();
    if( m_fpLabel != nullptr )
        VSIFCloseL( m_fpLabel );
    // m_fpImage may alias m_fpLabel for attached-label files; close once.
    if( m_fpImage != nullptr && m_fpImage != m_fpLabel )
        VSIFCloseL( m_fpImage );

    ISIS3Dataset::CloseDependentDatasets();
}

/************************************************************************/
/*                        CloseDependentDatasets()                      */
/************************************************************************/

// Deletes the external dataset and all band objects.
// NOTE(review): bHasDroppedRef is set to FALSE when m_poExternalDS is
// deleted — the usual GDAL convention is to report TRUE when a dependent
// dataset reference is actually dropped; confirm against other drivers.
int ISIS3Dataset::CloseDependentDatasets()
{
    int bHasDroppedRef = GDALPamDataset::CloseDependentDatasets();

    if( m_poExternalDS )
    {
        bHasDroppedRef = FALSE;
        delete m_poExternalDS;
        m_poExternalDS = nullptr;
    }

    // Bands reference the external dataset, so destroy them here as well.
    for( int iBand = 0; iBand < nBands; iBand++ )
    {
        delete papoBands[iBand];
    }
    nBands = 0;

    return bHasDroppedRef;
}

/************************************************************************/
/*                            GetFileList()                             */
/************************************************************************/

// Returns the PAM file list plus the external image file and any files
// referenced from the label (de-duplicated).
char **ISIS3Dataset::GetFileList()
{
    char **papszFileList = GDALPamDataset::GetFileList();

    if( !m_osExternalFilename.empty() )
        papszFileList = CSLAddString( papszFileList, m_osExternalFilename );
    for( int i = 0; i < m_aosAdditionalFiles.Count(); ++i )
    {
        // Avoid duplicates between PAM's list and the label-referenced files.
        if( CSLFindString(papszFileList, m_aosAdditionalFiles[i]) < 0 )
        {
            papszFileList = CSLAddString( papszFileList,
                                          m_aosAdditionalFiles[i] );
        }
    }

    return papszFileList;
}

/************************************************************************/
/*                           GetSpatialRef()                            */
/************************************************************************/

// Returns the SRS parsed from the label when available, else defers to PAM.
const OGRSpatialReference* ISIS3Dataset::GetSpatialRef() const
{
    if( !m_oSRS.IsEmpty() )
        return &m_oSRS;
    return GDALPamDataset::GetSpatialRef();
}

/************************************************************************/
/*                           SetSpatialRef()                            */
/************************************************************************/

// Stores the SRS, propagates it to the external dataset, and invalidates
// the cached JSON label so it is rebuilt with the new projection.
CPLErr ISIS3Dataset::SetSpatialRef( const OGRSpatialReference* poSRS )
{
    if( eAccess == GA_ReadOnly )
        return GDALPamDataset::SetSpatialRef( poSRS );

    if( poSRS )
        m_oSRS = *poSRS;
    else
        m_oSRS.Clear();
    if( m_poExternalDS )
        m_poExternalDS->SetSpatialRef(poSRS);
    InvalidateLabel();
    return CE_None;
}

/************************************************************************/
/*                          GetGeoTransform()                           */
/************************************************************************/

// Returns the label-derived geotransform if one was read, else defers to PAM.
CPLErr ISIS3Dataset::GetGeoTransform( double * padfTransform )
{
    if( m_bGotTransform )
    {
        memcpy( padfTransform, m_adfGeoTransform, sizeof(double) * 6 );
        return CE_None;
    }

    return GDALPamDataset::GetGeoTransform( padfTransform );
}

/************************************************************************/
/*                          SetGeoTransform()                           */
/************************************************************************/

// Accepts only north-up transforms with square pixels and no rotation
// (ISIS3 mapping cannot express anything else); propagates to the external
// dataset and invalidates the cached label.
CPLErr ISIS3Dataset::SetGeoTransform( double * padfTransform )

{
    if( eAccess == GA_ReadOnly )
        return GDALPamDataset::SetGeoTransform( padfTransform );

    if( padfTransform[1] <= 0.0 ||
        padfTransform[1] != -padfTransform[5] ||
        padfTransform[2] != 0.0 ||
        padfTransform[4] != 0.0 )
    {
        CPLError(CE_Failure, CPLE_NotSupported,
                 "Only north-up geotransform with square pixels supported");
        return CE_Failure;
    }
    m_bGotTransform = true;
    memcpy( m_adfGeoTransform, padfTransform, sizeof(double) * 6 );
    if( m_poExternalDS )
        m_poExternalDS->SetGeoTransform(padfTransform);
    InvalidateLabel();
    return CE_None;
}

/************************************************************************/
/*                        GetMetadataDomainList()                       */
/************************************************************************/

// Advertises the default domains plus the "json:ISIS3" label domain.
char **ISIS3Dataset::GetMetadataDomainList()
{
    return BuildMetadataDomainList(
        nullptr, FALSE, "", "json:ISIS3", nullptr);
}

/************************************************************************/
/*                             GetMetadata()                            */
/************************************************************************/

// For the "json:ISIS3" domain, returns the (lazily built and cached)
// pretty-printed JSON label as a single-element string list; other domains
// defer to PAM.
char **ISIS3Dataset::GetMetadata( const char* pszDomain )
{
    if( pszDomain != nullptr && EQUAL( pszDomain, "json:ISIS3" ) )
    {
        if( m_aosISIS3MD.empty() )
        {
            if( eAccess == GA_Update && !m_oJSonLabel.IsValid() )
            {
                BuildLabel();
            }
            CPLAssert( m_oJSonLabel.IsValid() );
            const CPLString osJson =
                m_oJSonLabel.Format(CPLJSONObject::PrettyFormat::Pretty);
            m_aosISIS3MD.InsertString(0, osJson.c_str());
        }
        return m_aosISIS3MD.List();
    }
    return GDALPamDataset::GetMetadata(pszDomain);
}

/************************************************************************/
/*                           InvalidateLabel()                          */
/************************************************************************/

// Drops the cached JSON label and its formatted string so the next access
// rebuilds them from current state.
void ISIS3Dataset::InvalidateLabel()
{
    m_oJSonLabel.Deinit();
    m_aosISIS3MD.Clear();
}

/************************************************************************/
/*                             SetMetadata()                            */
/************************************************************************/

// For the "json:ISIS3" domain in update mode, parses the provided JSON text
// as the new source label (replacing any previous one); other domains defer
// to PAM.
CPLErr ISIS3Dataset::SetMetadata( char** papszMD, const char* pszDomain )
{
    if( m_bUseSrcLabel && eAccess == GA_Update && pszDomain != nullptr &&
        EQUAL( pszDomain, "json:ISIS3" ) )
    {
        m_oSrcJSonLabel.Deinit();
        InvalidateLabel();
        if( papszMD != nullptr && papszMD[0] != nullptr )
        {
            CPLJSONDocument oJSONDocument;
            const GByte *pabyData = reinterpret_cast<const GByte *>(papszMD[0]);
            if( !oJSONDocument.LoadMemory( pabyData ) )
            {
                return CE_Failure;
            }

            m_oSrcJSonLabel = oJSONDocument.GetRoot();
            if( !m_oSrcJSonLabel.IsValid() )
            {
                return CE_Failure;
            }
        }
        return CE_None;
    }
    return GDALPamDataset::SetMetadata(papszMD, pszDomain);
}

/************************************************************************/
/*                              Identify()                              */
/************************************************************************/

// A file is a candidate ISIS3 cube if its header bytes contain "IsisCube".
int ISIS3Dataset::Identify( GDALOpenInfo * poOpenInfo )

{
    if( poOpenInfo->fpL != nullptr &&
        poOpenInfo->pabyHeader != nullptr &&
        strstr((const char *)poOpenInfo->pabyHeader,"IsisCube") != nullptr )
        return TRUE;

    return FALSE;
}

/************************************************************************/
/*                        GetRawBinaryLayout()                          */
/************************************************************************/

// Reports the raw binary layout computed at open time, if any (only set for
// BandSequential-style storage).
bool ISIS3Dataset::GetRawBinaryLayout(GDALDataset::RawBinaryLayout& sLayout)
{
    if( m_sLayout.osRawFilename.empty() )
        return false;
    sLayout = m_sLayout;
    return true;
}

/************************************************************************/
/*                         GetValueAndUnits()                           */
/************************************************************************/

// Extracts numeric value(s) and unit strings from a label JSON node, which
// may be a bare number, a {value, unit} object (value possibly an array),
// or an array of exactly nExpectedVals numbers. On a malformed array,
// adfValues is cleared to signal failure.
static void GetValueAndUnits(const CPLJSONObject& obj,
                             std::vector<double>& adfValues,
                             std::vector<std::string>& aosUnits,
                             int nExpectedVals)
{
    if( obj.GetType() == CPLJSONObject::Type::Integer ||
        obj.GetType() == CPLJSONObject::Type::Double )
    {
        adfValues.push_back(obj.ToDouble());
    }
    else if( obj.GetType() == CPLJSONObject::Type::Object )
    {
        auto oValue = obj.GetObj("value");
        auto oUnit = obj.GetObj("unit");
        if( oValue.IsValid() &&
            (oValue.GetType() == CPLJSONObject::Type::Integer ||
             oValue.GetType() == CPLJSONObject::Type::Double ||
             oValue.GetType() == CPLJSONObject::Type::Array) &&
            oUnit.IsValid() &&
            oUnit.GetType() == CPLJSONObject::Type::String )
        {
            if( oValue.GetType() == CPLJSONObject::Type::Array )
            {
                // Recurse to collect each element of the value array.
                GetValueAndUnits(oValue, adfValues, aosUnits, nExpectedVals);
            }
            else
            {
adfValues.push_back(oValue.ToDouble()); } aosUnits.push_back(oUnit.ToString()); } } else if( obj.GetType() == CPLJSONObject::Type::Array ) { auto oArray = obj.ToArray(); if( oArray.Size() == nExpectedVals ) { for( int i = 0; i < nExpectedVals; i++ ) { if( oArray[i].GetType() == CPLJSONObject::Type::Integer || oArray[i].GetType() == CPLJSONObject::Type::Double ) { adfValues.push_back(oArray[i].ToDouble()); } else { adfValues.clear(); return; } } } } } /************************************************************************/ /* Open() */ /************************************************************************/ GDALDataset *ISIS3Dataset::Open( GDALOpenInfo * poOpenInfo ) { /* -------------------------------------------------------------------- */ /* Does this look like a CUBE dataset? */ /* -------------------------------------------------------------------- */ if( !Identify( poOpenInfo ) ) return nullptr; /* -------------------------------------------------------------------- */ /* Open the file using the large file API. */ /* -------------------------------------------------------------------- */ ISIS3Dataset *poDS = new ISIS3Dataset(); if( ! 
poDS->m_oKeywords.Ingest( poOpenInfo->fpL, 0 ) ) { VSIFCloseL( poOpenInfo->fpL ); poOpenInfo->fpL = nullptr; delete poDS; return nullptr; } poDS->m_oJSonLabel = poDS->m_oKeywords.GetJsonObject(); poDS->m_oJSonLabel.Add( "_filename", poOpenInfo->pszFilename ); // Find additional files from the label for( const CPLJSONObject& oObj : poDS->m_oJSonLabel.GetChildren() ) { if( oObj.GetType() == CPLJSONObject::Type::Object ) { CPLString osContainerName = oObj.GetName(); CPLJSONObject oContainerName = oObj.GetObj( "_container_name" ); if( oContainerName.GetType() == CPLJSONObject::Type::String ) { osContainerName = oContainerName.ToString(); } CPLJSONObject oFilename = oObj.GetObj( "^" + osContainerName ); if( oFilename.GetType() == CPLJSONObject::Type::String ) { VSIStatBufL sStat; CPLString osFilename( CPLFormFilename( CPLGetPath(poOpenInfo->pszFilename), oFilename.ToString().c_str(), nullptr ) ); if( VSIStatL( osFilename, &sStat ) == 0 ) { poDS->m_aosAdditionalFiles.AddString(osFilename); } else { CPLDebug("ISIS3", "File %s referenced but not foud", osFilename.c_str()); } } } } VSIFCloseL( poOpenInfo->fpL ); poOpenInfo->fpL = nullptr; /* -------------------------------------------------------------------- */ /* Assume user is pointing to label (i.e. 
.lbl) file for detached option */ /* -------------------------------------------------------------------- */ // Image can be inline or detached and point to an image name // the Format can be Tiled or Raw // Object = Core // StartByte = 65537 // Format = Tile // TileSamples = 128 // TileLines = 128 //OR----- // Object = Core // StartByte = 1 // ^Core = r0200357_detatched.cub // Format = BandSequential //OR----- // Object = Core // StartByte = 1 // ^Core = r0200357_detached_tiled.cub // Format = Tile // TileSamples = 128 // TileLines = 128 //OR----- // Object = Core // StartByte = 1 // ^Core = some.tif // Format = GeoTIFF /* -------------------------------------------------------------------- */ /* What file contains the actual data? */ /* -------------------------------------------------------------------- */ const char *pszCore = poDS->GetKeyword( "IsisCube.Core.^Core" ); CPLString osQubeFile; if( EQUAL(pszCore,"") ) osQubeFile = poOpenInfo->pszFilename; else { CPLString osPath = CPLGetPath( poOpenInfo->pszFilename ); osQubeFile = CPLFormFilename( osPath, pszCore, nullptr ); poDS->m_osExternalFilename = osQubeFile; } /* -------------------------------------------------------------------- */ /* Check if file an ISIS3 header file? Read a few lines of text */ /* searching for something starting with nrows or ncols. 
*/ /* -------------------------------------------------------------------- */ /************* Skipbytes *****************************/ int nSkipBytes = atoi(poDS->GetKeyword("IsisCube.Core.StartByte", "1")); if( nSkipBytes <= 1 ) nSkipBytes = 0; else nSkipBytes -= 1; /******* Grab format type (BandSequential, Tiled) *******/ CPLString osFormat = poDS->GetKeyword( "IsisCube.Core.Format" ); int tileSizeX = 0; int tileSizeY = 0; if (EQUAL(osFormat,"Tile") ) { poDS->m_bIsTiled = true; /******* Get Tile Sizes *********/ tileSizeX = atoi(poDS->GetKeyword("IsisCube.Core.TileSamples")); tileSizeY = atoi(poDS->GetKeyword("IsisCube.Core.TileLines")); if (tileSizeX <= 0 || tileSizeY <= 0) { CPLError( CE_Failure, CPLE_OpenFailed, "Wrong tile dimensions : %d x %d", tileSizeX, tileSizeY); delete poDS; return nullptr; } } else if (!EQUAL(osFormat,"BandSequential") && !EQUAL(osFormat,"GeoTIFF") ) { CPLError( CE_Failure, CPLE_OpenFailed, "%s format not supported.", osFormat.c_str()); delete poDS; return nullptr; } /*********** Grab samples lines band ************/ const int nCols = atoi(poDS->GetKeyword("IsisCube.Core.Dimensions.Samples")); const int nRows = atoi(poDS->GetKeyword("IsisCube.Core.Dimensions.Lines")); const int nBands = atoi(poDS->GetKeyword("IsisCube.Core.Dimensions.Bands")); /****** Grab format type - ISIS3 only supports 8,U16,S16,32 *****/ GDALDataType eDataType = GDT_Byte; double dfNoData = 0.0; const char *itype = poDS->GetKeyword( "IsisCube.Core.Pixels.Type" ); if (EQUAL(itype,"UnsignedByte") ) { eDataType = GDT_Byte; dfNoData = NULL1; } else if (EQUAL(itype,"UnsignedWord") ) { eDataType = GDT_UInt16; dfNoData = NULLU2; } else if (EQUAL(itype,"SignedWord") ) { eDataType = GDT_Int16; dfNoData = NULL2; } else if (EQUAL(itype,"Real") || EQUAL(itype,"") ) { eDataType = GDT_Float32; dfNoData = NULL4; } else { CPLError( CE_Failure, CPLE_OpenFailed, "%s pixel type not supported.", itype); delete poDS; return nullptr; } /*********** Grab samples lines band ************/ 
//default to MSB const bool bIsLSB = EQUAL( poDS->GetKeyword( "IsisCube.Core.Pixels.ByteOrder"),"Lsb"); /*********** Grab Cellsize ************/ double dfXDim = 1.0; double dfYDim = 1.0; const char* pszRes = poDS->GetKeyword("IsisCube.Mapping.PixelResolution"); if (strlen(pszRes) > 0 ) { dfXDim = CPLAtof(pszRes); /* values are in meters */ dfYDim = -CPLAtof(pszRes); } /*********** Grab UpperLeftCornerY ************/ double dfULYMap = 0.5; const char* pszULY = poDS->GetKeyword("IsisCube.Mapping.UpperLeftCornerY"); if (strlen(pszULY) > 0) { dfULYMap = CPLAtof(pszULY); } /*********** Grab UpperLeftCornerX ************/ double dfULXMap = 0.5; const char* pszULX = poDS->GetKeyword("IsisCube.Mapping.UpperLeftCornerX"); if( strlen(pszULX) > 0 ) { dfULXMap = CPLAtof(pszULX); } /*********** Grab TARGET_NAME ************/ /**** This is the planets name i.e. Mars ***/ const char *target_name = poDS->GetKeyword("IsisCube.Mapping.TargetName"); #ifdef notdef const double dfLongitudeMulFactor = EQUAL(poDS->GetKeyword( "IsisCube.Mapping.LongitudeDirection", "PositiveEast"), "PositiveEast") ? 
1 : -1; #else const double dfLongitudeMulFactor = 1; #endif /*********** Grab MAP_PROJECTION_TYPE ************/ const char *map_proj_name = poDS->GetKeyword( "IsisCube.Mapping.ProjectionName"); /*********** Grab SEMI-MAJOR ************/ const double semi_major = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.EquatorialRadius")); /*********** Grab semi-minor ************/ const double semi_minor = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.PolarRadius")); /*********** Grab CENTER_LAT ************/ const double center_lat = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.CenterLatitude")); /*********** Grab CENTER_LON ************/ const double center_lon = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.CenterLongitude")) * dfLongitudeMulFactor; /*********** Grab 1st std parallel ************/ const double first_std_parallel = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.FirstStandardParallel")); /*********** Grab 2nd std parallel ************/ const double second_std_parallel = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.SecondStandardParallel")); /*********** Grab scaleFactor ************/ const double scaleFactor = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.scaleFactor", "1.0")); /*** grab LatitudeType = Planetographic ****/ // Need to further study how ocentric/ographic will effect the gdal library // So far we will use this fact to define a sphere or ellipse for some // projections // Frank - may need to talk this over bool bIsGeographic = true; if (EQUAL( poDS->GetKeyword("IsisCube.Mapping.LatitudeType"), "Planetocentric" )) bIsGeographic = false; //Set oSRS projection and parameters //############################################################ //ISIS3 Projection types // Equirectangular // LambertConformal // Mercator // ObliqueCylindrical // Orthographic // PolarStereographic // SimpleCylindrical // Sinusoidal // TransverseMercator #ifdef DEBUG CPLDebug( "ISIS3", "using projection %s", map_proj_name); #endif OGRSpatialReference oSRS; bool bProjectionSet = true; if 
((EQUAL( map_proj_name, "Equirectangular" )) || (EQUAL( map_proj_name, "SimpleCylindrical" )) ) { oSRS.SetEquirectangular2 ( 0.0, center_lon, center_lat, 0, 0 ); } else if (EQUAL( map_proj_name, "Orthographic" )) { oSRS.SetOrthographic ( center_lat, center_lon, 0, 0 ); } else if (EQUAL( map_proj_name, "Sinusoidal" )) { oSRS.SetSinusoidal ( center_lon, 0, 0 ); } else if (EQUAL( map_proj_name, "Mercator" )) { oSRS.SetMercator ( center_lat, center_lon, scaleFactor, 0, 0 ); } else if (EQUAL( map_proj_name, "PolarStereographic" )) { oSRS.SetPS ( center_lat, center_lon, scaleFactor, 0, 0 ); } else if (EQUAL( map_proj_name, "TransverseMercator" )) { oSRS.SetTM ( center_lat, center_lon, scaleFactor, 0, 0 ); } else if (EQUAL( map_proj_name, "LambertConformal" )) { oSRS.SetLCC ( first_std_parallel, second_std_parallel, center_lat, center_lon, 0, 0 ); } else if (EQUAL( map_proj_name, "PointPerspective" )) { // Distance parameter is the distance to the center of the body, and is given in km const double distance = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.Distance")) * 1000.0; const double height_above_ground = distance - semi_major; oSRS.SetVerticalPerspective(center_lat, center_lon, 0, height_above_ground, 0, 0); } else if (EQUAL( map_proj_name, "ObliqueCylindrical" )) { const double poleLatitude = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.PoleLatitude")); const double poleLongitude = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.PoleLongitude")) * dfLongitudeMulFactor; const double poleRotation = CPLAtof(poDS->GetKeyword( "IsisCube.Mapping.PoleRotation")); CPLString oProj4String; // ISIS3 rotated pole doesn't use the same conventions than PROJ ob_tran // Compare the sign difference in https://github.com/USGS-Astrogeology/ISIS3/blob/3.8.0/isis/src/base/objs/ObliqueCylindrical/ObliqueCylindrical.cpp#L244 // and https://github.com/OSGeo/PROJ/blob/6.2/src/projections/ob_tran.cpp#L34 // They can be compensated by modifying the poleLatitude to 180-poleLatitude // There's also a 
sign difference for the poleRotation parameter // The existence of those different conventions is acknowledged in // https://pds-imaging.jpl.nasa.gov/documentation/Cassini_BIDRSIS.PDF in the middle of page 10 oProj4String.Printf( "+proj=ob_tran +o_proj=eqc +o_lon_p=%.18g +o_lat_p=%.18g +lon_0=%.18g", -poleRotation, 180-poleLatitude, poleLongitude); oSRS.SetFromUserInput(oProj4String); } else { CPLDebug( "ISIS3", "Dataset projection %s is not supported. Continuing...", map_proj_name ); bProjectionSet = false; } if (bProjectionSet) { //Create projection name, i.e. MERCATOR MARS and set as ProjCS keyword CPLString osProjTargetName(map_proj_name); osProjTargetName += " "; osProjTargetName += target_name; oSRS.SetProjCS(osProjTargetName); //set ProjCS keyword //The geographic/geocentric name will be the same basic name as the body name //'GCS' = Geographic/Geocentric Coordinate System CPLString osGeogName("GCS_"); osGeogName += target_name; //The datum name will be the same basic name as the planet CPLString osDatumName("D_"); osDatumName += target_name; CPLString osSphereName(target_name); //strcat(osSphereName, "_IAU_IAG"); //Might not be IAU defined so don't add //calculate inverse flattening from major and minor axis: 1/f = a/(a-b) double iflattening = 0.0; if ((semi_major - semi_minor) < 0.0000001) iflattening = 0; else iflattening = semi_major / (semi_major - semi_minor); //Set the body size but take into consideration which proj is being used to help w/ proj4 compatibility //The use of a Sphere, polar radius or ellipse here is based on how ISIS does it internally if ( ( (EQUAL( map_proj_name, "Stereographic" ) && (fabs(center_lat) == 90)) ) || (EQUAL( map_proj_name, "PolarStereographic" )) ) { if (bIsGeographic) { //Geograpraphic, so set an ellipse oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, semi_major, iflattening, "Reference_Meridian", 0.0 ); } else { //Geocentric, so force a sphere using the semi-minor axis. I hope... 
osSphereName += "_polarRadius"; oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, semi_minor, 0.0, "Reference_Meridian", 0.0 ); } } else if ( (EQUAL( map_proj_name, "SimpleCylindrical" )) || (EQUAL( map_proj_name, "Orthographic" )) || (EQUAL( map_proj_name, "Stereographic" )) || (EQUAL( map_proj_name, "Sinusoidal" )) || (EQUAL( map_proj_name, "PointPerspective" )) ) { // ISIS uses the spherical equation for these projections // so force a sphere. oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, semi_major, 0.0, "Reference_Meridian", 0.0 ); } else if (EQUAL( map_proj_name, "Equirectangular" )) { //Calculate localRadius using ISIS3 simple elliptical method // not the more standard Radius of Curvature method //PI = 4 * atan(1); const double radLat = center_lat * M_PI / 180; // in radians const double meanRadius = sqrt( pow( semi_minor * cos( radLat ), 2) + pow( semi_major * sin( radLat ), 2) ); const double localRadius = ( meanRadius == 0.0 ) ? 0.0 : semi_major * semi_minor / meanRadius; osSphereName += "_localRadius"; oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, localRadius, 0.0, "Reference_Meridian", 0.0 ); } else { //All other projections: Mercator, Transverse Mercator, Lambert Conformal, etc. //Geographic, so set an ellipse if (bIsGeographic) { oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, semi_major, iflattening, "Reference_Meridian", 0.0 ); } else { //Geocentric, so force a sphere. I hope... oSRS.SetGeogCS( osGeogName, osDatumName, osSphereName, semi_major, 0.0, "Reference_Meridian", 0.0 ); } } // translate back into a projection string. poDS->m_oSRS = oSRS; poDS->m_oSRS.SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); } /* END ISIS3 Label Read */ /*++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++*/ /* -------------------------------------------------------------------- */ /* Did we get the required keywords? If not we return with */ /* this never having been considered to be a match. 
This isn't */ /* an error! */ /* -------------------------------------------------------------------- */ if( !GDALCheckDatasetDimensions(nCols, nRows) || !GDALCheckBandCount(nBands, false) ) { delete poDS; return nullptr; } /* -------------------------------------------------------------------- */ /* Capture some information from the file that is of interest. */ /* -------------------------------------------------------------------- */ poDS->nRasterXSize = nCols; poDS->nRasterYSize = nRows; /* -------------------------------------------------------------------- */ /* Open target binary file. */ /* -------------------------------------------------------------------- */ if( EQUAL(osFormat,"GeoTIFF") ) { if( nSkipBytes != 0 ) { CPLError(CE_Warning, CPLE_NotSupported, "Ignoring StartByte=%d for format=GeoTIFF", 1+nSkipBytes); } if( osQubeFile == poOpenInfo->pszFilename ) { CPLError( CE_Failure, CPLE_AppDefined, "A ^Core file must be set"); delete poDS; return nullptr; } poDS->m_poExternalDS = reinterpret_cast<GDALDataset *>( GDALOpen( osQubeFile, poOpenInfo->eAccess ) ); if( poDS->m_poExternalDS == nullptr ) { delete poDS; return nullptr; } if( poDS->m_poExternalDS->GetRasterXSize() != poDS->nRasterXSize || poDS->m_poExternalDS->GetRasterYSize() != poDS->nRasterYSize || poDS->m_poExternalDS->GetRasterCount() != nBands || poDS->m_poExternalDS->GetRasterBand(1)->GetRasterDataType() != eDataType ) { CPLError( CE_Failure, CPLE_AppDefined, "%s has incompatible characteristics with the ones " "declared in the label.", osQubeFile.c_str() ); delete poDS; return nullptr; } } else { if( poOpenInfo->eAccess == GA_ReadOnly ) poDS->m_fpImage = VSIFOpenL( osQubeFile, "r" ); else poDS->m_fpImage = VSIFOpenL( osQubeFile, "r+" ); if( poDS->m_fpImage == nullptr ) { CPLError( CE_Failure, CPLE_OpenFailed, "Failed to open %s: %s.", osQubeFile.c_str(), VSIStrerror( errno ) ); delete poDS; return nullptr; } // Sanity checks in case the external raw file appears to be a // TIFF file if( 
EQUAL(CPLGetExtension(osQubeFile), "tif") ) { GDALDataset* poTIF_DS = reinterpret_cast<GDALDataset*>( GDALOpen(osQubeFile, GA_ReadOnly)); if( poTIF_DS ) { bool bWarned = false; if( poTIF_DS->GetRasterXSize() != poDS->nRasterXSize || poTIF_DS->GetRasterYSize() != poDS->nRasterYSize || poTIF_DS->GetRasterCount() != nBands || poTIF_DS->GetRasterBand(1)->GetRasterDataType() != eDataType || poTIF_DS->GetMetadataItem("COMPRESSION", "IMAGE_STRUCTURE") != nullptr ) { bWarned = true; CPLError( CE_Warning, CPLE_AppDefined, "%s has incompatible characteristics with the ones " "declared in the label.", osQubeFile.c_str() ); } int nBlockXSize = 1, nBlockYSize = 1; poTIF_DS->GetRasterBand(1)->GetBlockSize(&nBlockXSize, &nBlockYSize); if( (poDS->m_bIsTiled && (nBlockXSize != tileSizeX || nBlockYSize != tileSizeY) ) || (!poDS->m_bIsTiled && (nBlockXSize != nCols || (nBands > 1 && nBlockYSize != 1))) ) { if( !bWarned ) { bWarned = true; CPLError( CE_Warning, CPLE_AppDefined, "%s has incompatible characteristics with the ones " "declared in the label.", osQubeFile.c_str() ); } } // to please Clang Static Analyzer nBlockXSize = std::max(1, nBlockXSize); nBlockYSize = std::max(1, nBlockYSize); // Check that blocks are effectively written in expected order. 
const int nBlockSizeBytes = nBlockXSize * nBlockYSize * GDALGetDataTypeSizeBytes(eDataType); bool bGoOn = !bWarned; const int l_nBlocksPerRow = DIV_ROUND_UP(nCols, nBlockXSize); const int l_nBlocksPerColumn = DIV_ROUND_UP(nRows, nBlockYSize); int nBlockNo = 0; for( int i = 0; i < nBands && bGoOn; i++ ) { for( int y = 0; y < l_nBlocksPerColumn && bGoOn; y++ ) { for( int x = 0; x < l_nBlocksPerRow && bGoOn; x++ ) { const char* pszBlockOffset = poTIF_DS-> GetRasterBand(i+1)->GetMetadataItem( CPLSPrintf("BLOCK_OFFSET_%d_%d", x, y), "TIFF"); if( pszBlockOffset ) { GIntBig nOffset = CPLAtoGIntBig(pszBlockOffset); if( nOffset != nSkipBytes + nBlockNo * nBlockSizeBytes ) { //bWarned = true; CPLError( CE_Warning, CPLE_AppDefined, "%s has incompatible " "characteristics with the ones " "declared in the label.", osQubeFile.c_str() ); bGoOn = false; } } nBlockNo ++; } } } delete poTIF_DS; } } } poDS->eAccess = poOpenInfo->eAccess; /* -------------------------------------------------------------------- */ /* Compute the line offset. 
*/ /* -------------------------------------------------------------------- */ int nLineOffset = 0; int nPixelOffset = 0; vsi_l_offset nBandOffset = 0; if( EQUAL(osFormat,"BandSequential") ) { const int nItemSize = GDALGetDataTypeSizeBytes(eDataType); nPixelOffset = nItemSize; try { nLineOffset = (CPLSM(nPixelOffset) * CPLSM(nCols)).v(); } catch( const CPLSafeIntOverflow& ) { delete poDS; return nullptr; } nBandOffset = static_cast<vsi_l_offset>(nLineOffset) * nRows; poDS->m_sLayout.osRawFilename = osQubeFile; if( nBands > 1 ) poDS->m_sLayout.eInterleaving = RawBinaryLayout::Interleaving::BSQ; poDS->m_sLayout.eDataType = eDataType; poDS->m_sLayout.bLittleEndianOrder = bIsLSB; poDS->m_sLayout.nImageOffset = nSkipBytes; poDS->m_sLayout.nPixelOffset = nPixelOffset; poDS->m_sLayout.nLineOffset = nLineOffset; poDS->m_sLayout.nBandOffset = static_cast<GIntBig>(nBandOffset); } /* else Tiled or external */ /* -------------------------------------------------------------------- */ /* Extract BandBin info. 
*/ /* -------------------------------------------------------------------- */ std::vector<std::string> aosBandNames; std::vector<std::string> aosBandUnits; std::vector<double> adfWavelengths; std::vector<std::string> aosWavelengthsUnit; std::vector<double> adfBandwidth; std::vector<std::string> aosBandwidthUnit; const auto oBandBin = poDS->m_oJSonLabel.GetObj( "IsisCube/BandBin" ); if( oBandBin.IsValid() && oBandBin.GetType() == CPLJSONObject::Type::Object ) { for( const auto& child: oBandBin.GetChildren() ) { if( CPLString(child.GetName()).ifind("name") != std::string::npos ) { // Use "name" in priority if( EQUAL(child.GetName().c_str(), "name") ) { aosBandNames.clear(); } else if( !aosBandNames.empty() ) { continue; } if( child.GetType() == CPLJSONObject::Type::String && nBands == 1 ) { aosBandNames.push_back(child.ToString()); } else if( child.GetType() == CPLJSONObject::Type::Array ) { auto oArray = child.ToArray(); if( oArray.Size() == nBands ) { for( int i = 0; i < nBands; i++ ) { if( oArray[i].GetType() == CPLJSONObject::Type::String ) { aosBandNames.push_back(oArray[i].ToString()); } else { aosBandNames.clear(); break; } } } } } else if( EQUAL(child.GetName().c_str(), "BandSuffixUnit") && child.GetType() == CPLJSONObject::Type::Array ) { auto oArray = child.ToArray(); if( oArray.Size() == nBands ) { for( int i = 0; i < nBands; i++ ) { if( oArray[i].GetType() == CPLJSONObject::Type::String ) { aosBandUnits.push_back(oArray[i].ToString()); } else { aosBandUnits.clear(); break; } } } } else if( EQUAL(child.GetName().c_str(), "BandBinCenter") || EQUAL(child.GetName().c_str(), "Center") ) { GetValueAndUnits(child, adfWavelengths, aosWavelengthsUnit, nBands); } else if( EQUAL(child.GetName().c_str(), "BandBinUnit") && child.GetType() == CPLJSONObject::Type::String ) { CPLString unit(child.ToString()); if( STARTS_WITH_CI(unit, "micromet") || EQUAL(unit, "um") || STARTS_WITH_CI(unit, "nanomet") || EQUAL(unit, "nm") ) { 
aosWavelengthsUnit.push_back(child.ToString()); } } else if( EQUAL(child.GetName().c_str(), "Width") ) { GetValueAndUnits(child, adfBandwidth, aosBandwidthUnit, nBands); } } if( !adfWavelengths.empty() && aosWavelengthsUnit.size() == 1 ) { for( int i = 1; i < nBands; i++ ) { aosWavelengthsUnit.push_back(aosWavelengthsUnit[0]); } } if( !adfBandwidth.empty() && aosBandwidthUnit.size() == 1 ) { for( int i = 1; i < nBands; i++ ) { aosBandwidthUnit.push_back(aosBandwidthUnit[0]); } } } /* -------------------------------------------------------------------- */ /* Create band information objects. */ /* -------------------------------------------------------------------- */ #ifdef CPL_LSB const bool bNativeOrder = bIsLSB; #else const bool bNativeOrder = !bIsLSB; #endif for( int i = 0; i < nBands; i++ ) { GDALRasterBand *poBand = nullptr; if( poDS->m_poExternalDS != nullptr ) { ISIS3WrapperRasterBand* poISISBand = new ISIS3WrapperRasterBand( poDS->m_poExternalDS->GetRasterBand( i+1 ) ); poBand = poISISBand; poDS->SetBand( i+1, poBand ); poISISBand->SetMaskBand( new ISISMaskBand(poISISBand) ); } else if( poDS->m_bIsTiled ) { CPLErrorReset(); ISISTiledBand* poISISBand = new ISISTiledBand( poDS, poDS->m_fpImage, i+1, eDataType, tileSizeX, tileSizeY, nSkipBytes, 0, 0, bNativeOrder ); if( CPLGetLastErrorType() != CE_None ) { delete poISISBand; delete poDS; return nullptr; } poBand = poISISBand; poDS->SetBand( i+1, poBand ); poISISBand->SetMaskBand( new ISISMaskBand(poISISBand) ); } else { ISIS3RawRasterBand* poISISBand = new ISIS3RawRasterBand( poDS, i+1, poDS->m_fpImage, nSkipBytes + nBandOffset * i, nPixelOffset, nLineOffset, eDataType, bNativeOrder ); poBand = poISISBand; poDS->SetBand( i+1, poBand ); poISISBand->SetMaskBand( new ISISMaskBand(poISISBand) ); } if( i < static_cast<int>(aosBandNames.size()) ) { poBand->SetDescription(aosBandNames[i].c_str()); } if( i < static_cast<int>(adfWavelengths.size()) && i < static_cast<int>(aosWavelengthsUnit.size()) ) { 
poBand->SetMetadataItem("WAVELENGTH", CPLSPrintf("%f", adfWavelengths[i])); poBand->SetMetadataItem("WAVELENGTH_UNIT", aosWavelengthsUnit[i].c_str()); if( i < static_cast<int>(adfBandwidth.size()) && i < static_cast<int>(aosBandwidthUnit.size()) ) { poBand->SetMetadataItem("BANDWIDTH", CPLSPrintf("%f", adfBandwidth[i])); poBand->SetMetadataItem("BANDWIDTH_UNIT", aosBandwidthUnit[i].c_str()); } } if( i < static_cast<int>(aosBandUnits.size()) ) { poBand->SetUnitType(aosBandUnits[i].c_str()); } poBand->SetNoDataValue( dfNoData ); // Set offset/scale values. const double dfOffset = CPLAtofM(poDS->GetKeyword("IsisCube.Core.Pixels.Base","0.0")); const double dfScale = CPLAtofM(poDS->GetKeyword("IsisCube.Core.Pixels.Multiplier","1.0")); if( dfOffset != 0.0 || dfScale != 1.0 ) { poBand->SetOffset(dfOffset); poBand->SetScale(dfScale); } } /* -------------------------------------------------------------------- */ /* Check for a .prj file. For ISIS3 I would like to keep this in */ /* -------------------------------------------------------------------- */ const CPLString osPath = CPLGetPath( poOpenInfo->pszFilename ); const CPLString osName = CPLGetBasename(poOpenInfo->pszFilename); const char *pszPrjFile = CPLFormCIFilename( osPath, osName, "prj" ); VSILFILE *fp = VSIFOpenL( pszPrjFile, "r" ); if( fp != nullptr ) { VSIFCloseL( fp ); char **papszLines = CSLLoad( pszPrjFile ); OGRSpatialReference oSRS2; if( oSRS2.importFromESRI( papszLines ) == OGRERR_NONE ) { poDS->m_aosAdditionalFiles.AddString( pszPrjFile ); poDS->m_oSRS = oSRS2; poDS->m_oSRS.SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); } CSLDestroy( papszLines ); } if( dfULXMap != 0.5 || dfULYMap != 0.5 || dfXDim != 1.0 || dfYDim != 1.0 ) { poDS->m_bGotTransform = true; poDS->m_adfGeoTransform[0] = dfULXMap; poDS->m_adfGeoTransform[1] = dfXDim; poDS->m_adfGeoTransform[2] = 0.0; poDS->m_adfGeoTransform[3] = dfULYMap; poDS->m_adfGeoTransform[4] = 0.0; poDS->m_adfGeoTransform[5] = dfYDim; } if( !poDS->m_bGotTransform ) 
{
    // No geotransform from the label: fall back to a .cbw world file
    // sitting next to the input file.
    poDS->m_bGotTransform =
        CPL_TO_BOOL(GDALReadWorldFile( poOpenInfo->pszFilename, "cbw",
                                       poDS->m_adfGeoTransform ));
    if( poDS->m_bGotTransform )
    {
        poDS->m_aosAdditionalFiles.AddString(
            CPLResetExtension(poOpenInfo->pszFilename, "cbw") );
    }
}

if( !poDS->m_bGotTransform )
{
    // Last resort: a generic .wld world file.
    poDS->m_bGotTransform =
        CPL_TO_BOOL(GDALReadWorldFile( poOpenInfo->pszFilename, "wld",
                                       poDS->m_adfGeoTransform ));
    if( poDS->m_bGotTransform )
    {
        poDS->m_aosAdditionalFiles.AddString(
            CPLResetExtension(poOpenInfo->pszFilename, "wld") );
    }
}

/* -------------------------------------------------------------------- */
/*      Initialize any PAM information.                                 */
/* -------------------------------------------------------------------- */
poDS->SetDescription( poOpenInfo->pszFilename );
poDS->TryLoadXML();

/* -------------------------------------------------------------------- */
/*      Check for overviews.                                            */
/* -------------------------------------------------------------------- */
poDS->oOvManager.Initialize( poDS, poOpenInfo->pszFilename );

return poDS;
}

/************************************************************************/
/*                            GetKeyword()                              */
/************************************************************************/

// Look up a keyword path (e.g. "IsisCube.Core.Format") in the parsed
// PVL label; returns pszDefault when the path is absent.
const char *ISIS3Dataset::GetKeyword( const char *pszPath,
                                      const char *pszDefault )
{
    return m_oKeywords.GetKeyword( pszPath, pszDefault );
}

/************************************************************************/
/*                              FixLong()                               */
/************************************************************************/

// Normalize a longitude according to the dataset conventions:
// negate it for PositiveWest cubes, and shift into [0, 360) when the
// 360-degree longitude domain is forced.
double ISIS3Dataset::FixLong( double dfLong )
{
    if( m_osLongitudeDirection == "PositiveWest" )
        dfLong = -dfLong;
    if( m_bForce360 && dfLong < 0 )
        dfLong += 360.0;
    return dfLong;
}

/************************************************************************/
/*                           BuildLabel()                               */
/************************************************************************/

// (Re)build m_oJSonLabel, the in-memory JSON form of the ISIS3 label,
// from the current dataset state, starting from m_oSrcJSonLabel when a
// source label is available so that unknown objects are carried over.
void ISIS3Dataset::BuildLabel()
{
    CPLJSONObject oLabel = m_oSrcJSonLabel;
    if( !oLabel.IsValid() )
    {
        oLabel = CPLJSONObject();
    }
    // If we have a source
label, then edit it directly CPLJSONObject oIsisCube = GetOrCreateJSONObject(oLabel, "IsisCube"); oIsisCube.Set( "_type", "object"); if( !m_osComment.empty() ) oIsisCube.Set( "_comment", m_osComment ); CPLJSONObject oCore = GetOrCreateJSONObject(oIsisCube, "Core"); if( oCore.GetType() != CPLJSONObject::Type::Object ) { oIsisCube.Delete( "Core" ); oCore = CPLJSONObject(); oIsisCube.Add("Core", oCore); } oCore.Set( "_type", "object" ); if( !m_osExternalFilename.empty() ) { if( m_poExternalDS && m_bGeoTIFFAsRegularExternal ) { if( !m_bGeoTIFFInitDone ) { reinterpret_cast<ISIS3WrapperRasterBand*>(GetRasterBand(1))-> InitFile(); } const char* pszOffset = m_poExternalDS->GetRasterBand(1)-> GetMetadataItem("BLOCK_OFFSET_0_0", "TIFF"); if( pszOffset ) { oCore.Set( "StartByte", 1 + atoi(pszOffset) ); } else { // Shouldn't happen normally CPLError(CE_Warning, CPLE_AppDefined, "Missing BLOCK_OFFSET_0_0"); m_bGeoTIFFAsRegularExternal = false; oCore.Set( "StartByte", 1 ); } } else { oCore.Set( "StartByte", 1 ); } if( !m_osExternalFilename.empty() ) { const CPLString osExternalFilename = CPLGetFilename(m_osExternalFilename); oCore.Set( "^Core", osExternalFilename ); } } else { oCore.Set( "StartByte", pszSTARTBYTE_PLACEHOLDER ); oCore.Delete( "^Core" ); } if( m_poExternalDS && !m_bGeoTIFFAsRegularExternal ) { oCore.Set( "Format", "GeoTIFF" ); oCore.Delete( "TileSamples" ); oCore.Delete( "TileLines" ); } else if( m_bIsTiled ) { oCore.Set( "Format", "Tile"); int nBlockXSize = 1, nBlockYSize = 1; GetRasterBand(1)->GetBlockSize(&nBlockXSize, &nBlockYSize); oCore.Set( "TileSamples", nBlockXSize ); oCore.Set( "TileLines", nBlockYSize ); } else { oCore.Set( "Format", "BandSequential" ); oCore.Delete( "TileSamples" ); oCore.Delete( "TileLines" ); } CPLJSONObject oDimensions = GetOrCreateJSONObject(oCore, "Dimensions"); oDimensions.Set( "_type", "group" ); oDimensions.Set( "Samples", nRasterXSize ); oDimensions.Set( "Lines", nRasterYSize ); oDimensions.Set( "Bands", nBands ); 
CPLJSONObject oPixels = GetOrCreateJSONObject(oCore, "Pixels"); oPixels.Set( "_type", "group" ); const GDALDataType eDT = GetRasterBand(1)->GetRasterDataType(); oPixels.Set( "Type", (eDT == GDT_Byte) ? "UnsignedByte" : (eDT == GDT_UInt16) ? "UnsignedWord" : (eDT == GDT_Int16) ? "SignedWord" : "Real" ); oPixels.Set( "ByteOrder", "Lsb" ); oPixels.Set( "Base", GetRasterBand(1)->GetOffset() ); oPixels.Set( "Multiplier", GetRasterBand(1)->GetScale() ); const OGRSpatialReference& oSRS = m_oSRS; if( !m_bUseSrcMapping ) { oIsisCube.Delete( "Mapping" ); } CPLJSONObject oMapping = GetOrCreateJSONObject(oIsisCube, "Mapping"); if( m_bUseSrcMapping && oMapping.IsValid() && oMapping.GetType() == CPLJSONObject::Type::Object ) { if( !m_osTargetName.empty() ) oMapping.Set( "TargetName", m_osTargetName ); if( !m_osLatitudeType.empty() ) oMapping.Set( "LatitudeType", m_osLatitudeType ); if( !m_osLongitudeDirection.empty() ) oMapping.Set( "LongitudeDirection", m_osLongitudeDirection ); } else if( !m_bUseSrcMapping && !m_oSRS.IsEmpty() ) { oMapping.Add( "_type", "group" ); if( oSRS.IsProjected() || oSRS.IsGeographic() ) { const char* pszDatum = oSRS.GetAttrValue("DATUM"); CPLString osTargetName( m_osTargetName ); if( osTargetName.empty() ) { if( pszDatum && STARTS_WITH(pszDatum, "D_") ) { osTargetName = pszDatum + 2; } else if( pszDatum ) { osTargetName = pszDatum; } } if( !osTargetName.empty() ) oMapping.Add( "TargetName", osTargetName ); oMapping.Add( "EquatorialRadius/value", oSRS.GetSemiMajor() ); oMapping.Add( "EquatorialRadius/unit", "meters" ); oMapping.Add( "PolarRadius/value", oSRS.GetSemiMinor() ); oMapping.Add( "PolarRadius/unit", "meters" ); if( !m_osLatitudeType.empty() ) oMapping.Add( "LatitudeType", m_osLatitudeType ); else oMapping.Add( "LatitudeType", "Planetocentric" ); if( !m_osLongitudeDirection.empty() ) oMapping.Add( "LongitudeDirection", m_osLongitudeDirection ); else oMapping.Add( "LongitudeDirection", "PositiveEast" ); double adfX[4] = {0}; double adfY[4] = 
{0}; bool bLongLatCorners = false; if( m_bGotTransform ) { for( int i = 0; i < 4; i++ ) { adfX[i] = m_adfGeoTransform[0] + (i%2) * nRasterXSize * m_adfGeoTransform[1]; adfY[i] = m_adfGeoTransform[3] + ( (i == 0 || i == 3) ? 0 : 1 ) * nRasterYSize * m_adfGeoTransform[5]; } if( oSRS.IsGeographic() ) { bLongLatCorners = true; } else { OGRSpatialReference* poSRSLongLat = oSRS.CloneGeogCS(); if( poSRSLongLat ) { poSRSLongLat->SetAxisMappingStrategy(OAMS_TRADITIONAL_GIS_ORDER); OGRCoordinateTransformation* poCT = OGRCreateCoordinateTransformation(&oSRS, poSRSLongLat); if( poCT ) { if( poCT->Transform(4, adfX, adfY) ) { bLongLatCorners = true; } delete poCT; } delete poSRSLongLat; } } } if( bLongLatCorners ) { for( int i = 0; i < 4; i++ ) { adfX[i] = FixLong(adfX[i]); } } if( bLongLatCorners && ( m_bForce360 || adfX[0] <- 180.0 || adfX[3] > 180.0) ) { oMapping.Add( "LongitudeDomain", 360 ); } else { oMapping.Add( "LongitudeDomain", 180 ); } if( m_bWriteBoundingDegrees && !m_osBoundingDegrees.empty() ) { char** papszTokens = CSLTokenizeString2(m_osBoundingDegrees, ",", 0); if( CSLCount(papszTokens) == 4 ) { oMapping.Add( "MinimumLatitude", CPLAtof(papszTokens[1]) ); oMapping.Add( "MinimumLongitude", CPLAtof(papszTokens[0]) ); oMapping.Add( "MaximumLatitude", CPLAtof(papszTokens[3]) ); oMapping.Add( "MaximumLongitude", CPLAtof(papszTokens[2]) ); } CSLDestroy(papszTokens); } else if( m_bWriteBoundingDegrees && bLongLatCorners ) { oMapping.Add( "MinimumLatitude", std::min( std::min(adfY[0], adfY[1]), std::min(adfY[2],adfY[3])) ); oMapping.Add( "MinimumLongitude", std::min( std::min(adfX[0], adfX[1]), std::min(adfX[2],adfX[3])) ); oMapping.Add( "MaximumLatitude", std::max( std::max(adfY[0], adfY[1]), std::max(adfY[2],adfY[3])) ); oMapping.Add( "MaximumLongitude", std::max( std::max(adfX[0], adfX[1]), std::max(adfX[2],adfX[3])) ); } const char* pszProjection = oSRS.GetAttrValue("PROJECTION"); if( pszProjection == nullptr ) { oMapping.Add( "ProjectionName", "SimpleCylindrical" 
); oMapping.Add( "CenterLongitude", 0.0 ); oMapping.Add( "CenterLatitude", 0.0 ); oMapping.Add( "CenterLatitudeRadius", oSRS.GetSemiMajor() ); } else if( EQUAL(pszProjection, SRS_PT_EQUIRECTANGULAR) ) { oMapping.Add( "ProjectionName", "Equirectangular" ); if( oSRS.GetNormProjParm( SRS_PP_LATITUDE_OF_ORIGIN, 0.0 ) != 0.0 ) { CPLError(CE_Warning, CPLE_NotSupported, "Ignoring %s. Only 0 value supported", SRS_PP_LATITUDE_OF_ORIGIN); } oMapping.Add( "CenterLongitude", FixLong(oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); const double dfCenterLat = oSRS.GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0); oMapping.Add( "CenterLatitude", dfCenterLat ); // in radians const double radLat = dfCenterLat * M_PI / 180; const double semi_major = oSRS.GetSemiMajor(); const double semi_minor = oSRS.GetSemiMinor(); const double localRadius = semi_major * semi_minor / sqrt( pow( semi_minor * cos( radLat ), 2) + pow( semi_major * sin( radLat ), 2) ); oMapping.Add( "CenterLatitudeRadius", localRadius ); } else if( EQUAL(pszProjection, SRS_PT_ORTHOGRAPHIC) ) { oMapping.Add( "ProjectionName", "Orthographic" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0) ); } else if( EQUAL(pszProjection, SRS_PT_SINUSOIDAL) ) { oMapping.Add( "ProjectionName", "Sinusoidal" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_LONGITUDE_OF_CENTER, 0.0)) ); } else if( EQUAL(pszProjection, SRS_PT_MERCATOR_1SP) ) { oMapping.Add( "ProjectionName", "Mercator" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0) ); oMapping.Add( "scaleFactor", oSRS.GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0) ); } else if( EQUAL(pszProjection, SRS_PT_POLAR_STEREOGRAPHIC) ) { oMapping.Add( "ProjectionName", "PolarStereographic" ); 
oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0) ); oMapping.Add( "scaleFactor", oSRS.GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0) ); } else if( EQUAL(pszProjection, SRS_PT_TRANSVERSE_MERCATOR) ) { oMapping.Add( "ProjectionName", "TransverseMercator" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0) ); oMapping.Add( "scaleFactor", oSRS.GetNormProjParm(SRS_PP_SCALE_FACTOR, 1.0) ); } else if( EQUAL(pszProjection, SRS_PT_LAMBERT_CONFORMAL_CONIC_2SP) ) { oMapping.Add( "ProjectionName", "LambertConformal" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm(SRS_PP_CENTRAL_MERIDIAN, 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm(SRS_PP_LATITUDE_OF_ORIGIN, 0.0) ); oMapping.Add( "FirstStandardParallel", oSRS.GetNormProjParm(SRS_PP_STANDARD_PARALLEL_1, 0.0) ); oMapping.Add( "SecondStandardParallel", oSRS.GetNormProjParm(SRS_PP_STANDARD_PARALLEL_2, 0.0) ); } else if( EQUAL(pszProjection, "Vertical Perspective") ) // PROJ 7 required { oMapping.Add( "ProjectionName", "PointPerspective" ); oMapping.Add( "CenterLongitude", FixLong( oSRS.GetNormProjParm("Longitude of topocentric origin", 0.0)) ); oMapping.Add( "CenterLatitude", oSRS.GetNormProjParm("Latitude of topocentric origin", 0.0) ); // ISIS3 value is the distance from center of ellipsoid, in km oMapping.Add( "Distance", (oSRS.GetNormProjParm("Viewpoint height", 0.0) + oSRS.GetSemiMajor()) / 1000.0 ); } else if( EQUAL(pszProjection, "custom_proj4") ) { const char* pszProj4 = oSRS.GetExtension("PROJCS", "PROJ4", nullptr); if( pszProj4 && strstr(pszProj4, "+proj=ob_tran" ) && strstr(pszProj4, "+o_proj=eqc") ) { const auto FetchParam = [](const char* pszProj4Str, const char* pszKey) { CPLString needle; needle.Printf("+%s=", pszKey); const 
char* pszVal = strstr(pszProj4Str, needle.c_str()); if( pszVal ) return CPLAtof(pszVal+needle.size()); return 0.0; }; double dfLonP = FetchParam(pszProj4, "o_lon_p"); double dfLatP = FetchParam(pszProj4, "o_lat_p"); double dfLon0 = FetchParam(pszProj4, "lon_0"); double dfPoleRotation = -dfLonP; double dfPoleLatitude = 180 - dfLatP; double dfPoleLongitude = dfLon0; oMapping.Add( "ProjectionName", "ObliqueCylindrical" ); oMapping.Add( "PoleLatitude", dfPoleLatitude ); oMapping.Add( "PoleLongitude", FixLong(dfPoleLongitude) ); oMapping.Add( "PoleRotation", dfPoleRotation ); } else { CPLError(CE_Warning, CPLE_NotSupported, "Projection %s not supported", pszProjection); } } else { CPLError(CE_Warning, CPLE_NotSupported, "Projection %s not supported", pszProjection); } if( oMapping["ProjectionName"].IsValid() ) { if( oSRS.GetNormProjParm( SRS_PP_FALSE_EASTING, 0.0 ) != 0.0 ) { CPLError(CE_Warning, CPLE_NotSupported, "Ignoring %s. Only 0 value supported", SRS_PP_FALSE_EASTING); } if( oSRS.GetNormProjParm( SRS_PP_FALSE_NORTHING, 0.0 ) != 0.0 ) { CPLError(CE_Warning, CPLE_AppDefined, "Ignoring %s. Only 0 value supported", SRS_PP_FALSE_NORTHING); } } } else { CPLError(CE_Warning, CPLE_NotSupported, "SRS not supported"); } } if( !m_bUseSrcMapping && m_bGotTransform ) { oMapping.Add( "_type", "group" ); const double dfDegToMeter = oSRS.GetSemiMajor() * M_PI / 180.0; if( !m_oSRS.IsEmpty() && oSRS.IsProjected() ) { const double dfLinearUnits = oSRS.GetLinearUnits(); // Maybe we should deal differently with non meter units ? 
const double dfRes = m_adfGeoTransform[1] * dfLinearUnits; const double dfScale = dfDegToMeter / dfRes; oMapping.Add( "UpperLeftCornerX", m_adfGeoTransform[0] ); oMapping.Add( "UpperLeftCornerY", m_adfGeoTransform[3] ); oMapping.Add( "PixelResolution/value", dfRes ); oMapping.Add( "PixelResolution/unit", "meters/pixel" ); oMapping.Add( "Scale/value", dfScale ); oMapping.Add( "Scale/unit", "pixels/degree" ); } else if( !m_oSRS.IsEmpty() && oSRS.IsGeographic() ) { const double dfScale = 1.0 / m_adfGeoTransform[1]; const double dfRes = m_adfGeoTransform[1] * dfDegToMeter; oMapping.Add( "UpperLeftCornerX", m_adfGeoTransform[0] * dfDegToMeter ); oMapping.Add( "UpperLeftCornerY", m_adfGeoTransform[3] * dfDegToMeter ); oMapping.Add( "PixelResolution/value", dfRes ); oMapping.Add( "PixelResolution/unit", "meters/pixel" ); oMapping.Add( "Scale/value", dfScale ); oMapping.Add( "Scale/unit", "pixels/degree" ); } else { oMapping.Add( "UpperLeftCornerX", m_adfGeoTransform[0] ); oMapping.Add( "UpperLeftCornerY", m_adfGeoTransform[3] ); oMapping.Add( "PixelResolution", m_adfGeoTransform[1] ); } } CPLJSONObject oLabelLabel = GetOrCreateJSONObject(oLabel, "Label"); oLabelLabel.Set( "_type", "object" ); oLabelLabel.Set( "Bytes", pszLABEL_BYTES_PLACEHOLDER ); // Deal with History object BuildHistory(); oLabel.Delete( "History" ); if( !m_osHistory.empty() ) { CPLJSONObject oHistory; oHistory.Add( "_type", "object" ); oHistory.Add( "Name", "IsisCube" ); if( m_osExternalFilename.empty() ) oHistory.Add( "StartByte", pszHISTORY_STARTBYTE_PLACEHOLDER ); else oHistory.Add( "StartByte", 1 ); oHistory.Add( "Bytes", static_cast<GIntBig>(m_osHistory.size()) ); if( !m_osExternalFilename.empty() ) { CPLString osFilename(CPLGetBasename(GetDescription())); osFilename += ".History.IsisCube"; oHistory.Add( "^History", osFilename ); } oLabel.Add( "History", oHistory ); } // Deal with other objects that have StartByte & Bytes m_aoNonPixelSections.clear(); if( m_oSrcJSonLabel.IsValid() ) { CPLString 
osLabelSrcFilename; CPLJSONObject oFilename = oLabel["_filename"]; if( oFilename.GetType() == CPLJSONObject::Type::String ) { osLabelSrcFilename = oFilename.ToString(); } for( CPLJSONObject& oObj : oLabel.GetChildren() ) { CPLString osKey = oObj.GetName(); if( osKey == "History" ) { continue; } CPLJSONObject oBytes = oObj.GetObj( "Bytes" ); if( oBytes.GetType() != CPLJSONObject::Type::Integer || oBytes.ToInteger() <= 0 ) { continue; } CPLJSONObject oStartByte = oObj.GetObj( "StartByte" ); if( oStartByte.GetType() != CPLJSONObject::Type::Integer || oStartByte.ToInteger() <= 0 ) { continue; } if( osLabelSrcFilename.empty() ) { CPLError(CE_Warning, CPLE_AppDefined, "Cannot find _filename attribute in " "source ISIS3 metadata. Removing object " "%s from the label.", osKey.c_str()); oLabel.Delete( osKey ); continue; } NonPixelSection oSection; oSection.osSrcFilename = osLabelSrcFilename; oSection.nSrcOffset = static_cast<vsi_l_offset>( oObj.GetInteger("StartByte")) - 1U; oSection.nSize = static_cast<vsi_l_offset>( oObj.GetInteger("Bytes")); CPLString osName; CPLJSONObject oName = oObj.GetObj( "Name" ); if( oName.GetType() == CPLJSONObject::Type::String ) { osName = oName.ToString(); } CPLString osContainerName(osKey); CPLJSONObject oContainerName = oObj.GetObj( "_container_name" ); if( oContainerName.GetType() == CPLJSONObject::Type::String ) { osContainerName = oContainerName.ToString(); } const CPLString osKeyFilename( "^" + osContainerName ); CPLJSONObject oFilenameCap = oObj.GetObj( osKeyFilename ); if( oFilenameCap.GetType() == CPLJSONObject::Type::String ) { VSIStatBufL sStat; const CPLString osSrcFilename( CPLFormFilename( CPLGetPath(osLabelSrcFilename), oFilenameCap.ToString().c_str(), nullptr ) ); if( VSIStatL( osSrcFilename, &sStat ) == 0 ) { oSection.osSrcFilename = osSrcFilename; } else { CPLError(CE_Warning, CPLE_AppDefined, "Object %s points to %s, which does " "not exist. 
Removing this section " "from the label", osKey.c_str(), osSrcFilename.c_str()); oLabel.Delete( osKey ); continue; } } if( !m_osExternalFilename.empty() ) { oObj.Set( "StartByte", 1 ); } else { CPLString osPlaceHolder; osPlaceHolder.Printf( "!*^PLACEHOLDER_%d_STARTBYTE^*!", static_cast<int>(m_aoNonPixelSections.size()) + 1); oObj.Set( "StartByte", osPlaceHolder ); oSection.osPlaceHolder = osPlaceHolder; } if( !m_osExternalFilename.empty() ) { CPLString osDstFilename( CPLGetBasename(GetDescription()) ); osDstFilename += "."; osDstFilename += osContainerName; if( !osName.empty() ) { osDstFilename += "."; osDstFilename += osName; } oSection.osDstFilename = CPLFormFilename( CPLGetPath( GetDescription() ), osDstFilename, nullptr ); oObj.Set( osKeyFilename, osDstFilename ); } else { oObj.Delete( osKeyFilename ); } m_aoNonPixelSections.push_back(oSection); } } m_oJSonLabel = oLabel; } /************************************************************************/ /* BuildHistory() */ /************************************************************************/ void ISIS3Dataset::BuildHistory() { CPLString osHistory; if( m_oSrcJSonLabel.IsValid() && m_bUseSrcHistory ) { vsi_l_offset nHistoryOffset = 0; int nHistorySize = 0; CPLString osSrcFilename; CPLJSONObject oFilename = m_oSrcJSonLabel["_filename"]; if( oFilename.GetType() == CPLJSONObject::Type::String ) { osSrcFilename = oFilename.ToString(); } CPLString osHistoryFilename(osSrcFilename); CPLJSONObject oHistory = m_oSrcJSonLabel["History"]; if( oHistory.GetType() == CPLJSONObject::Type::Object ) { CPLJSONObject oHistoryFilename = oHistory["^History"]; if( oHistoryFilename.GetType() == CPLJSONObject::Type::String ) { osHistoryFilename = CPLFormFilename( CPLGetPath(osSrcFilename), oHistoryFilename.ToString().c_str(), nullptr ); } CPLJSONObject oStartByte = oHistory["StartByte"]; if( oStartByte.GetType() == CPLJSONObject::Type::Integer ) { if( oStartByte.ToInteger() > 0 ) { nHistoryOffset = static_cast<vsi_l_offset>( 
oStartByte.ToInteger()) - 1U; } } CPLJSONObject oBytes = oHistory["Bytes"]; if( oBytes.GetType() == CPLJSONObject::Type::Integer ) { nHistorySize = static_cast<int>( oBytes.ToInteger() ); } } if( osHistoryFilename.empty() ) { CPLDebug("ISIS3", "Cannot find filename for source history"); } else if( nHistorySize <= 0 || nHistorySize > 1000000 ) { CPLDebug("ISIS3", "Invalid or missing value for History.Bytes " "for source history"); } else { VSILFILE* fpHistory = VSIFOpenL(osHistoryFilename, "rb"); if( fpHistory != nullptr ) { VSIFSeekL(fpHistory, nHistoryOffset, SEEK_SET); osHistory.resize( nHistorySize ); if( VSIFReadL( &osHistory[0], nHistorySize, 1, fpHistory ) != 1 ) { CPLError(CE_Warning, CPLE_FileIO, "Cannot read %d bytes at offset " CPL_FRMT_GUIB "of %s: history will not be preserved", nHistorySize, nHistoryOffset, osHistoryFilename.c_str()); osHistory.clear(); } VSIFCloseL(fpHistory); } else { CPLError(CE_Warning, CPLE_FileIO, "Cannot open %s: history will not be preserved", osHistoryFilename.c_str()); } } } if( m_bAddGDALHistory && !m_osGDALHistory.empty() ) { if( !osHistory.empty() ) osHistory += "\n"; osHistory += m_osGDALHistory; } else if( m_bAddGDALHistory ) { if( !osHistory.empty() ) osHistory += "\n"; CPLJSONObject oHistoryObj; char szFullFilename[2048] = { 0 }; if( !CPLGetExecPath(szFullFilename, sizeof(szFullFilename) - 1) ) strcpy(szFullFilename, "unknown_program"); const CPLString osProgram(CPLGetBasename(szFullFilename)); const CPLString osPath(CPLGetPath(szFullFilename)); CPLJSONObject oObj; oHistoryObj.Add( osProgram, oObj ); oObj.Add( "_type", "object" ); oObj.Add( "GdalVersion", GDALVersionInfo("RELEASE_NAME") ); if( osPath != "." 
) oObj.Add( "ProgramPath", osPath ); time_t nCurTime = time(nullptr); if( nCurTime != -1 ) { struct tm mytm; CPLUnixTimeToYMDHMS(nCurTime, &mytm); oObj.Add( "ExecutionDateTime", CPLSPrintf("%04d-%02d-%02dT%02d:%02d:%02d", mytm.tm_year + 1900, mytm.tm_mon + 1, mytm.tm_mday, mytm.tm_hour, mytm.tm_min, mytm.tm_sec) ); } char szHostname[256] = { 0 }; if( gethostname(szHostname, sizeof(szHostname)-1) == 0 ) { oObj.Add( "HostName", std::string(szHostname) ); } const char* pszUsername = CPLGetConfigOption("USERNAME", nullptr); if( pszUsername == nullptr ) pszUsername = CPLGetConfigOption("USER", nullptr); if( pszUsername != nullptr ) { oObj.Add( "UserName", pszUsername ); } oObj.Add( "Description", "GDAL conversion" ); CPLJSONObject oUserParameters; oObj.Add( "UserParameters", oUserParameters ); oUserParameters.Add( "_type", "group"); if( !m_osFromFilename.empty() ) { const CPLString osFromFilename = CPLGetFilename( m_osFromFilename ); oUserParameters.Add( "FROM", osFromFilename ); } if( nullptr != GetDescription() ) { const CPLString osToFileName = CPLGetFilename( GetDescription() ); oUserParameters.Add( "TO", osToFileName ); } if( m_bForce360 ) oUserParameters.Add( "Force_360", "true"); osHistory += SerializeAsPDL( oHistoryObj ); } m_osHistory = osHistory; } /************************************************************************/ /* WriteLabel() */ /************************************************************************/ void ISIS3Dataset::WriteLabel() { m_bIsLabelWritten = true; if( !m_oJSonLabel.IsValid() ) BuildLabel(); // Serialize label CPLString osLabel( SerializeAsPDL(m_oJSonLabel) ); osLabel += "End\n"; if( m_osExternalFilename.empty() && osLabel.size() < 65536 ) { // In-line labels have conventionally a minimize size of 65536 bytes // See #2741 osLabel.resize(65536); } char *pszLabel = &osLabel[0]; const int nLabelSize = static_cast<int>(osLabel.size()); // Hack back StartByte value { char *pszStartByte = strstr(pszLabel, pszSTARTBYTE_PLACEHOLDER); if( 
pszStartByte != nullptr ) { const char* pszOffset = CPLSPrintf("%d", 1 + nLabelSize); memcpy(pszStartByte, pszOffset, strlen(pszOffset)); memset(pszStartByte + strlen(pszOffset), ' ', strlen(pszSTARTBYTE_PLACEHOLDER) - strlen(pszOffset)); } } // Hack back Label.Bytes value { char* pszLabelBytes = strstr(pszLabel, pszLABEL_BYTES_PLACEHOLDER); if( pszLabelBytes != nullptr ) { const char* pszBytes = CPLSPrintf("%d", nLabelSize); memcpy(pszLabelBytes, pszBytes, strlen(pszBytes)); memset(pszLabelBytes + strlen(pszBytes), ' ', strlen(pszLABEL_BYTES_PLACEHOLDER) - strlen(pszBytes)); } } const GDALDataType eType = GetRasterBand(1)->GetRasterDataType(); const int nDTSize = GDALGetDataTypeSizeBytes(eType); vsi_l_offset nImagePixels = 0; if( m_poExternalDS == nullptr ) { if( m_bIsTiled ) { int nBlockXSize = 1, nBlockYSize = 1; GetRasterBand(1)->GetBlockSize(&nBlockXSize, &nBlockYSize); nImagePixels = static_cast<vsi_l_offset>(nBlockXSize) * nBlockYSize * nBands * DIV_ROUND_UP(nRasterXSize, nBlockXSize) * DIV_ROUND_UP(nRasterYSize, nBlockYSize); } else { nImagePixels = static_cast<vsi_l_offset>(nRasterXSize) * nRasterYSize * nBands; } } // Hack back History.StartBytes value char* pszHistoryStartBytes = strstr(pszLabel, pszHISTORY_STARTBYTE_PLACEHOLDER); vsi_l_offset nHistoryOffset = 0; vsi_l_offset nLastOffset = 0; if( pszHistoryStartBytes != nullptr ) { CPLAssert( m_osExternalFilename.empty() ); nHistoryOffset = nLabelSize + nImagePixels * nDTSize; nLastOffset = nHistoryOffset + m_osHistory.size(); const char* pszStartByte = CPLSPrintf(CPL_FRMT_GUIB, nHistoryOffset + 1); CPLAssert(strlen(pszStartByte) < strlen(pszHISTORY_STARTBYTE_PLACEHOLDER)); memcpy(pszHistoryStartBytes, pszStartByte, strlen(pszStartByte)); memset(pszHistoryStartBytes + strlen(pszStartByte), ' ', strlen(pszHISTORY_STARTBYTE_PLACEHOLDER) - strlen(pszStartByte)); } // Replace placeholders in other sections for( size_t i = 0; i < m_aoNonPixelSections.size(); ++i ) { if( 
!m_aoNonPixelSections[i].osPlaceHolder.empty() ) { char* pszPlaceHolder = strstr(pszLabel, m_aoNonPixelSections[i].osPlaceHolder.c_str()); CPLAssert( pszPlaceHolder != nullptr ); const char* pszStartByte = CPLSPrintf(CPL_FRMT_GUIB, nLastOffset + 1); nLastOffset += m_aoNonPixelSections[i].nSize; CPLAssert(strlen(pszStartByte) < m_aoNonPixelSections[i].osPlaceHolder.size() ); memcpy(pszPlaceHolder, pszStartByte, strlen(pszStartByte)); memset(pszPlaceHolder + strlen(pszStartByte), ' ', m_aoNonPixelSections[i].osPlaceHolder.size() - strlen(pszStartByte)); } } // Write to final file VSIFSeekL( m_fpLabel, 0, SEEK_SET ); VSIFWriteL( pszLabel, 1, osLabel.size(), m_fpLabel); if( m_osExternalFilename.empty() ) { // Update image offset in bands if( m_bIsTiled ) { for(int i=0;i<nBands;i++) { ISISTiledBand* poBand = reinterpret_cast<ISISTiledBand*>(GetRasterBand(i+1)); poBand->m_nFirstTileOffset += nLabelSize; } } else { for(int i=0;i<nBands;i++) { ISIS3RawRasterBand* poBand = reinterpret_cast<ISIS3RawRasterBand*>(GetRasterBand(i+1)); poBand->nImgOffset += nLabelSize; } } } if( m_bInitToNodata ) { // Initialize the image to nodata const double dfNoData = GetRasterBand(1)->GetNoDataValue(); if( dfNoData == 0.0 ) { VSIFTruncateL( m_fpImage, VSIFTellL(m_fpImage) + nImagePixels * nDTSize ); } else if( nDTSize != 0 ) // to make Coverity not warn about div by 0 { const int nPageSize = 4096; // Must be multiple of 4 since // Float32 is the largest type CPLAssert( (nPageSize % nDTSize) == 0 ); const int nMaxPerPage = nPageSize / nDTSize; GByte* pabyTemp = static_cast<GByte*>(CPLMalloc(nPageSize)); GDALCopyWords( &dfNoData, GDT_Float64, 0, pabyTemp, eType, nDTSize, nMaxPerPage ); #ifdef CPL_MSB GDALSwapWords( pabyTemp, nDTSize, nMaxPerPage, nDTSize ); #endif for( vsi_l_offset i = 0; i < nImagePixels; i += nMaxPerPage ) { int n; if( i + nMaxPerPage <= nImagePixels ) n = nMaxPerPage; else n = static_cast<int>(nImagePixels - i); if( VSIFWriteL( pabyTemp, n * nDTSize, 1, m_fpImage ) != 1 ) 
{ CPLError(CE_Failure, CPLE_FileIO, "Cannot initialize imagery to null"); break; } } CPLFree( pabyTemp ); } } // Write history if( !m_osHistory.empty() ) { if( m_osExternalFilename.empty() ) { VSIFSeekL( m_fpLabel, nHistoryOffset, SEEK_SET ); VSIFWriteL( m_osHistory.c_str(), 1, m_osHistory.size(), m_fpLabel); } else { CPLString osFilename(CPLGetBasename(GetDescription())); osFilename += ".History.IsisCube"; osFilename = CPLFormFilename(CPLGetPath(GetDescription()), osFilename, nullptr); VSILFILE* fp = VSIFOpenL(osFilename, "wb"); if( fp ) { m_aosAdditionalFiles.AddString(osFilename); VSIFWriteL( m_osHistory.c_str(), 1, m_osHistory.size(), fp ); VSIFCloseL(fp); } else { CPLError(CE_Warning, CPLE_FileIO, "Cannot write %s", osFilename.c_str()); } } } // Write other non pixel sections for( size_t i = 0; i < m_aoNonPixelSections.size(); ++i ) { VSILFILE* fpSrc = VSIFOpenL( m_aoNonPixelSections[i].osSrcFilename, "rb"); if( fpSrc == nullptr ) { CPLError(CE_Warning, CPLE_FileIO, "Cannot open %s", m_aoNonPixelSections[i].osSrcFilename.c_str()); continue; } VSILFILE* fpDest = m_fpLabel; if( !m_aoNonPixelSections[i].osDstFilename.empty() ) { fpDest = VSIFOpenL(m_aoNonPixelSections[i].osDstFilename, "wb"); if( fpDest == nullptr ) { CPLError(CE_Warning, CPLE_FileIO, "Cannot create %s", m_aoNonPixelSections[i].osDstFilename.c_str()); VSIFCloseL(fpSrc); continue; } m_aosAdditionalFiles.AddString( m_aoNonPixelSections[i].osDstFilename); } VSIFSeekL(fpSrc, m_aoNonPixelSections[i].nSrcOffset, SEEK_SET); GByte abyBuffer[4096]; vsi_l_offset nRemaining = m_aoNonPixelSections[i].nSize; while( nRemaining ) { size_t nToRead = 4096; if( nRemaining < nToRead ) nToRead = static_cast<size_t>(nRemaining); size_t nRead = VSIFReadL( abyBuffer, 1, nToRead, fpSrc ); if( nRead != nToRead ) { CPLError(CE_Warning, CPLE_FileIO, "Could not read " CPL_FRMT_GUIB " bytes from %s", m_aoNonPixelSections[i].nSize, m_aoNonPixelSections[i].osSrcFilename.c_str()); break; } VSIFWriteL( abyBuffer, 1, nRead, 
fpDest ); nRemaining -= nRead; } VSIFCloseL( fpSrc ); if( fpDest != m_fpLabel ) VSIFCloseL(fpDest); } } /************************************************************************/ /* SerializeAsPDL() */ /************************************************************************/ CPLString ISIS3Dataset::SerializeAsPDL( const CPLJSONObject &oObj ) { CPLString osTmpFile( CPLSPrintf("/vsimem/isis3_%p", oObj.GetInternalHandle()) ); VSILFILE* fpTmp = VSIFOpenL( osTmpFile, "wb+" ); SerializeAsPDL( fpTmp, oObj ); VSIFCloseL( fpTmp ); CPLString osContent( reinterpret_cast<char*>( VSIGetMemFileBuffer( osTmpFile, nullptr, FALSE )) ); VSIUnlink(osTmpFile); return osContent; } /************************************************************************/ /* SerializeAsPDL() */ /************************************************************************/ void ISIS3Dataset::SerializeAsPDL( VSILFILE* fp, const CPLJSONObject &oObj, int nDepth ) { CPLString osIndentation; for( int i = 0; i < nDepth; i++ ) osIndentation += " "; const size_t WIDTH = 79; std::vector<CPLJSONObject> aoChildren = oObj.GetChildren(); size_t nMaxKeyLength = 0; for( const CPLJSONObject& oChild : aoChildren ) { const CPLString osKey = oChild.GetName(); if( EQUAL(osKey, "_type") || EQUAL(osKey, "_container_name") || EQUAL(osKey, "_filename") ) { continue; } const auto eType = oChild.GetType(); if( eType == CPLJSONObject::Type::String || eType == CPLJSONObject::Type::Integer || eType == CPLJSONObject::Type::Double || eType == CPLJSONObject::Type::Array ) { if( osKey.size() > nMaxKeyLength ) { nMaxKeyLength = osKey.size(); } } else if( eType == CPLJSONObject::Type::Object ) { CPLJSONObject oValue = oChild.GetObj( "value" ); CPLJSONObject oUnit = oChild.GetObj( "unit" ); if( oValue.IsValid() && oUnit.GetType() == CPLJSONObject::Type::String ) { if( osKey.size() > nMaxKeyLength ) { nMaxKeyLength = osKey.size(); } } } } for( const CPLJSONObject& oChild : aoChildren ) { const CPLString osKey = oChild.GetName(); if( 
EQUAL(osKey, "_type") || EQUAL(osKey, "_container_name") || EQUAL(osKey, "_filename") ) { continue; } if( STARTS_WITH(osKey, "_comment") ) { if( oChild.GetType() == CPLJSONObject::Type::String ) { VSIFPrintfL(fp, "#%s\n", oChild.ToString().c_str() ); } continue; } CPLString osPadding; size_t nLen = osKey.size(); if( nLen < nMaxKeyLength ) { osPadding.append( nMaxKeyLength - nLen, ' ' ); } const auto eType = oChild.GetType(); if( eType == CPLJSONObject::Type::Object ) { CPLJSONObject oType = oChild.GetObj( "_type" ); CPLJSONObject oContainerName = oChild.GetObj( "_container_name" ); CPLString osContainerName = osKey; if( oContainerName.GetType() == CPLJSONObject::Type::String ) { osContainerName = oContainerName.ToString(); } if( oType.GetType() == CPLJSONObject::Type::String ) { const CPLString osType = oType.ToString(); if( EQUAL(osType, "Object") ) { if( nDepth == 0 && VSIFTellL(fp) != 0 ) VSIFPrintfL(fp, "\n"); VSIFPrintfL(fp, "%sObject = %s\n", osIndentation.c_str(), osContainerName.c_str()); SerializeAsPDL( fp, oChild, nDepth + 1 ); VSIFPrintfL(fp, "%sEnd_Object\n", osIndentation.c_str()); } else if( EQUAL(osType, "Group") ) { VSIFPrintfL(fp, "\n"); VSIFPrintfL(fp, "%sGroup = %s\n", osIndentation.c_str(), osContainerName.c_str()); SerializeAsPDL( fp, oChild, nDepth + 1 ); VSIFPrintfL(fp, "%sEnd_Group\n", osIndentation.c_str()); } } else { CPLJSONObject oValue = oChild.GetObj( "value" ); CPLJSONObject oUnit = oChild.GetObj( "unit" ); if( oValue.IsValid() && oUnit.GetType() == CPLJSONObject::Type::String ) { const CPLString osUnit = oUnit.ToString(); const auto eValueType = oValue.GetType(); if( eValueType == CPLJSONObject::Type::Integer ) { VSIFPrintfL(fp, "%s%s%s = %d <%s>\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), oValue.ToInteger(), osUnit.c_str()); } else if( eValueType == CPLJSONObject::Type::Double ) { const double dfVal = oValue.ToDouble(); if( dfVal >= INT_MIN && dfVal <= INT_MAX && static_cast<int>(dfVal) == dfVal ) { VSIFPrintfL(fp, 
"%s%s%s = %d.0 <%s>\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), static_cast<int>(dfVal), osUnit.c_str()); } else { VSIFPrintfL(fp, "%s%s%s = %.18g <%s>\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), dfVal, osUnit.c_str()); } } } } } else if( eType == CPLJSONObject::Type::String ) { CPLString osVal = oChild.ToString(); const char* pszVal = osVal.c_str(); if( pszVal[0] == '\0' || strchr(pszVal, ' ') || strstr(pszVal, "\\n") || strstr(pszVal, "\\r") ) { osVal.replaceAll("\\n", "\n"); osVal.replaceAll("\\r", "\r"); VSIFPrintfL(fp, "%s%s%s = \"%s\"\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), osVal.c_str()); } else { if( osIndentation.size() + osKey.size() + osPadding.size() + strlen(" = ") + strlen(pszVal) > WIDTH && osIndentation.size() + osKey.size() + osPadding.size() + strlen(" = ") < WIDTH ) { size_t nFirstPos = osIndentation.size() + osKey.size() + osPadding.size() + strlen(" = "); VSIFPrintfL(fp, "%s%s%s = ", osIndentation.c_str(), osKey.c_str(), osPadding.c_str()); size_t nCurPos = nFirstPos; for( int j = 0; pszVal[j] != '\0'; j++ ) { nCurPos ++; if( nCurPos == WIDTH && pszVal[j+1] != '\0' ) { VSIFPrintfL( fp, "-\n" ); for( size_t k=0;k<nFirstPos;k++ ) { const char chSpace = ' '; VSIFWriteL(&chSpace, 1, 1, fp); } nCurPos = nFirstPos + 1; } VSIFWriteL( &pszVal[j], 1, 1, fp ); } VSIFPrintfL(fp, "\n"); } else { VSIFPrintfL(fp, "%s%s%s = %s\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), pszVal); } } } else if( eType == CPLJSONObject::Type::Integer ) { const int nVal = oChild.ToInteger(); VSIFPrintfL(fp, "%s%s%s = %d\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), nVal); } else if( eType == CPLJSONObject::Type::Double ) { const double dfVal = oChild.ToDouble(); if( dfVal >= INT_MIN && dfVal <= INT_MAX && static_cast<int>(dfVal) == dfVal ) { VSIFPrintfL(fp, "%s%s%s = %d.0\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), static_cast<int>(dfVal)); } else { VSIFPrintfL(fp, "%s%s%s = 
%.18g\n", osIndentation.c_str(), osKey.c_str(), osPadding.c_str(), dfVal); } } else if( eType == CPLJSONObject::Type::Array ) { CPLJSONArray oArrayItem(oChild); const int nLength = oArrayItem.Size(); size_t nFirstPos = osIndentation.size() + osKey.size() + osPadding.size() + strlen(" = ("); VSIFPrintfL(fp, "%s%s%s = (", osIndentation.c_str(), osKey.c_str(), osPadding.c_str()); size_t nCurPos = nFirstPos; for( int idx = 0; idx < nLength; idx++ ) { CPLJSONObject oItem = oArrayItem[idx]; const auto eArrayItemType = oItem.GetType(); if( eArrayItemType == CPLJSONObject::Type::String ) { CPLString osVal = oItem.ToString(); const char* pszVal = osVal.c_str(); if( pszVal[0] == '\0' || strchr(pszVal, ' ') || strstr(pszVal, "\\n") || strstr(pszVal, "\\r") ) { osVal.replaceAll("\\n", "\n"); osVal.replaceAll("\\r", "\r"); VSIFPrintfL(fp, "\"%s\"", osVal.c_str()); } else if( nFirstPos < WIDTH && nCurPos + strlen(pszVal) > WIDTH ) { if( idx > 0 ) { VSIFPrintfL( fp, "\n" ); for( size_t j=0;j<nFirstPos;j++ ) { const char chSpace = ' '; VSIFWriteL(&chSpace, 1, 1, fp); } nCurPos = nFirstPos; } for( int j = 0; pszVal[j] != '\0'; j++ ) { nCurPos ++; if( nCurPos == WIDTH && pszVal[j+1] != '\0' ) { VSIFPrintfL( fp, "-\n" ); for( size_t k=0;k<nFirstPos;k++ ) { const char chSpace = ' '; VSIFWriteL(&chSpace, 1, 1, fp); } nCurPos = nFirstPos + 1; } VSIFWriteL( &pszVal[j], 1, 1, fp ); } } else { VSIFPrintfL( fp, "%s", pszVal ); nCurPos += strlen(pszVal); } } else if( eArrayItemType == CPLJSONObject::Type::Integer ) { const int nVal = oItem.ToInteger(); const char* pszVal = CPLSPrintf("%d", nVal); const size_t nValLen = strlen(pszVal); if( nFirstPos < WIDTH && idx > 0 && nCurPos + nValLen > WIDTH ) { VSIFPrintfL( fp, "\n" ); for( size_t j=0;j<nFirstPos;j++ ) { const char chSpace = ' '; VSIFWriteL(&chSpace, 1, 1, fp); } nCurPos = nFirstPos; } VSIFPrintfL( fp, "%d", nVal ); nCurPos += nValLen; } else if( eArrayItemType == CPLJSONObject::Type::Double ) { const double dfVal = oItem.ToDouble(); 
CPLString osVal; if( dfVal >= INT_MIN && dfVal <= INT_MAX && static_cast<int>(dfVal) == dfVal ) { osVal = CPLSPrintf("%d.0", static_cast<int>(dfVal)); } else { osVal = CPLSPrintf("%.18g", dfVal); } const size_t nValLen = osVal.size(); if( nFirstPos < WIDTH && idx > 0 && nCurPos + nValLen > WIDTH ) { VSIFPrintfL( fp, "\n" ); for( size_t j=0;j<nFirstPos;j++ ) { const char chSpace = ' '; VSIFWriteL(&chSpace, 1, 1, fp); } nCurPos = nFirstPos; } VSIFPrintfL( fp, "%s", osVal.c_str() ); nCurPos += nValLen; } if( idx < nLength - 1 ) { VSIFPrintfL( fp, ", " ); nCurPos += 2; } } VSIFPrintfL(fp, ")\n" ); } } } /************************************************************************/ /* Create() */ /************************************************************************/ GDALDataset *ISIS3Dataset::Create(const char* pszFilename, int nXSize, int nYSize, int nBands, GDALDataType eType, char** papszOptions) { if( eType != GDT_Byte && eType != GDT_UInt16 && eType != GDT_Int16 && eType != GDT_Float32 ) { CPLError(CE_Failure, CPLE_NotSupported, "Unsupported data type"); return nullptr; } if( nBands == 0 || nBands > 32767 ) { CPLError(CE_Failure, CPLE_NotSupported, "Unsupported band count"); return nullptr; } const char* pszDataLocation = CSLFetchNameValueDef(papszOptions, "DATA_LOCATION", "LABEL"); const bool bIsTiled = CPLFetchBool(papszOptions, "TILED", false); const int nBlockXSize = std::max(1, atoi(CSLFetchNameValueDef(papszOptions, "BLOCKXSIZE", "256"))); const int nBlockYSize = std::max(1, atoi(CSLFetchNameValueDef(papszOptions, "BLOCKYSIZE", "256"))); if( !EQUAL(pszDataLocation, "LABEL") && !EQUAL( CPLGetExtension(pszFilename), "LBL") ) { CPLError(CE_Failure, CPLE_NotSupported, "For DATA_LOCATION=%s, " "the main filename should have a .lbl extension", pszDataLocation); return nullptr; } VSILFILE* fp = VSIFOpenExL(pszFilename, "wb", true); if( fp == nullptr ) { CPLError( CE_Failure, CPLE_FileIO, "Cannot create %s: %s", pszFilename, VSIGetLastErrorMsg() ); return nullptr; } 
VSILFILE* fpImage = nullptr; CPLString osExternalFilename; GDALDataset* poExternalDS = nullptr; bool bGeoTIFFAsRegularExternal = false; if( EQUAL(pszDataLocation, "EXTERNAL") ) { osExternalFilename = CSLFetchNameValueDef(papszOptions, "EXTERNAL_FILENAME", CPLResetExtension(pszFilename, "cub")); fpImage = VSIFOpenExL(osExternalFilename, "wb", true); if( fpImage == nullptr ) { CPLError( CE_Failure, CPLE_FileIO, "Cannot create %s: %s", osExternalFilename.c_str(), VSIGetLastErrorMsg() ); VSIFCloseL(fp); return nullptr; } } else if( EQUAL(pszDataLocation, "GEOTIFF") ) { osExternalFilename = CSLFetchNameValueDef(papszOptions, "EXTERNAL_FILENAME", CPLResetExtension(pszFilename, "tif")); GDALDriver* poDrv = static_cast<GDALDriver*>( GDALGetDriverByName("GTiff")); if( poDrv == nullptr ) { CPLError( CE_Failure, CPLE_AppDefined, "Cannot find GTiff driver" ); VSIFCloseL(fp); return nullptr; } char** papszGTiffOptions = nullptr; papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "ENDIANNESS", "LITTLE"); if( bIsTiled ) { papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "TILED", "YES"); papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "BLOCKXSIZE", CPLSPrintf("%d", nBlockXSize)); papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "BLOCKYSIZE", CPLSPrintf("%d", nBlockYSize)); } const char* pszGTiffOptions = CSLFetchNameValueDef(papszOptions, "GEOTIFF_OPTIONS", ""); char** papszTokens = CSLTokenizeString2( pszGTiffOptions, ",", 0 ); for( int i = 0; papszTokens[i] != nullptr; i++ ) { papszGTiffOptions = CSLAddString(papszGTiffOptions, papszTokens[i]); } CSLDestroy(papszTokens); // If the user didn't specify any compression and // GEOTIFF_AS_REGULAR_EXTERNAL is set (or unspecified), then the // GeoTIFF file can be seen as a regular external raw file, provided // we make some provision on its organization. 
if( CSLFetchNameValue(papszGTiffOptions, "COMPRESS") == nullptr && CPLFetchBool(papszOptions, "GEOTIFF_AS_REGULAR_EXTERNAL", true) ) { bGeoTIFFAsRegularExternal = true; papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "INTERLEAVE", "BAND"); // Will make sure that our blocks at nodata are not optimized // away but indeed well written papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "@WRITE_EMPTY_TILES_SYNCHRONOUSLY", "YES"); if( !bIsTiled && nBands > 1 ) { papszGTiffOptions = CSLSetNameValue(papszGTiffOptions, "BLOCKYSIZE", "1"); } } poExternalDS = poDrv->Create( osExternalFilename, nXSize, nYSize, nBands, eType, papszGTiffOptions ); CSLDestroy(papszGTiffOptions); if( poExternalDS == nullptr ) { CPLError( CE_Failure, CPLE_FileIO, "Cannot create %s", osExternalFilename.c_str() ); VSIFCloseL(fp); return nullptr; } } ISIS3Dataset* poDS = new ISIS3Dataset(); poDS->SetDescription( pszFilename ); poDS->eAccess = GA_Update; poDS->nRasterXSize = nXSize; poDS->nRasterYSize = nYSize; poDS->m_osExternalFilename = osExternalFilename; poDS->m_poExternalDS = poExternalDS; poDS->m_bGeoTIFFAsRegularExternal = bGeoTIFFAsRegularExternal; if( bGeoTIFFAsRegularExternal ) poDS->m_bGeoTIFFInitDone = false; poDS->m_fpLabel = fp; poDS->m_fpImage = fpImage ? 
fpImage: fp; poDS->m_bIsLabelWritten = false; poDS->m_bIsTiled = bIsTiled; poDS->m_bInitToNodata = (poDS->m_poExternalDS == nullptr); poDS->m_osComment = CSLFetchNameValueDef(papszOptions, "COMMENT", ""); poDS->m_osLatitudeType = CSLFetchNameValueDef(papszOptions, "LATITUDE_TYPE", ""); poDS->m_osLongitudeDirection = CSLFetchNameValueDef(papszOptions, "LONGITUDE_DIRECTION", ""); poDS->m_osTargetName = CSLFetchNameValueDef(papszOptions, "TARGET_NAME", ""); poDS->m_bForce360 = CPLFetchBool(papszOptions, "FORCE_360", false); poDS->m_bWriteBoundingDegrees = CPLFetchBool(papszOptions, "WRITE_BOUNDING_DEGREES", true); poDS->m_osBoundingDegrees = CSLFetchNameValueDef(papszOptions, "BOUNDING_DEGREES", ""); poDS->m_bUseSrcLabel = CPLFetchBool(papszOptions, "USE_SRC_LABEL", true); poDS->m_bUseSrcMapping = CPLFetchBool(papszOptions, "USE_SRC_MAPPING", false); poDS->m_bUseSrcHistory = CPLFetchBool(papszOptions, "USE_SRC_HISTORY", true); poDS->m_bAddGDALHistory = CPLFetchBool(papszOptions, "ADD_GDAL_HISTORY", true); if( poDS->m_bAddGDALHistory ) { poDS->m_osGDALHistory = CSLFetchNameValueDef(papszOptions, "GDAL_HISTORY", ""); } const double dfNoData = (eType == GDT_Byte) ? NULL1: (eType == GDT_UInt16) ? NULLU2: (eType == GDT_Int16) ? 
NULL2: /*(eType == GDT_Float32) ?*/ NULL4; for( int i = 0; i < nBands; i++ ) { GDALRasterBand *poBand = nullptr; if( poDS->m_poExternalDS != nullptr ) { ISIS3WrapperRasterBand* poISISBand = new ISIS3WrapperRasterBand( poDS->m_poExternalDS->GetRasterBand( i+1 ) ); poBand = poISISBand; } else if( bIsTiled ) { ISISTiledBand* poISISBand = new ISISTiledBand( poDS, poDS->m_fpImage, i+1, eType, nBlockXSize, nBlockYSize, 0, //nSkipBytes, to be hacked // afterwards for in-label imagery 0, 0, CPL_IS_LSB ); poBand = poISISBand; } else { const int nPixelOffset = GDALGetDataTypeSizeBytes(eType); const int nLineOffset = nPixelOffset * nXSize; const vsi_l_offset nBandOffset = static_cast<vsi_l_offset>(nLineOffset) * nYSize; ISIS3RawRasterBand* poISISBand = new ISIS3RawRasterBand( poDS, i+1, poDS->m_fpImage, nBandOffset * i, // nImgOffset, to be //hacked afterwards for in-label imagery nPixelOffset, nLineOffset, eType, CPL_IS_LSB ); poBand = poISISBand; } poDS->SetBand( i+1, poBand ); poBand->SetNoDataValue(dfNoData); } return poDS; } /************************************************************************/ /* GetUnderlyingDataset() */ /************************************************************************/ static GDALDataset* GetUnderlyingDataset( GDALDataset* poSrcDS ) { if( poSrcDS->GetDriver() != nullptr && poSrcDS->GetDriver() == GDALGetDriverByName("VRT") ) {<|fim▁hole|> VRTDataset* poVRTDS = reinterpret_cast<VRTDataset* >(poSrcDS); poSrcDS = poVRTDS->GetSingleSimpleSource(); } return poSrcDS; } /************************************************************************/ /* CreateCopy() */ /************************************************************************/ GDALDataset* ISIS3Dataset::CreateCopy( const char *pszFilename, GDALDataset *poSrcDS, int /*bStrict*/, char ** papszOptions, GDALProgressFunc pfnProgress, void * pProgressData ) { const char* pszDataLocation = CSLFetchNameValueDef(papszOptions, "DATA_LOCATION", "LABEL"); GDALDataset* poSrcUnderlyingDS = 
GetUnderlyingDataset(poSrcDS); if( poSrcUnderlyingDS == nullptr ) poSrcUnderlyingDS = poSrcDS; if( EQUAL(pszDataLocation, "GEOTIFF") && strcmp(poSrcUnderlyingDS->GetDescription(), CSLFetchNameValueDef(papszOptions, "EXTERNAL_FILENAME", CPLResetExtension(pszFilename, "tif")) ) == 0 ) { CPLError(CE_Failure, CPLE_NotSupported, "Output file has same name as input file"); return nullptr; } if( poSrcDS->GetRasterCount() == 0 ) { CPLError(CE_Failure, CPLE_NotSupported, "Unsupported band count"); return nullptr; } const int nXSize = poSrcDS->GetRasterXSize(); const int nYSize = poSrcDS->GetRasterYSize(); const int nBands = poSrcDS->GetRasterCount(); GDALDataType eType = poSrcDS->GetRasterBand(1)->GetRasterDataType(); ISIS3Dataset *poDS = reinterpret_cast<ISIS3Dataset*>( Create( pszFilename, nXSize, nYSize, nBands, eType, papszOptions )); if( poDS == nullptr ) return nullptr; poDS->m_osFromFilename = poSrcUnderlyingDS->GetDescription(); double adfGeoTransform[6] = { 0.0 }; if( poSrcDS->GetGeoTransform( adfGeoTransform ) == CE_None && (adfGeoTransform[0] != 0.0 || adfGeoTransform[1] != 1.0 || adfGeoTransform[2] != 0.0 || adfGeoTransform[3] != 0.0 || adfGeoTransform[4] != 0.0 || adfGeoTransform[5] != 1.0) ) { poDS->SetGeoTransform( adfGeoTransform ); } auto poSrcSRS = poSrcDS->GetSpatialRef(); if( poSrcSRS ) { poDS->SetSpatialRef( poSrcSRS ); } for(int i=1;i<=nBands;i++) { const double dfOffset = poSrcDS->GetRasterBand(i)->GetOffset(); if( dfOffset != 0.0 ) poDS->GetRasterBand(i)->SetOffset(dfOffset); const double dfScale = poSrcDS->GetRasterBand(i)->GetScale(); if( dfScale != 1.0 ) poDS->GetRasterBand(i)->SetScale(dfScale); } // Do we need to remap nodata ? 
int bHasNoData = FALSE; poDS->m_dfSrcNoData = poSrcDS->GetRasterBand(1)->GetNoDataValue(&bHasNoData); poDS->m_bHasSrcNoData = CPL_TO_BOOL(bHasNoData); if( poDS->m_bUseSrcLabel ) { char** papszMD_ISIS3 = poSrcDS->GetMetadata("json:ISIS3"); if( papszMD_ISIS3 != nullptr ) { poDS->SetMetadata( papszMD_ISIS3, "json:ISIS3" ); } } // We don't need to initialize the imagery as we are going to copy it // completely poDS->m_bInitToNodata = false; CPLErr eErr = GDALDatasetCopyWholeRaster( poSrcDS, poDS, nullptr, pfnProgress, pProgressData ); poDS->FlushCache(); poDS->m_bHasSrcNoData = false; if( eErr != CE_None ) { delete poDS; return nullptr; } return poDS; } /************************************************************************/ /* GDALRegister_ISIS3() */ /************************************************************************/ void GDALRegister_ISIS3() { if( GDALGetDriverByName( "ISIS3" ) != nullptr ) return; GDALDriver *poDriver = new GDALDriver(); poDriver->SetDescription( "ISIS3" ); poDriver->SetMetadataItem( GDAL_DCAP_RASTER, "YES" ); poDriver->SetMetadataItem( GDAL_DMD_LONGNAME, "USGS Astrogeology ISIS cube (Version 3)" ); poDriver->SetMetadataItem( GDAL_DMD_HELPTOPIC, "drivers/raster/isis3.html" ); poDriver->SetMetadataItem( GDAL_DCAP_VIRTUALIO, "YES" ); poDriver->SetMetadataItem( GDAL_DMD_EXTENSIONS, "lbl cub" ); poDriver->SetMetadataItem( GDAL_DMD_CREATIONDATATYPES, "Byte UInt16 Int16 Float32" ); poDriver->SetMetadataItem( GDAL_DMD_OPENOPTIONLIST, "<OpenOptionList/>"); poDriver->SetMetadataItem( GDAL_DMD_CREATIONOPTIONLIST, "<CreationOptionList>" " <Option name='DATA_LOCATION' type='string-select' " "description='Location of pixel data' default='LABEL'>" " <Value>LABEL</Value>" " <Value>EXTERNAL</Value>" " <Value>GEOTIFF</Value>" " </Option>" " <Option name='GEOTIFF_AS_REGULAR_EXTERNAL' type='boolean' " "description='Whether the GeoTIFF file, if uncompressed, should be " "registered as a regular raw file' default='YES'/>" " <Option name='GEOTIFF_OPTIONS' 
type='string' " "description='Comma separated list of KEY=VALUE tuples to forward " "to the GeoTIFF driver'/>" " <Option name='EXTERNAL_FILENAME' type='string' " "description='Override default external filename. " "Only for DATA_LOCATION=EXTERNAL or GEOTIFF'/>" " <Option name='TILED' type='boolean' " "description='Whether the pixel data should be tiled' default='NO'/>" " <Option name='BLOCKXSIZE' type='int' " "description='Tile width' default='256'/>" " <Option name='BLOCKYSIZE' type='int' " "description='Tile height' default='256'/>" " <Option name='COMMENT' type='string' " "description='Comment to add into the label'/>" " <Option name='LATITUDE_TYPE' type='string-select' " "description='Value of Mapping.LatitudeType' default='Planetocentric'>" " <Value>Planetocentric</Value>" " <Value>Planetographic</Value>" " </Option>" " <Option name='LONGITUDE_DIRECTION' type='string-select' " "description='Value of Mapping.LongitudeDirection' " "default='PositiveEast'>" " <Value>PositiveEast</Value>" " <Value>PositiveWest</Value>" " </Option>" " <Option name='TARGET_NAME' type='string' description='Value of " "Mapping.TargetName'/>" " <Option name='FORCE_360' type='boolean' " "description='Whether to force longitudes in [0,360] range' default='NO'/>" " <Option name='WRITE_BOUNDING_DEGREES' type='boolean' " "description='Whether to write Min/MaximumLong/Latitude values' " "default='YES'/>" " <Option name='BOUNDING_DEGREES' type='string' " "description='Manually set bounding box with the syntax " "min_long,min_lat,max_long,max_lat'/>" " <Option name='USE_SRC_LABEL' type='boolean' " "description='Whether to use source label in ISIS3 to ISIS3 conversions' " "default='YES'/>" " <Option name='USE_SRC_MAPPING' type='boolean' " "description='Whether to use Mapping group from source label in " "ISIS3 to ISIS3 conversions' " "default='NO'/>" " <Option name='USE_SRC_HISTORY' type='boolean' " "description='Whether to use content pointed by the History object in " "ISIS3 to ISIS3 
conversions' " "default='YES'/>" " <Option name='ADD_GDAL_HISTORY' type='boolean' " "description='Whether to add GDAL specific history in the content pointed " "by the History object in " "ISIS3 to ISIS3 conversions' " "default='YES'/>" " <Option name='GDAL_HISTORY' type='string' " "description='Manually defined GDAL history. Must be formatted as ISIS3 " "PDL. If not specified, it is automatically composed.'/>" "</CreationOptionList>" ); poDriver->pfnOpen = ISIS3Dataset::Open; poDriver->pfnIdentify = ISIS3Dataset::Identify; poDriver->pfnCreate = ISIS3Dataset::Create; poDriver->pfnCreateCopy = ISIS3Dataset::CreateCopy; GetGDALDriverManager()->RegisterDriver( poDriver ); }<|fim▁end|>
<|file_name|>calc_distance_hint.py<|end_file_name|><|fim▁begin|>def output_gpx(points, output_filename): """ Output a GPX file with latitude and longitude from the points DataFrame. """ from xml.dom.minidom import getDOMImplementation def append_trkpt(pt, trkseg, doc): trkpt = doc.createElement('trkpt') trkpt.setAttribute('lat', '%.8f' % (pt['lat'])) trkpt.setAttribute('lon', '%.8f' % (pt['lon'])) trkseg.appendChild(trkpt) doc = getDOMImplementation().createDocument(None, 'gpx', None) trk = doc.createElement('trk') doc.documentElement.appendChild(trk) trkseg = doc.createElement('trkseg') trk.appendChild(trkseg) points.apply(append_trkpt, axis=1, trkseg=trkseg, doc=doc) with open(output_filename, 'w') as fh: doc.writexml(fh, indent=' ') <|fim▁hole|> print('Unfiltered distance: %0.2f' % (distance(points),)) smoothed_points = smooth(points) print('Filtered distance: %0.2f' % (distance(smoothed_points),)) output_gpx(smoothed_points, 'out.gpx') if __name__ == '__main__': main()<|fim▁end|>
def main(): points = get_data(sys.argv[1])
<|file_name|>vec.py<|end_file_name|><|fim▁begin|>def getitem(v,d): "Returns the value of entry d in v" assert d in v.D return v.f[d] if d in v.f else 0 def setitem(v,d,val): "Set the element of v with label d to be val" assert d in v.D v.f[d] = val def equal(u,v): "Returns true iff u is equal to v" assert u.D == v.D union = set(u.f) | set (v.f) for k in union: uval = u.f[k] if k in u.f else 0 vval = v.f[k] if k in v.f else 0 if uval != vval: return False return True def add(u,v): "Returns the sum of the two vectors" assert u.D == v.D ukeys = set(u.f) vkeys = set (v.f) both = ukeys & vkeys uonly = ukeys - both vonly = vkeys - both f = {} for k in both: f[k] = u.f[k] + v.f[k] for k in uonly: f[k] = u.f[k] for k in vonly: f[k] = v.f[k] return Vec (u.D | v.D, f) def dot(u,v): "Returns the dot product of the two vectors" assert u.D == v.D ukeys = set(u.f) vkeys = set (v.f) both = ukeys & vkeys return sum([u.f[k] * v.f[k] for k in both]) def scalar_mul(v, alpha): "Returns the scalar-vector product alpha times v" f = {k: alpha * v.f[k] for k in v.f} return (Vec(v.D, f)) def neg(v): "Returns the negation of a vector" return scalar_mul (v, -1) def toStr(v): "pretty-printing" try: D_list = sorted(v.D) except TypeError: D_list = sorted(v.D, key=hash) numdec = 3 wd = dict([(k,(1+max(len(str(k)), len('{0:.{1}G}'.format(v[k], numdec))))) if isinstance(v[k], int) or isinstance(v[k], float) else (k,(1+max(len(str(k)), len(str(v[k]))))) for k in D_list]) # w = 1+max([len(str(k)) for k in D_list]+[len('{0:.{1}G}'.format(value,numdec)) for value in v.f.values()]) s1 = ''.join(['{0:>{1}}'.format(k,wd[k]) for k in D_list]) s2 = ''.join(['{0:>{1}.{2}G}'.format(v[k],wd[k],numdec) if isinstance(v[k], int) or isinstance(v[k], float) else '{0:>{1}}'.format(v[k], wd[k]) for k in D_list]) return "\n" + s1 + "\n" + '-'*sum(wd.values()) +"\n" + s2 ##### NO NEED TO MODIFY BELOW HERE ##### class Vec:<|fim▁hole|> A vector has two fields: D - the domain (a set) f - a dictionary mapping (some) 
domain elements to field elements elements of D not appearing in f are implicitly mapped to zero """ def __init__(self, labels, function): self.D = labels self.f = function __getitem__ = getitem __setitem__ = setitem __neg__ = neg __rmul__ = scalar_mul #if left arg of * is primitive, assume it's a scalar def __mul__(self,other): #If other is a vector, returns the dot product of self and other if isinstance(other, Vec): return dot(self,other) else: return NotImplemented # Will cause other.__rmul__(self) to be invoked def __truediv__(self,other): # Scalar division return (1/other)*self __add__ = add def __radd__(self, other): "Hack to allow sum(...) to work with vectors" if other == 0: return self # def __sub__(self, a,b): # "Returns a vector which is the difference of a and b." # return a+(-b) def __sub__(self, other): "Returns a vector which is the difference of a and b." return self+(-other) __eq__ = equal __str__ = toStr def __repr__(self): return "Vec(" + str(self.D) + "," + str(self.f) + ")" def copy(self): "Don't make a new copy of the domain D" return Vec(self.D, self.f.copy())<|fim▁end|>
"""
<|file_name|>selection.js<|end_file_name|><|fim▁begin|>/** * @file * Provides functions that enable the selection of elements on a webpage, which can then be manipulated. * * HTML elements on the webpage that have a class containing '.selectable' */ // JavaScript should be made compatible with libraries other than jQuery by // wrapping it with an "anonymous closure". See: // - https://drupal.org/node/1446420 // - http://www.adequatelygood.com/2010/3/JavaScript-Module-Pattern-In-Depth (function ($, Drupal, window, document, undefined) { /** * CONTENTS - variable declarations - Drupal.behaviors.selection.attach() Drupal.edit_selected .find_selectables() .get_selecteds_indexes() .are_selecteds() .set_selecteds_indexes() .selecteds_indexes_push() .selecteds_indexes_remove() .add_selection_listeners() .get_selected_selectables() // Simplest get function. .get_selected_nids() .get_selectable() .get_index() .deselect_all() disable_image_drag() set_up_page() register_selection() register_focus() toggle_selected_element() adjust_buttons() set_up_keypress_mgmt() handle_keypress_see_shortcuts() handle_keypress_select() handle_keypress_prev() get_focussed_prev() handle_keypress_next() get_focussed_next() handle_keypress_group() */ /* * @todo Change primary storage array `selecteds_indexes` * to store the jQ elements rather than their 'indexes' (unreliable!). 
*/ var page_is_setup = false; var selectables; // View row numbers of the // The selecteds array temporarily stores the 'pictures' that are currently selected // The indexes refer to the 'result numbers' of the elements within the view content (for easy JQuery retrieval) var selecteds_indexes = []; // View row numbers of the var selecteds = []; // View row numbers of the var mouseIsDown = false; // Tracks status of mouse button var shift_key_down = false; // To understand behaviors, see https://drupal.org/node/756722#behaviors Drupal.behaviors.selection = { attach: function(context, settings) { // console.log('context: ', context); // console.log('context: ', $(context)); // console.log('selectables: ', selectables); if (! page_is_setup) { // disable_image_drag(); // Not needed anymore. Drupal.selection.find_selectables(); register_focus( selectables.eq(0) ); set_up_keypress_mgmt(context); set_up_page(context); $(document).mousedown(function(event) { // event.preventDefault(); mouseIsDown = true; // When mouse goes down, set mouseIsDown to true // return false; }) .mouseup(function() { mouseIsDown = false; // When mouse goes up, set mouseIsDown to false }); selectables.each(function() { Drupal.selection.add_selection_listeners($(this)); }); $( '#deselect_all', context ).click( function() { Drupal.selection.deselect_all(); }); page_is_setup = true; } }, weight: 3 }; Drupal.selection = { find_selectables: function(context) { if (typeof context === 'undefined') { context = document; } selectables = $('.selectable', context); }, get_selecteds_indexes: function() { return selecteds_indexes; }, /** * Determines if there are any selectables currently selected. */ are_selecteds: function() { // var num_selected = Drupal.selection.get_selecteds_indexes().length; var are_selecteds = selectables.filter('.selected').length > 0 ? 
true : false; // console.log('are_selecteds: ', are_selecteds); return are_selecteds; }, set_selecteds_indexes: function( array ) { selecteds_indexes = array; }, selecteds_indexes_push: function( index ) { selecteds_indexes.push(index); }, selecteds_indexes_remove: function( index ) { selecteds_indexes.splice( $.inArray( index, selecteds_indexes), 1 ); }, /** * The first element could after the last element (if select was made in reverse). */ select_multiple: function( first_element, last_element ) { // console.log('first_element: ', first_element); // console.log('last_element: ', last_element); var settings = { cleanup: false, push_index: false }; var select_on = false; selectables.each(function(key, element) { var selectable = $(element); if (selectable.is(first_element) || selectable.is(last_element)) { if (! select_on) { select_on = true; } else { register_selection( selectable, settings ); // For the last one. select_on = false; } } if (select_on && (! selectable.hasClass('selected'))) { register_selection( selectable, settings ); } }); selectables.each(function() { selecteds_indexes.push($(this).index()); }); // console.log('selecteds_indexes: ', selecteds_indexes); adjust_buttons(); adjust_toolbar(); $(document).trigger( "selection:select_all" ); }, add_selection_listeners: function(selectable) { selectable.find('.selectable-target').mousedown( function(event) { event.preventDefault(); // Prevents browser's "text selection" var settings = { cleanup: true }; var selectable = $(this).parents('.selectable'); if (shift_key_down) { Drupal.selection.select_multiple( selectables.filter('.focussed'), selectable ); } else { register_selection( selectable, settings ); } register_focus( selectable ); }); selectable.find('.selectable-target').mouseenter( function(event) { var settings = { cleanup: true }; var selectable = $(this).parents('.selectable'); if (mouseIsDown) { register_selection( selectable, settings ); } }); }, /** * @return a list of Jquery elements */ 
get_selected_selectables: function(id) { // var selecteds_indexes = Drupal.selection.get_selecteds_indexes(); // console.log('selecteds_indexes received: ', selecteds_indexes); // selecteds = []; // $.each( selecteds_indexes, function( key, index ) { // // selecteds.push(selectables.filter('.views-row-' + index )); // selecteds.push(selectables.filter('.selected')); // // console.log('elements: ', element.length); // }); // return selecteds; return selectables.filter('.selected'); }, /** * Gets the array of selectable indexes, and creates an array of the nids from it. */ get_selected_nids: function() { // var selecteds_indexes = Drupal.selection.get_selecteds_indexes(); // console.log('selecteds_indexes received: ', selecteds_indexes); selecteds_nids = []; selectables.filter('.selected').each(function( key, index ) { // var selectable = selectables.filter('.views-row-' + (index - 1) ); // console.log('selectable: ', $(this)); var nid = $(this).find( ".property-nid" ).html().trim(); // console.log('nid: ', nid); selecteds_nids.push( nid ); }); // console.log('selecteds_nids: ', selecteds_nids); return selecteds_nids; }, /** * @return a Jquery element, regardless of whether it's in a group */ get_selectable: function(id) { return selectables.filter('.views-row-' + id ); }, /** * Determines the position that a result is in within view results. */ get_index: function(target) { var index; var classes = target.attr( "class" ).split( " " ); // console.log('classes: ', classes); if(classes) { // Determine the picture's result number (within view) $.each( classes, function( key, value ) { // We use "views-row-#" because it's reliable, and doesn't change (as long as the layout is 'list') if ( value.substr( 0, 10 ) == "views-row-" ) { // console.log('substr: ', value.substr( 10, value.length)); index = parseInt( value.substr( 10, value.length ), 10 ); } else return; }); } return index; }, /** * Deselects all selected elements. 
*/ deselect_all: function() { // console.log('call: Drupal.selection.deselect_all()'); selectables.filter('.selected').removeClass( "selected" ); adjust_buttons(); adjust_toolbar(); $(window).trigger( "selection:deselect_all" ); } } function disable_image_drag() { // Disable firefox's 'image drag' feature, which messes with our ‘drag-select’ $(document).on("dragstart", function(event) { event.preventDefault(); // console.log('On: dragstart'); return false; }); } function set_up_page(context) { var toolbar = $('[role="toolbar"][aria-label="primary toolbar"]', context); $( '.page__title', context ) .after('<div class="action_buttons"></div>'); toolbar .append("<button id='deselect_all' type='button' disabled>Un-select all</button>"); toolbar.find('#deselect_all').prepend("<span class='icon-select_all'></span> "); } /** * Adds or removes the result number from the selected array. * * @param target * JQuery object that was selected or unselected. * * @param checked * Boolean indicating if the target's checked attribute should be set to true. */ function register_selection(target, settings) { // console.log('Call: register_selection'); var checked = toggle_selected_element( target ); // console.log('checked: ', checked); var index = Drupal.selection.get_index(target); // console.log('index: ', index); if (typeof settings.push_index == 'undefined') { settings.push_index = true; } if (checked && settings.push_index) { // Add the picture to the 'selecteds' arrays selecteds_indexes.push( index ); } else if (settings.push_index) { // Remove the picture from the 'selecteds' arrays selecteds_indexes.splice( $.inArray( index, selecteds_indexes), 1 ); } // console.log("selecteds_indexes: ", selecteds_indexes); settings = (typeof settings !== 'undefined') ? settings : { cleanup: false }; if (settings.cleanup) { // console.log('cleanup'); adjust_buttons(); adjust_toolbar(); target.trigger( "selection:select" ); } } /** * Adds or removes the result number from the selected array. 
*/ function register_focus(target) { selectables.removeClass('focussed'); target.toggleClass('focussed'); } /** * Toggles the selected state of the selected element. * * @return boolean * Returns true if the selected element is now 'selected', or false otherwise. */ function toggle_selected_element(target) { target.toggleClass( "selected" ); var checked = target.hasClass( "selected" ); return checked; } /** * Enables and disables the buttons according to selections. */ function adjust_buttons() { // console.log('adjust_buttons()'); // if( selecteds_indexes.length ) { // $( '#deselect_all' ).attr('disabled', false); // } else { // $( '#deselect_all' ).attr('disabled', true); // } $( '#deselect_all' ).attr('disabled', ! Drupal.selection.are_selecteds()); } /** * Shows or hides the toolbar according to selections. */ function adjust_toolbar() { var toolbar = $('[role="toolbar"][aria-label*="primary"]'); toolbar.attr('aria-expanded', Drupal.selection.are_selecteds()); var selection_count = selectables.filter('.selected').length; toolbar.find('.selection-count .count').html(selection_count); } function set_up_keypress_mgmt(context) { Drupal.casabio.add_keypress_info( "<tr><td><strong>shift</strong> <small>+</small> <strong>/</strong> : </td><td>Show shortcuts</td></tr>" ); // Further keypress info will be customised by page and added by edit_selected.js. 
// Drupal.casabio.add_keypress_info(); // console.log('document, context: ', $(context).find(document)); $('body', context).bind('keydown', 'shift+/', handle_keypress_see_shortcuts); $('body', context).bind('keydown', 'x', handle_keypress_select); $('body', context).bind('keydown', 'c', handle_keypress_unselect_all); $('body', context).bind('keydown', 'left', handle_keypress_prev, context); $('body', context).bind('keydown', 'right', handle_keypress_next, context); $('body', context).bind('keydown', 'ctrl+a', handle_keypress_select_all, context); $('body', context).bind('keydown', 'shift', handle_keypress_shift_down); $('body', context).bind('keyup', 'shift', handle_keypress_shift_up); } function handle_keypress_see_shortcuts() { // console.log('handle_keypress_see_shortcuts'); <|fim▁hole|> var is_open = $( '#display-shortcuts[style*="display: block"]' ).length > 0; if (is_open) { $( '#display-shortcuts' ).dialog('close'); } else { $( '#display-shortcuts' ).dialog('open'); } } function handle_keypress_select() { var settings = { cleanup: true, push_index: false }; var focussed = selectables.filter('.focussed'); register_selection(focussed, settings); } function handle_keypress_unselect_all() { Drupal.selection.deselect_all(); } function handle_keypress_prev() { var selectable = selectables.filter('.focussed'); prev = get_focussed_prev(selectable); if (prev !== null) { register_focus(prev); } $(window).trigger( "selection:prev", prev ); } /** * Gets the previous selectable before an element (which can be a selectable or a group). 
*/ function get_focussed_prev(element) { var prev; // If in a group and is the last in the group if ((element.parent('.group').length > 0) && (element.prev().length == 0)) { // console.log('is in group') prev = get_focussed_prev(element.parent()); } // If the previous li or ul is a group else if (element.prevAll('li, ul').eq(0).hasClass('group')) { // console.log('next is group') prev = element.prevAll('li, ul').eq(0).children('.selectable').eq(-1); } // If this is the first selectable else if (element.prevAll('.selectable').length == 0) { prev = null; } else { prev = element.prevAll('.selectable').eq(0); } return prev; } function handle_keypress_next(context) { // console.log('called: handle_keypress_next()'); var selectable = selectables.filter('.focussed'); next = get_focussed_next(selectable); if (next !== null) { register_focus(next); } $(window).trigger( "selection:next", next ); } function handle_keypress_select_all(context) { // Drupal.casa_utilities.start_timer('select_all'); Drupal.selection.select_multiple(selectables.first(), selectables.last()); // Drupal.casa_utilities.end_timer('select_all'); return false; } /** * Gets the next selectable following an element (which can be a selectable or a group). 
*/ function get_focussed_next(element) { // console.log('element class: ', element.attr('class')); var next; // If in a group and is the last in the group if ((element.parent('.group').length > 0) && (element.nextAll('li, ul').eq(0).length == 0)) { // console.log('is in group') next = get_focussed_next(element.parent()); } // If the next li or ul is a group else if (element.nextAll('li, ul').eq(0).hasClass('group')) { // console.log('next is group') next = element.nextAll('li, ul').eq(0).children('.selectable').eq(0); } // If this is the last selectable else if (element.nextAll('.selectable').length == 0) { next = null; } else { next = element.nextAll('.selectable').eq(0); // console.log('next class: ', next.attr('class')); } return next; } function handle_keypress_group() { group(); } function handle_keypress_ungroup() { ungroup_all(); } function handle_keypress_shift_down() { shift_key_down = true; } function handle_keypress_shift_up() { shift_key_down = false; } })(jQuery, Drupal, this, this.document);<|fim▁end|>
<|file_name|>validationGroup.ts<|end_file_name|><|fim▁begin|>import { Component, Input, Optional, OnInit, AfterViewInit, OnChanges, SimpleChange } from '@angular/core'; import { FormGroup, FormControl } from '@angular/forms'; import { services } from 'typescript-angular-utilities'; import __validation = services.validation; import __array = services.array; import { FormComponent } from '../form/form'; import { ComponentValidator } from '../../services/componentValidator/componentValidator.service'; export interface IGroupChanges { [key: string]: SimpleChange; model: SimpleChange; } @Component({ selector: 'rlValidationGroup', template: require('./validationGroup.html'), providers: [ComponentValidator], }) export class ValidationGroupComponent implements OnInit, AfterViewInit, OnChanges { @Input() validator: __validation.IObservableValidationHandler;<|fim▁hole|> @Input() model: any; groupValidator: ComponentValidator; formGroup: FormGroup; validationControl: FormControl; arrayUtility: __array.IArrayUtility; constructor(@Optional() rlForm: FormComponent , componentValidator: ComponentValidator , arrayUtility: __array.ArrayUtility) { this.arrayUtility = arrayUtility; this.groupValidator = componentValidator; this.validationControl = new FormControl('', null, this.groupValidator.validate.bind(this.groupValidator)); this.formGroup = new FormGroup({ validation: this.validationControl }); if (rlForm) { rlForm.form.addControl('', this.formGroup) } } ngOnInit(): void { let validators: __validation.IObservableValidationHandler[] = []; if (this.validator) { validators = validators.concat(this.arrayUtility.arrayify(this.validator)); } if (this.validators) { validators = validators.concat(this.arrayUtility.arrayify(this.validators)); } this.groupValidator.initValidator(validators, this.validationControl.valueChanges, this.validationControl); } ngAfterViewInit(): void { this.validationControl.updateValueAndValidity(this.model || undefined); } ngOnChanges(changes: IGroupChanges): 
void { if (changes.model) { this.validationControl.updateValueAndValidity(changes.model.currentValue); } } checkValidity(): void { this.validationControl.updateValueAndValidity(this.model); } }<|fim▁end|>
@Input() validators: __validation.IObservableValidationHandler[];
<|file_name|>rust-indexer.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate clap; extern crate env_logger; #[macro_use] extern crate log; extern crate rls_analysis; extern crate rls_data as data; extern crate tools; use crate::data::GlobalCrateId; use crate::data::{DefKind, ImplKind}; use rls_analysis::{AnalysisHost, AnalysisLoader, SearchDirectory}; use std::collections::{BTreeSet, HashMap}; use std::fs::{self, File}; use std::io; use std::io::{BufRead, BufReader, Read, Seek}; use std::path::{Path, PathBuf}; use tools::file_format::analysis::{ AnalysisKind, AnalysisSource, AnalysisTarget, LineRange, Location, SourceRange, WithLocation, }; /// A global definition id in a crate. /// /// FIXME(emilio): This key is kind of slow, because GlobalCrateId contains a /// String. There's a "disambiguator" field which may be more than enough for /// our purposes. #[derive(Clone, Hash, Debug, Eq, PartialEq)] pub struct DefId(GlobalCrateId, u32); /// A map from global definition ids to the actual definition. pub struct Defs { map: HashMap<DefId, data::Def>, } /// Local filesystem path mappings and metadata which exist for the following /// purposes: /// 1. Know where to output the analysis files. /// - There is only ever one analysis output directory. /// 2. Know how to locate rust source files in order to hackily extract strings /// that should have been in the save-analysis files. /// - After config scripts run and normalize things there are 2 source /// directories: revision controlled source (cross-platform) and the /// (per-platform) generated files directory. #[derive(Debug)] struct TreeInfo<'a> { /// Local filesystem path root for the analysis dir where rust-indexer.rs /// should write its output. out_analysis_dir: &'a Path, /// Local filesystem path root for the source tree. In the searchfox path /// space presented to users, this means all paths not prefixed with /// `__GENERATED__`. 
srcdir: &'a Path, /// Local filesystem path root for the per-platform generated source tree. /// In the searchfox path space presented to users, this means paths /// prefixed with `__GENERATED__`. generated: &'a Path, /// The searchfox path space prefix for generated. generated_friendly: &'a Path, } fn construct_qualname(scope: &str, name: &str) -> String { // Some of the names don't start with ::, for example: // __self_0_0$282 // <Loader>::new // Since we're gluing it to the "scope" (which might be a crate name) // we'll insert the :: to make it more readable let glue = if name.starts_with("::") { "" } else { "::" }; format!("{}{}{}", scope, glue, name) } fn sanitize_symbol(sym: &str) -> String { // Downstream processing of the symbol doesn't deal well with // these characters, so replace them with underscores sym.replace(",", "_").replace(" ", "_").replace("\n", "_") } // Given a definition, and the global crate id where that definition is found, // return a qualified name that identifies the definition unambiguously. fn crate_independent_qualname(def: &data::Def, crate_id: &data::GlobalCrateId) -> String { // For stuff with "no_mangle" functions or statics, or extern declarations, // we just use the name. // // TODO(emilio): Maybe there's a way to get the #[link_name] attribute from // here and make C++ agree with that? Though we don't use it so it may not // be worth the churn. 
fn use_unmangled_name(def: &data::Def) -> bool { match def.kind { DefKind::ForeignStatic | DefKind::ForeignFunction => true, DefKind::Static | DefKind::Function => { def.attributes.iter().any(|attr| attr.value == "no_mangle") } _ => false, } } if use_unmangled_name(def) { return def.name.clone(); } construct_qualname(&crate_id.name, &def.qualname) } impl Defs { fn new() -> Self { Self { map: HashMap::new(), } } fn insert(&mut self, analysis: &data::Analysis, def: &data::Def) { let crate_id = analysis.prelude.as_ref().unwrap().crate_id.clone(); let mut definition = def.clone(); definition.qualname = crate_independent_qualname(&def, &crate_id); let index = definition.id.index; let defid = DefId(crate_id, index); debug!("Indexing def: {:?} -> {:?}", defid, definition); let previous = self.map.insert(defid, definition); if let Some(previous) = previous { // This shouldn't happen, but as of right now it can happen with // some builtin definitions when highly generic types are involved. // This is probably a rust bug, just ignore it for now. debug!( "Found a definition with the same ID twice? {:?}, {:?}", previous, def, ); } } /// Getter for a given local id, which takes care of converting to a global /// ID and returning the definition if present. fn get(&self, analysis: &data::Analysis, id: data::Id) -> Option<data::Def> { let prelude = analysis.prelude.as_ref().unwrap(); let krate_id = if id.krate == 0 { prelude.crate_id.clone() } else { // TODO(emilio): This escales with the number of crates in this // particular crate, but it's probably not too bad, since it should // be a pretty fast linear search. 
let krate = prelude .external_crates .iter() .find(|krate| krate.num == id.krate); let krate = match krate { Some(k) => k, None => { debug!("Crate not found: {:?}", id); return None; } }; krate.id.clone() }; let id = DefId(krate_id, id.index); let result = self.map.get(&id).cloned(); if result.is_none() { debug!("Def not found: {:?}", id); } result } } #[derive(Clone)] pub struct Loader { deps_dirs: Vec<PathBuf>, } impl Loader { pub fn new(deps_dirs: Vec<PathBuf>) -> Self { Self { deps_dirs } } } impl AnalysisLoader for Loader { fn needs_hard_reload(&self, _: &Path) -> bool { true } fn fresh_host(&self) -> AnalysisHost<Self> { AnalysisHost::new_with_loader(self.clone()) } fn set_path_prefix(&mut self, _: &Path) {} fn abs_path_prefix(&self) -> Option<PathBuf> { None } fn search_directories(&self) -> Vec<SearchDirectory> { self.deps_dirs .iter() .map(|pb| SearchDirectory { path: pb.clone(), prefix_rewrite: None, }) .collect() } } fn def_kind_to_human(kind: DefKind) -> &'static str { match kind { DefKind::Enum => "enum", DefKind::Local => "local", DefKind::ExternType => "extern type", DefKind::Const => "constant", DefKind::Field => "field", DefKind::Function | DefKind::ForeignFunction => "function", DefKind::Macro => "macro", DefKind::Method => "method", DefKind::Mod => "module", DefKind::Static | DefKind::ForeignStatic => "static", DefKind::Struct => "struct", DefKind::Tuple => "tuple", DefKind::TupleVariant => "tuple variant", DefKind::Union => "union", DefKind::Type => "type", DefKind::Trait => "trait", DefKind::StructVariant => "struct variant", } } /// Potentially non-helpful mapping of impl kind. fn impl_kind_to_human(kind: &ImplKind) -> &'static str { match kind { ImplKind::Inherent => "impl", ImplKind::Direct => "impl for", ImplKind::Indirect => "impl for ref", ImplKind::Blanket => "impl for where", _ => "impl for where deref", } } /// Given two spans, create a new super-span that encloses them both if the files match. 
If the /// files don't match, just return the first span as-is. fn union_spans(a: &data::SpanData, b: &data::SpanData) -> data::SpanData { if a.file_name != b.file_name { return a.clone(); } let (byte_start, line_start, column_start) = if a.byte_start < b.byte_start { (a.byte_start, a.line_start, a.column_start) } else { (b.byte_start, b.line_start, b.column_start) }; let (byte_end, line_end, column_end) = if a.byte_end > b.byte_end { (a.byte_end, a.line_end, a.column_end) } else { (b.byte_end, b.line_end, b.column_end) }; data::SpanData { file_name: a.file_name.clone(), byte_start, byte_end, line_start, line_end, column_start, column_end, } } /// For the purposes of trying to figure out the actual effective nesting range of some type of /// definition, union its span (which just really covers the symbol name) plus the spans of all of /// its descendants. This should end up with a sufficiently reasonable line value. This is a hack. fn recursive_union_spans_of_def( def: &data::Def, file_analysis: &data::Analysis, defs: &Defs, ) -> data::SpanData { let mut span = def.span.clone(); for id in &def.children { // It should already be the case that the children are in the same krate, but better safe // than sorry. if id.krate != def.id.krate { continue; } let kid = defs.get(file_analysis, *id); if let Some(ref kid) = kid { let rec_span = recursive_union_spans_of_def(kid, file_analysis, defs); span = union_spans(&span, &rec_span); } } span } /// Given a list of ids of defs, run recursive_union_spans_of_def on all of them and union up the /// result. Necessary for when dealing with impls. 
fn union_spans_of_defs( initial_span: &data::SpanData, ids: &[data::Id], file_analysis: &data::Analysis, defs: &Defs, ) -> data::SpanData { let mut span = initial_span.clone(); for id in ids { let kid = defs.get(file_analysis, *id); if let Some(ref kid) = kid { let rec_span = recursive_union_spans_of_def(kid, file_analysis, defs); span = union_spans(&span, &rec_span); } } span } /// If we unioned together a span that only covers 1 or 2 lines, normalize it to None because /// nothing interesting will happen from a presentation perspective. (If we had proper AST info /// about the span, it would be appropriate to keep it and expose it, but this is all derived from /// shoddy inference.) fn ignore_boring_spans(span: &data::SpanData) -> Option<&data::SpanData> { match span { span if span.line_end.0 > span.line_start.0 + 1 => Some(span), _ => None, } } fn pretty_for_impl(imp: &data::Impl, qualname: &str) -> String { let mut pretty = impl_kind_to_human(&imp.kind).to_owned(); pretty.push_str(" "); pretty.push_str(qualname); pretty } fn pretty_for_def(def: &data::Def, qualname: &str) -> String { let mut pretty = def_kind_to_human(def.kind).to_owned(); pretty.push_str(" "); // We use the unsanitized qualname here because it's more human-readable // and the source-analysis pretty name is allowed to have commas and such pretty.push_str(qualname); pretty } fn visit_def( out_data: &mut BTreeSet<String>, kind: AnalysisKind, location: &data::SpanData, qualname: &str, def: &data::Def, context: Option<&str>, nesting: Option<&data::SpanData>, ) { let pretty = pretty_for_def(&def, &qualname); visit_common( out_data, kind, location, qualname, &pretty, context, nesting, ); } fn visit_common( out_data: &mut BTreeSet<String>, kind: AnalysisKind, location: &data::SpanData, qualname: &str, pretty: &str, context: Option<&str>, nesting: Option<&data::SpanData>, ) { // Searchfox uses 1-indexed lines, 0-indexed columns. 
let col_end = if location.line_start != location.line_end { // Rust spans are multi-line... So we just use the start column as // the end column if it spans multiple rows, searchfox has fallback // code to handle this. location.column_start.zero_indexed().0 } else { location.column_end.zero_indexed().0 }; let loc = Location { lineno: location.line_start.0, col_start: location.column_start.zero_indexed().0, col_end, }; let sanitized = sanitize_symbol(qualname); let target_data = WithLocation { data: AnalysisTarget { kind, pretty: sanitized.clone(), sym: sanitized.clone(), context: String::from(context.unwrap_or("")), contextsym: String::from(context.unwrap_or("")), peek_range: LineRange { start_lineno: 0, end_lineno: 0, }, }, loc: loc.clone(), }; out_data.insert(format!("{}", target_data)); let nesting_range = match nesting { Some(span) => SourceRange { // Hack note: These positions would ideally be those of braces. But they're not, so // while the position:sticky UI stuff should work-ish, other things will not. start_lineno: span.line_start.0, start_col: span.column_start.zero_indexed().0, end_lineno: span.line_end.0, end_col: span.column_end.zero_indexed().0, }, None => SourceRange { start_lineno: 0, start_col: 0, end_lineno: 0, end_col: 0, }, }; let source_data = WithLocation { data: AnalysisSource { syntax: vec![], pretty: pretty.to_string(), sym: vec![sanitized], no_crossref: false, nesting_range, }, loc, }; out_data.insert(format!("{}", source_data)); } /// Normalizes a searchfox user-visible relative file path to be an absolute /// local filesystem path. No attempt is made to validate the existence of the /// path. That's up to the caller. 
fn searchfox_path_to_local_path(searchfox_path: &Path, tree_info: &TreeInfo) -> PathBuf { if let Ok(objdir_path) = searchfox_path.strip_prefix(tree_info.generated_friendly) { return tree_info.generated.join(objdir_path); } tree_info.srcdir.join(searchfox_path) } fn read_existing_contents(map: &mut BTreeSet<String>, file: &Path) { if let Ok(f) = File::open(file) { let reader = BufReader::new(f); for line in reader.lines() { map.insert(line.unwrap()); } } } fn extract_span_from_source_as_buffer( reader: &mut File, span: &data::SpanData, ) -> io::Result<Box<[u8]>> { reader.seek(std::io::SeekFrom::Start(span.byte_start.into()))?; let len = (span.byte_end - span.byte_start) as usize; let mut buffer: Box<[u8]> = vec![0; len].into_boxed_slice(); reader.read_exact(&mut buffer)?; Ok(buffer) } /// Given a reader and a span from that file, extract the text contained by the span. If the span /// covers multiple lines, then whatever newline delimiters the file has will be included. /// /// In the event of a file read error or the contents not being valid UTF-8, None is returned. /// We will log to log::Error in the event of a file read problem because this can be indicative /// of lower level problems (ex: in vagrant), but not for utf-8 errors which are more expected /// from sketchy source-files. fn extract_span_from_source_as_string( mut reader: &mut File, span: &data::SpanData, ) -> Option<String> { match extract_span_from_source_as_buffer(&mut reader, &span) { Ok(buffer) => match String::from_utf8(buffer.into_vec()) { Ok(s) => Some(s), Err(_) => None, }, // This used to error! but the error payload was always just // `Unable to read file: Custom { kind: UnexpectedEof, error: "failed to fill whole buffer" }` // which was not useful or informative and may be due to invalid spans // being told to us by save-analysis. 
Err(_) => None, } } fn analyze_file( searchfox_path: &PathBuf, defs: &Defs, file_analysis: &data::Analysis, tree_info: &TreeInfo, ) { use std::io::Write; debug!("Running analyze_file for {}", searchfox_path.display()); let local_source_path = searchfox_path_to_local_path(searchfox_path, tree_info); if !local_source_path.exists() { warn!( "Skipping nonexistent source file with searchfox path '{}' which mapped to local path '{}'", searchfox_path.display(), local_source_path.display() ); return; }; // Attempt to open the source file to extract information not currently available from the // analysis data. Some analysis information may not be emitted if we are unable to access the // file. let maybe_source_file = match File::open(&local_source_path) { Ok(f) => Some(f), Err(_) => None, }; let output_file = tree_info.out_analysis_dir.join(searchfox_path); let mut dataset = BTreeSet::new(); read_existing_contents(&mut dataset, &output_file); let mut output_dir = output_file.clone(); output_dir.pop(); if let Err(err) = fs::create_dir_all(output_dir) { error!( "Couldn't create dir for: {}, {:?}", output_file.display(), err ); return; } let mut file = match File::create(&output_file) { Ok(f) => f, Err(err) => { error!( "Couldn't open output file: {}, {:?}", output_file.display(), err ); return; } }; // Be chatty about the files we're outputting so that it's easier to follow // the path of rust analysis generation. 
info!( "Writing analysis for '{}' to '{}'", searchfox_path.display(), output_file.display() ); for import in &file_analysis.imports { let id = match import.ref_id { Some(id) => id, None => { debug!( "Dropping import {} ({:?}): {}, no ref", import.name, import.kind, import.value ); continue; } }; let def = match defs.get(file_analysis, id) { Some(def) => def, None => { debug!( "Dropping import {} ({:?}): {}, no def for ref {:?}", import.name, import.kind, import.value, id ); continue; } }; visit_def( &mut dataset, AnalysisKind::Use, &import.span, &def.qualname, &def, None, None, ) } for def in &file_analysis.defs { let parent = def .parent .and_then(|parent_id| defs.get(file_analysis, parent_id)); if let Some(ref parent) = parent { if parent.kind == DefKind::Trait { let trait_dependent_name = construct_qualname(&parent.qualname, &def.name); visit_def( &mut dataset, AnalysisKind::Def, &def.span, &trait_dependent_name, &def, Some(&parent.qualname), None, ) } } let crate_id = &file_analysis.prelude.as_ref().unwrap().crate_id; let qualname = crate_independent_qualname(&def, crate_id); let nested_span = recursive_union_spans_of_def(def, &file_analysis, &defs);<|fim▁hole|> &def.span, &qualname, &def, parent.as_ref().map(|p| &*p.qualname), maybe_nested, ) } // We want to expose impls as "def,namespace" with an inferred nesting_range for their // contents. I don't know if it's a bug or just a dubious design decision, but the impls all // have empty values and no names, so to get a useful string out of them, we need to extract // the contents of their span directly. // // Because the name needs to be extracted from the source file, we omit this step if we were // unable to open the file. 
if let Some(mut source_file) = maybe_source_file { for imp in &file_analysis.impls { // (for simple.rs at least, there is never a parent) let name = match extract_span_from_source_as_string(&mut source_file, &imp.span) { Some(s) => s, None => continue, }; let crate_id = &file_analysis.prelude.as_ref().unwrap().crate_id; let qualname = construct_qualname(&crate_id.name, &name); let pretty = pretty_for_impl(&imp, &qualname); let nested_span = union_spans_of_defs(&imp.span, &imp.children, &file_analysis, &defs); let maybe_nested = ignore_boring_spans(&nested_span); // XXX visit_common currently never emits any syntax types; we want to pretend this is // a namespace once it does. visit_common( &mut dataset, AnalysisKind::Def, &imp.span, &qualname, &pretty, None, maybe_nested, ) } } for ref_ in &file_analysis.refs { let def = match defs.get(file_analysis, ref_.ref_id) { Some(d) => d, None => { debug!( "Dropping ref {:?}, kind {:?}, no def", ref_.ref_id, ref_.kind ); continue; } }; visit_def( &mut dataset, AnalysisKind::Use, &ref_.span, &def.qualname, &def, /* context = */ None, // TODO /* nesting = */ None, ) } for obj in &dataset { file.write_all(obj.as_bytes()).unwrap(); write!(file, "\n").unwrap(); } } // Replace any backslashes in the path with forward slashes. Paths can be a // combination of backslashes and forward slashes for windows platform builds // because the paths are normalized by a sed script that will match backslashes // and output front-slashes. The sed script could be made smarter. fn linuxized_path(path: &PathBuf) -> PathBuf { if let Some(pathstr) = path.to_str() { if pathstr.find('\\').is_some() { // Pesky backslashes, get rid of them! let converted = pathstr.replace('\\', "/"); // If we're seeing this, it means the paths weren't normalized and // now it's a question of minimizing fallout. 
if converted.find(":/") == Some(1) { // Starts with a drive letter, so let's turn this into // an absolute path let abs = "/".to_string() + &converted; return PathBuf::from(abs); } // Turn it into a relative path return PathBuf::from(converted); } } // Already a valid path! path.clone() } fn analyze_crate(analysis: &data::Analysis, defs: &Defs, tree_info: &TreeInfo) { // Create and populate per-file Analysis instances from the provided per-crate Analysis file. let mut per_file = HashMap::new(); let crate_name = &*analysis.prelude.as_ref().unwrap().crate_id.name; info!("Analyzing crate: '{}'", crate_name); debug!("Crate prelude: {:?}", analysis.prelude); macro_rules! flat_map_per_file { ($field:ident) => { for item in &analysis.$field { let file_analysis = per_file .entry(linuxized_path(&item.span.file_name)) .or_insert_with(|| { let prelude = analysis.prelude.clone(); let mut analysis = data::Analysis::new(analysis.config.clone()); analysis.prelude = prelude; analysis }); file_analysis.$field.push(item.clone()); } }; } flat_map_per_file!(imports); flat_map_per_file!(defs); flat_map_per_file!(impls); flat_map_per_file!(refs); flat_map_per_file!(macro_refs); flat_map_per_file!(relations); for (searchfox_path, analysis) in per_file.drain() { // Absolute paths mean that the save-analysis data wasn't normalized // into the searchfox path convention, which means we can't generate // analysis data, so just skip. // // This will be the case for libraries built with cargo that have paths // that have prefixes that look like "/cargo/registry/src/github.com-". 
if searchfox_path.is_absolute() { warn!( "Skipping absolute analysis path {}", searchfox_path.display() ); continue; } analyze_file(&searchfox_path, defs, &analysis, tree_info); } } fn main() { use clap::Arg; env_logger::init(); let matches = app_from_crate!() .args_from_usage( "<src> 'Points to the source root (FILES_ROOT)' <output> 'Points to the directory where searchfox metadata should go (ANALYSIS_ROOT)' <generated> 'Points to the generated source files root (GENERATED)'", ) .arg( Arg::with_name("input") .required(false) .multiple(true) .help("rustc analysis directories"), ) .get_matches(); let srcdir = Path::new(matches.value_of("src").unwrap()); let out_analysis_dir = Path::new(matches.value_of("output").unwrap()); let generated = Path::new(matches.value_of("generated").unwrap()); let tree_info = TreeInfo { srcdir, out_analysis_dir, generated, generated_friendly: &PathBuf::from("__GENERATED__"), }; info!("Tree info: {:?}", tree_info); let input_dirs = match matches.values_of("input") { Some(inputs) => inputs.map(PathBuf::from).collect(), None => vec![], }; let loader = Loader::new(input_dirs); let crates = rls_analysis::read_analysis_from_files(&loader, Default::default(), &[]); info!( "Crates: {:?}", crates.iter().map(|k| &k.id.name).collect::<Vec<_>>() ); // Create and populate Defs, a map from Id to Def, across all crates before beginning analysis. // This is necessary because Def and Ref instances name Defs via Id. let mut defs = Defs::new(); for krate in &crates { for def in &krate.analysis.defs { defs.insert(&krate.analysis, def); } } for krate in crates { analyze_crate(&krate.analysis, &defs, &tree_info); } }<|fim▁end|>
let maybe_nested = ignore_boring_spans(&nested_span); visit_def( &mut dataset, AnalysisKind::Def,
<|file_name|>travels.js<|end_file_name|><|fim▁begin|>angular.module('app.travels', ['pascalprecht.translate']) .controller('TravelsCtrl', function($scope, $http, $ionicModal, $timeout, $ionicLoading, $filter) { $scope.loadMorePagination = true; $scope.travels = []; $scope.page = 0; $scope.doRefresh = function() { /* travels refresh: */ $http.get(urlapi + 'travels?page=' + $scope.page) .then(function(data) { console.log('data success travels'); console.log(data); // for browser console //$scope.travels = data.data; // for UI $scope.travels = $scope.travels.concat(data.data); $scope.$broadcast('scroll.refreshComplete'); //refresher stop $scope.$broadcast('scroll.infiniteScrollComplete'); if (data.data.length < 1) { console.log("setting loadMorePagination to false"); $scope.loadMorePagination = false; $scope.$broadcast('scroll.infiniteScrollComplete'); } }, function(data) { console.log('data error'); $scope.$broadcast('scroll.refreshComplete'); //refresher stop $ionicLoading.show({ template: 'Error connecting server', noBackdrop: true, duration: 2000 }); }); }; $scope.doRefresh(); $scope.paginationNext = function() { if ($scope.loadMorePagination == true) { $scope.page++;<|fim▁hole|> $scope.$broadcast('scroll.infiniteScrollComplete'); } }; });<|fim▁end|>
console.log($scope.page); $scope.doRefresh(); }else{ console.log("limit pagination reached");
<|file_name|>launch.py<|end_file_name|><|fim▁begin|>import subprocess, os, zipfile, requests ## Function Download def download(url, fichier): pass fileName = fichier req = requests.get(url) file = open(fileName, 'wb') for chunk in req.iter_content(100000):<|fim▁hole|>def unzip(source , destination): with zipfile.ZipFile(source) as zf: zf.extractall(destination) nameinfo = open("name.info", "r") ServerName = nameinfo.readline().rstrip() Version = nameinfo.readline().rstrip() VersionServer = nameinfo.readline().rstrip() nameinfo.close() subprocess.call(['java', '-jar', ServerName +'.jar']) fichier = open("eula.txt", "w") fichier.write("eula = true") fichier.close() if not os.path.exists("world"): print("Whitch type of Minecraft server you want to create ?") a=input("[1] Pre-Build (Map and Plugin) Spigot Server [2] Blanc Spigot Server [3] Semi-Build (Plugin pre installed, blanc map) : ") if a == '1': print(VersionServer) if VersionServer == '1.9' or VersionServer == '1.8' or VersionServer == '1.7.10': download('https://raw.githubusercontent.com/dinnozap/MinecraftServerMaker/master/world.zip', 'world.zip') unzip('world.zip', '') if not os.path.exists("plugins"): os.mkdir("plugins") download('https://hub.spigotmc.org/jenkins/job/Spigot-Essentials/lastSuccessfulBuild/artifact/Essentials/target/Essentials-2.x-SNAPSHOT.jar', 'plugins/essentials.jar') download('https://www.spigotmc.org/resources/sexymotd.2474/download?version=73466', 'plugins/motd.jar') subprocess.call(['java', '-jar', ServerName +'.jar']) elif a=='2': subprocess.call(['java', '-jar', ServerName +'.jar']) elif a=='3': if not os.path.exists("plugins"): os.mkdir("plugins") download('https://hub.spigotmc.org/jenkins/job/Spigot-Essentials/lastSuccessfulBuild/artifact/Essentials/target/Essentials-2.x-SNAPSHOT.jar', 'plugins/essentials.jar') download('https://www.spigotmc.org/resources/sexymotd.2474/download?version=73466', 'plugins/motd.jar') subprocess.call(['java', '-jar', ServerName +'.jar'])<|fim▁end|>
file.write(chunk) file.close() print("The download is finish !") ## Function Unzip
<|file_name|>FluxIntegral.cpp<|end_file_name|><|fim▁begin|>/*************************************************************************** FluxIntegral.cpp - Integrates Lx dx L (x1, x2) = int^x2_x1 dx Lx ------------------- begin : December 2006 copyright : (C) 2006 by Maurice Leutenegger email : [email protected] ***************************************************************************/ /* This program is free software; you can redistribute it and/or modify<|fim▁hole|> This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "FluxIntegral.h" #include <iostream> using namespace std; FluxIntegral::FluxIntegral (Lx* lx) : Integral (), itsLx (lx), itsXKink (itsLx->getXKink ()), itsXOcc (itsLx->getXOcc ()) { return; } FluxIntegral::~FluxIntegral () { return; } double FluxIntegral::integrand (double x) { return itsLx->getLx (x); } /* Integrates Lx over [y,x]. Will mostly be called with y < x, but if not, it swaps them. Note: is there code somewhere else to deal with an unresolved profile? (The code below only deals with the case where it's known to be completely unresolved in advance.) */ Real FluxIntegral::getFlux (Real x, Real y) { if (compare (y, x) == 1) { Real temp = x; x = y; y = temp; } bool xInRange = (compare (fabs(x), 1.) == -1); bool yInRange = (compare (fabs(y), 1.) 
== -1); if (!xInRange && !yInRange) return 0.; if (!xInRange) x = 1.; if (!yInRange) y = -1.; if ((compare (itsXKink, y) == 1) && (compare (itsXKink, x) == -1)) { return (qag (y, itsXKink) + qag (itsXKink, x)); } if ((compare (itsXOcc, y) == 1) && (compare (itsXOcc, x) == -1)) { return (qag (y, itsXOcc) + qag (itsXOcc, x)); } return qag (y, x); /* The kink coordinate gives the point at which many profiles have a kink at negative x on the blue side of the profile. This kink occurs at the x where the cutoff u0 starts to become important. (At zero optical depth, the profile becomes flat at this point). The "occ" coordinate gives the point in x where occultation begins to become important. I choose to place this not at p = 1 but at p = 1 + e, to avoid edge effects. */ /* if (xInRange && yInRange) { return qag (y, x); } else if (xInRange && !yInRange) { return qag (-1., x); } else if (!xInRange && yInRange) { return qag (y, 1.); } else { return 0.; } */ } // Integrate on x in [-1:1] Real FluxIntegral::getFlux () { return (qag (-1., itsXKink) + qag (itsXKink, itsXOcc) + qag (itsXOcc, 1.)); }<|fim▁end|>
it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models from django.contrib.auth.models import User class OdooUser(models.Model): user = models.OneToOneField(User)<|fim▁hole|> username = models.CharField(max_length=256)<|fim▁end|>
odoo_id = models.BigIntegerField(primary_key=True)
<|file_name|>exists_movie.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals, division, absolute_import import os import re import logging from path import path from flexget import plugin from flexget.event import event from flexget.config_schema import one_or_more from flexget.plugin import get_plugin_by_name from flexget.utils.tools import TimedDict log = logging.getLogger('exists_movie') class FilterExistsMovie(object): """ Reject existing movies. Syntax: exists_movie: path: /path/to/movies [type: {dirs|files}] [allow_different_qualities: {better|yes|no}] [lookup: {imdb|no}] """ schema = { 'anyOf': [ one_or_more({'type': 'string', 'format': 'path'}), { 'type': 'object', 'properties': { 'path': one_or_more({'type': 'string', 'format': 'path'}), 'allow_different_qualities': {'enum': ['better', True, False], 'default': False}, 'type': {'enum': ['files', 'dirs'], 'default': 'dirs'}, 'lookup': {'enum': ['imdb', False], 'default': False} }, 'required': ['path'], 'additionalProperties': False } ] } dir_pattern = re.compile('\b(cd.\d|subs?|samples?)\b',re.IGNORECASE) file_pattern = re.compile('\.(avi|mkv|mp4|mpg|webm)$',re.IGNORECASE) def __init__(self): self.cache = TimedDict(cache_time='1 hour') def prepare_config(self, config): # if config is not a dict, assign value to 'path' key if not isinstance(config, dict): config = { 'path': config } if not config.get('type'): config['type'] = 'dirs' # if only a single path is passed turn it into a 1 element list if isinstance(config['path'], basestring): config['path'] = [config['path']] return config @plugin.priority(-1) def on_task_filter(self, task, config): if not task.accepted: log.debug('nothing accepted, aborting') return config = self.prepare_config(config) imdb_lookup = plugin.get_plugin_by_name('imdb_lookup').instance incompatible_files = 0 incompatible_entries = 0 count_entries = 0 count_files = 0 # list of imdb ids gathered from paths / cache qualities = {} for folder in config['path']: folder 
= path(folder).expanduser() # see if this path has already been scanned if folder in self.cache: log.verbose('Using cached scan for %s ...' % folder) qualities.update(self.cache[folder]) continue path_ids = {} if not folder.isdir(): log.critical('Path %s does not exist' % folder) continue log.verbose('Scanning path %s ...' % folder) # Help debugging by removing a lot of noise #logging.getLogger('movieparser').setLevel(logging.WARNING) #logging.getLogger('imdb_lookup').setLevel(logging.WARNING) # scan through items = [] if config.get('type') == 'dirs': for d in folder.walkdirs(errors='ignore'): if self.dir_pattern.search(d.name): continue items.append(d.name) elif config.get('type') == 'files': for f in folder.walkfiles(errors='ignore'): if not self.file_pattern.search(f.name): continue items.append(f.name) for item in items: count_files += 1 movie = get_plugin_by_name('parsing').instance.parse_movie(item) if config.get('lookup') == 'imdb': try: imdb_id = imdb_lookup.imdb_id_lookup(movie_title=movie.name, raw_title=item, session=task.session) if imdb_id in path_ids: log.trace('duplicate %s' % item) continue if imdb_id is not None: log.trace('adding: %s' % imdb_id) path_ids[imdb_id] = movie.quality except plugin.PluginError as e: log.trace('%s lookup failed (%s)' % (item, e.value)) incompatible_files += 1 else: path_ids[movie.name] = movie.quality log.trace('adding: %s' % movie.name) # store to cache and extend to found list self.cache[folder] = path_ids qualities.update(path_ids) log.debug('-- Start filtering entries ----------------------------------') # do actual filtering for entry in task.accepted: count_entries += 1 if config.get('lookup') == 'imdb': key = 'imdb_id' if not entry.get('imdb_id', eval_lazy=False): try: imdb_lookup.lookup(entry) except plugin.PluginError as e: log.trace('entry %s imdb failed (%s)' % (entry['title'], e.value)) incompatible_entries += 1 continue else: key = 'movie_name' if not entry.get('movie_name', eval_lazy=False):<|fim▁hole|> 
entry['movie_name'] = movie.name # actual filtering if entry[key] in qualities: if config.get('allow_different_qualities') == 'better': if entry['quality'] > qualities[entry[key]]: log.trace('better quality') continue elif config.get('allow_different_qualities'): if entry['quality'] != qualities[entry[key]]: log.trace('wrong quality') continue entry.reject('movie exists') if incompatible_files or incompatible_entries: log.verbose('There were some incompatible items. %s of %s entries ' 'and %s of %s directories could not be verified.' % (incompatible_entries, count_entries, incompatible_files, count_files)) log.debug('-- Finished filtering entries -------------------------------') @event('plugin.register') def register_plugin(): plugin.register(FilterExistsMovie, 'exists_movie', groups=['exists'], api_ver=2)<|fim▁end|>
movie = get_plugin_by_name('parsing').instance.parse_movie(entry['title'])
<|file_name|>list-question.client.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module('applicant-test').controller('ApplicantTestController', ['$scope', '$stateParams', '$location', 'Authentication', 'Questions', '$http', function($scope, $stateParams, $location, Authentication, Questions, $http ) { $scope.authentication =Authentication;<|fim▁hole|> var url = '/test/'; $http.get(url).success(function(response) { $scope.questions = response; console.log('Questions init'); console.log($scope.questions); }); }; // $scope.findOne = function() { // var url = '/test/' + $stateParams.testId; // console.log('Getting stateParams'); // console.log($stateParams); // $http.get(url).success(function(response) { // $scope.questions = response; // console.log('Questions init'); // console.log($scope.questions); // }); // // $scope.test = Questions.get({ // // testId: $stateParams.testId // // }); // }; }]);<|fim▁end|>
$scope.find = function(){
<|file_name|>tailf.py<|end_file_name|><|fim▁begin|>import os import io import stat import time import threading import sublime import sublime_plugin # Set of IDs of view that are being monitored. TAILF_VIEWS = set() STATUS_KEY = 'tailf' class TailF(sublime_plugin.TextCommand): ''' Start monitoring file in `tail -f` line style. ''' def __init__(self, *args, **kwargs): super(TailF, self).__init__(*args, **kwargs) self.prev_file_size = -1 self.prev_mod_time = -1 def run(self, edit): self.view.set_read_only(True) t = threading.Thread(target=self.thread_handler) TAILF_VIEWS.add(self.view.id()) self.view.set_status(STATUS_KEY, 'TailF mode') t.start() def thread_handler(self): while True: if self.view.id() in TAILF_VIEWS: if self.view.file_name() is None: sublime.error_message('File not save on disk') return else: file_stat = os.stat(self.view.file_name()) new_size = file_stat[stat.ST_SIZE] new_mod_time = file_stat[stat.ST_MTIME] if (new_mod_time > self.prev_mod_time or new_size != self.prev_file_size): self.view.run_command('update_file') self.view.run_command('move_to', args={'to': 'eof', 'extend': False}) self.prev_file_size = new_size self.prev_mod_time = new_mod_time time.sleep(self.view.settings().get('tailf_pull_rate')) else: return def description(self): return 'Starts monitoring file on disk' class StopTailF(sublime_plugin.TextCommand): ''' Stop monitoring file command. ''' def run(self, edit): TAILF_VIEWS.remove(self.view.id()) # restore view to previous state self.view.set_read_only(False) self.view.set_scratch(False) self.view.erase_status(STATUS_KEY) def description(self): return 'Stops monitoring file on disk' <|fim▁hole|>class UpdateFile(sublime_plugin.TextCommand): ''' Reloads content of the file and replaces view content with it. 
''' def run(self, edit): read_only = self.view.is_read_only() self.view.set_read_only(False) with io.open(self.view.file_name(), 'r', encoding='utf-8-sig') as f: content = f.read() whole_file = sublime.Region(0, self.view.size()) self.view.replace(edit, whole_file, content) self.view.set_read_only(read_only) # don't ask user if he want's to save changes to disk self.view.set_scratch(True) class TailFEventListener(sublime_plugin.EventListener): ''' Listener that removes files from monitored files once file is about to be closed. ''' def on_pre_close(self, view): if view.id() in TAILF_VIEWS: TAILF_VIEWS.remove(view.id())<|fim▁end|>
<|file_name|>test-fsreqwrap-readFile.js<|end_file_name|><|fim▁begin|>'use strict'; const common = require('../common'); const assert = require('assert'); const tick = require('./tick'); const initHooks = require('./init-hooks'); const { checkInvocations } = require('./hook-checks'); const fs = require('fs'); const hooks = initHooks(); hooks.enable(); fs.readFile(__filename, common.mustCall(onread)); function onread() {<|fim▁hole|> const a = as[i]; assert.strictEqual(a.type, 'FSREQWRAP'); assert.strictEqual(typeof a.uid, 'number'); assert.strictEqual(a.triggerAsyncId, lastParent); lastParent = a.uid; } checkInvocations(as[0], { init: 1, before: 1, after: 1, destroy: 1 }, 'reqwrap[0]: while in onread callback'); checkInvocations(as[1], { init: 1, before: 1, after: 1, destroy: 1 }, 'reqwrap[1]: while in onread callback'); checkInvocations(as[2], { init: 1, before: 1, after: 1, destroy: 1 }, 'reqwrap[2]: while in onread callback'); // this callback is called from within the last fs req callback therefore // the last req is still going and after/destroy haven't been called yet checkInvocations(as[3], { init: 1, before: 1 }, 'reqwrap[3]: while in onread callback'); tick(2); } process.on('exit', onexit); function onexit() { hooks.disable(); hooks.sanityCheck('FSREQWRAP'); const as = hooks.activitiesOfTypes('FSREQWRAP'); const a = as.pop(); checkInvocations(a, { init: 1, before: 1, after: 1, destroy: 1 }, 'when process exits'); }<|fim▁end|>
const as = hooks.activitiesOfTypes('FSREQWRAP'); let lastParent = 1; for (let i = 0; i < as.length; i++) {
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core'; @Component({ selector: 'app-component', template: ` <div class="container"><|fim▁hole|> <a [routerLink]="['about']">About</a> </p> <router-outlet></router-outlet> </div>` }) export class AppComponent { public isTestable:boolean = true; }<|fim▁end|>
<h1>Angular v4 boilerplate app</h1> <p> <a [routerLink]="['']">Example</a> |
<|file_name|>variables.py<|end_file_name|><|fim▁begin|>__author__ = 'sekely' ''' we are using variables almost everywhere in the code. variables are used to store results, calculations and many more. this of it as the famous "x" from high school x = 5, right? the only thing is, that in Python "x" can store anything ''' # try this code: x = 5 y = x + 3<|fim▁hole|># what about this? will it work? x = 'hello' y = ' ' z = 'world!' w = x + y + z print(w)<|fim▁end|>
print(y)
<|file_name|>icon.rs<|end_file_name|><|fim▁begin|>use image::{Rgba, RgbaImage}; use imageproc::drawing::draw_text_mut; use image::imageops::resize; use rusttype::{Scale, Font}; use std::ptr::null_mut; use std::collections::HashMap; <|fim▁hole|>pub type GeneratedIcon = *mut winapi::shared::windef::HICON__; pub struct IconGenerator { icon_cache: HashMap<u8, GeneratedIcon>, } impl IconGenerator { pub fn new() -> Self { IconGenerator { icon_cache: HashMap::with_capacity(100), } } pub fn generate(&mut self, value: u8) -> GeneratedIcon { if self.icon_cache.contains_key(&value) && value != 0 { return self.icon_cache[&value]; } else { let new_icon = IconGenerator::create_icon(value); self.icon_cache.insert(value, new_icon); new_icon } } fn scale_params(n: usize) -> ((u32, u32), Scale) { match n { 1 => { ((24, 0), Scale { x: 128.0, y: 128.0 }) } 2 => { ((0, 0), Scale { x: 120.0, y: 120.0 }) } _ => { ((0, 20), Scale { x: 80.0, y: 80.0 }) } } } fn create_icon(value: u8) -> GeneratedIcon { let value_to_draw = value.to_string(); let mut image = RgbaImage::new(128, 128); let font = Font::try_from_bytes(include_bytes!("fonts/Arial.ttf")).unwrap(); let scale_params = IconGenerator::scale_params(value_to_draw.len()); let coord = scale_params.0; draw_text_mut( &mut image, Rgba([255u8, 255u8, 255u8, 255u8]), coord.0, coord.1, scale_params.1, &font, &value_to_draw, ); let resized_image = resize( &mut image, 32, 32, image::imageops::FilterType::Lanczos3, ); unsafe { let hbm_mask = winapi::um::wingdi::CreateCompatibleBitmap( winapi::um::winuser::GetDC(null_mut()), 32, 32, ); let bytes_raw = resized_image.into_raw().as_mut_ptr(); let transmuted = std::mem::transmute::<*mut u8, *mut winapi::ctypes::c_void>(bytes_raw); let bitmap: winapi::shared::windef::HBITMAP = winapi::um::wingdi::CreateBitmap(32, 32, 2, 16, transmuted); let mut h_icon = winapi::um::winuser::ICONINFO { fIcon: 1, hbmColor: bitmap, hbmMask: hbm_mask, xHotspot: 0, yHotspot: 0, }; 
winapi::um::winuser::CreateIconIndirect(&mut h_icon) } } }<|fim▁end|>
<|file_name|>OrganizationTreeController.ts<|end_file_name|><|fim▁begin|>"use strict"; module ServiceRegister { export class OrganizationTreeController implements Affecto.Base.IController { public static $inject = ["$scope", "$routeParams", "$location", "organizationService", "busyIndicationService"]; public model: Tree; public selectedOrganizationId: string; public isEditModeEnabled: boolean; public treeOptions: any; constructor(private $scope: Affecto.Base.IViewScope, $routeParams: IOrganizationRoute, private $location: angular.ILocationService, private organizationService: OrganizationService, private busyIndicationService: Affecto.BusyIndication.IBusyIndicationService) { $scope.controller = this; $scope.model = this.model; this.setSelectedOrganizationId($routeParams); this.createTreeOptions(); this.retrieveOrganizationsAndExpandAllNodes(); this.isEditModeEnabled = false; } public navigateToOrganization(orgId: string, selected: boolean): void { if (selected) { this.$location.path("/Organizations/" + orgId); }<|fim▁hole|> $event.stopPropagation(); this.$location.path("/Organizations/" + node.id + "/Organizations"); } public get canEdit(): boolean { return !this.isEditModeEnabled && this.model != null && this.model.value != null && this.model.value.length > 0; } public enableEditMode(): void { this.isEditModeEnabled = true; } public disableEditMode(): void { this.isEditModeEnabled = false; } private createTreeOptions(): void { this.treeOptions = { templateUrl: "organizationTreeTemplate.html", isOrgTree: true, selectedNodeId: this.selectedOrganizationId } } private setSelectedOrganizationId($routeParams: IOrganizationRoute): void { if ($routeParams != null) { this.selectedOrganizationId = $routeParams.organizationId; } } private retrieveOrganizationsAndExpandAllNodes(): void { this.busyIndicationService.showBusyIndicator("Haetaan organisaatioita..."); this.organizationService.getOrganizationHierarchy() .then((orgs: Tree) => { this.model = orgs; 
this.model.expandAll(); this.busyIndicationService.hideBusyIndicator(); }); } } }<|fim▁end|>
} public createSubOrganization($event: any, node: any): void {
<|file_name|>print_completion_loop.rs<|end_file_name|><|fim▁begin|>// // use async_std::prelude::*; // use async_std::fs; // use std::sync::Arc; // use eyre::{ // eyre, // Result, // // Context as _, // }; // // use bytes::BufMut; // use futures::stream::{StreamExt}; // use crate::models::VersionedModel; // use crate::print_queue::tasks::{ // Task, // // TaskStatus, // TaskContent, // Part, // Package, // }; // use crate::machine::models::{ // Machine, // MachineStatus, // // Printing, // }; // pub async fn run_print_completion_loop( // ctx: Arc<crate::Context>, // ) -> Result<()> { // let mut task_changes = Task::watch_all_changes(&ctx.db)?; // loop { // use crate::models::versioned_model::Change; // let change = task_changes.next().await // .ok_or_else(|| eyre!("print loop task stream unexpectedly ended"))??; // let was_pending = change.previous // .as_ref() // .map(|t| t.status.is_pending()) // .unwrap_or(true); <|fim▁hole|>// match change { // // Handle settled tasks // Change { next: Some(task), .. } if was_pending && task.status.is_settled() => { // if task.is_print() && task.status.was_successful() { // let config = ctx.machine_config.load(); // let automatic_printing = config.core_plugin()?.model.automatic_printing; // handle_print_completion( // &ctx, // automatic_printing, // &task, // ).await?; // } // // Delete the settled task // Task::remove(&ctx.db, task.id)?; // ctx.db.flush_async().await?; // } // Change { previous: Some(task), next: None, .. 
} => { // // clean up files on task deletion // if let TaskContent::FilePath(file_path) = task.content { // fs::remove_file(file_path).await?; // } // } // _ => {} // } // } // } // async fn handle_print_completion( // ctx: &Arc<crate::Context>, // automatic_printing: bool, // task: &Task, // ) -> Result<()> { // let print = task.print.as_ref().ok_or_else(|| // eyre!("Missing print for task: {}", task.id) // )?; // // Parts printed update // Part::get_and_update(&ctx.db, print.part_id, |mut part| { // part.printed += 1; // part // })?; // // Parts + Package deletion // let package = Package::find(&ctx.db, |package| { // package.id == print.package_id // })?; // let parts = Part::filter(&ctx.db, |part| { // part.package_id == print.package_id // })?; // if package.started_final_print(&parts) { // Package::remove(&ctx.db, package.id)?; // for part in parts { // Part::remove(&ctx.db, part.id)?; // } // } // Ok(()) // }<|fim▁end|>
<|file_name|>conv.cc<|end_file_name|><|fim▁begin|>#include <iostream> #include "DVB.hh" int main(int argc, char **argv) {<|fim▁hole|> con >> dvbd; // dvbd.set_outtype(VDR_OUT); cout << dvbd; }<|fim▁end|>
ifstream con(argv[1]); DVB dvbd(-1);
<|file_name|>test_domain_translation.py<|end_file_name|><|fim▁begin|>from wordbook.domain.models import Translation def test_translation_dto(): t = Translation( id=1, from_language='en', into_language='pl', word='apple', ipa='ejpyl', simplified='epyl', translated='jabłko', ) assert t.dto_autocomplete() == dict( id=1, word='apple',<|fim▁hole|> translation='jabłko', ipa='ejpyl', simplified='epyl', )<|fim▁end|>
<|file_name|>fda_applications.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import absolute_import<|fim▁hole|>from __future__ import print_function from __future__ import unicode_literals import pytest @pytest.fixture def fda_application(conn, organization): fda_application = { 'id': 'ANDA018659', 'organisation_id': organization, 'drug_name': 'ALLOPURINOL', 'active_ingredients': 'ALLOPURINOL', } fda_application_id = conn['database']['fda_applications'].insert(fda_application) return fda_application_id<|fim▁end|>
from __future__ import division
<|file_name|>tanh_normal_projection_network.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2020 The PI-SAC Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Project inputs to a tanh-squashed MultivariateNormalDiag distribution.""" import gin import tensorflow as tf import tensorflow_probability as tfp from tf_agents.distributions import utils as distribution_utils from tf_agents.networks import network from tf_agents.networks import utils as network_utils from tf_agents.specs import distribution_spec from tf_agents.specs import tensor_spec @gin.configurable class TanhNormalProjectionNetwork(network.DistributionNetwork): """Generates a tanh-squashed MultivariateNormalDiag distribution.""" def __init__(self, sample_spec, activation_fn=None, kernel_initializer=None, std_transform=tf.exp, min_std=None, max_std=None, name='TanhNormalProjectionNetwork'): """Creates an instance of TanhNormalProjectionNetwork. Args: sample_spec: A `tensor_spec.BoundedTensorSpec` detailing the shape and dtypes of samples pulled from the output distribution. activation_fn: Activation function to use in dense layer. kernel_initializer: Initializer to use for the kernels of the conv and dense layers. If none is provided a default glorot_uniform std_transform: Transformation function to apply to the stddevs. min_std: Minimum std. max_std: Maximum std. name: A string representing name of the network. 
""" if len(tf.nest.flatten(sample_spec)) != 1: raise ValueError('Tanh Normal Projection network only supports single' ' spec samples.') output_spec = self._output_distribution_spec(sample_spec, name) super(TanhNormalProjectionNetwork, self).__init__( # We don't need these, but base class requires them. input_tensor_spec=None, state_spec=(), output_spec=output_spec, name=name) self._sample_spec = sample_spec self._std_transform = std_transform self._min_std = min_std self._max_std = max_std if kernel_initializer is None: kernel_initializer = 'glorot_uniform' self._projection_layer = tf.keras.layers.Dense( sample_spec.shape.num_elements() * 2, activation=activation_fn, kernel_initializer=kernel_initializer, name='projection_layer') def _output_distribution_spec(self, sample_spec, network_name): input_param_shapes = { 'loc': sample_spec.shape, 'scale_diag': sample_spec.shape } input_param_spec = { # pylint: disable=g-complex-comprehension name: tensor_spec.TensorSpec( shape=shape, dtype=sample_spec.dtype, name=network_name + '_' + name) for name, shape in input_param_shapes.items() } def distribution_builder(*args, **kwargs): distribution = tfp.distributions.MultivariateNormalDiag(*args, **kwargs) return distribution_utils.scale_distribution_to_spec( distribution, sample_spec)<|fim▁hole|> return distribution_spec.DistributionSpec( distribution_builder, input_param_spec, sample_spec=sample_spec) def call(self, inputs, outer_rank, training=False, mask=None): if inputs.dtype != self._sample_spec.dtype: raise ValueError('Inputs to TanhNormalProjectionNetwork must match the ' 'sample_spec.dtype.') if mask is not None: raise NotImplementedError( 'TanhNormalProjectionNetwork does not yet implement action masking; ' 'got mask={}'.format(mask)) # outer_rank is needed because the projection is not done on the raw # observations so getting the outer rank is hard as there is no spec to # compare to. 
batch_squash = network_utils.BatchSquash(outer_rank) inputs = batch_squash.flatten(inputs) means_and_stds = self._projection_layer(inputs, training=training) means, stds = tf.split(means_and_stds, num_or_size_splits=2, axis=-1) means = tf.reshape(means, [-1] + self._sample_spec.shape.as_list()) means = tf.cast(means, self._sample_spec.dtype) if self._std_transform is not None: stds = self._std_transform(stds) if self._min_std is not None: stds = tf.maximum(stds, self._min_std) if self._max_std is not None: stds = tf.minimum(stds, self._max_std) stds = tf.cast(stds, self._sample_spec.dtype) means = batch_squash.unflatten(means) stds = batch_squash.unflatten(stds) return self.output_spec.build_distribution(loc=means, scale_diag=stds), ()<|fim▁end|>
<|file_name|>testmagic.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import magic import sys m = magic.open(magic.MIME_TYPE) m.load() for f in sys.argv[1:]: try : print(f, m.file(f)) except : print("Except with %s" % f)<|fim▁end|>
#!/usr/bin/python
<|file_name|>Stylus.java<|end_file_name|><|fim▁begin|>/* * Pixel Dungeon * Copyright (C) 2012-2014 Oleg Dolya * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/> */ package com.watabou.legacy.items; import java.util.ArrayList;<|fim▁hole|>import com.watabou.legacy.actors.hero.Hero; import com.watabou.legacy.effects.particles.PurpleParticle; import com.watabou.legacy.items.armor.Armor; import com.watabou.legacy.scenes.GameScene; import com.watabou.legacy.sprites.ItemSpriteSheet; import com.watabou.legacy.utils.GLog; import com.watabou.legacy.windows.WndBag; import com.watabou.noosa.audio.Sample; public class Stylus extends Item { private static final String TXT_SELECT_ARMOR = "Select an armor to inscribe on"; private static final String TXT_INSCRIBED = "you inscribed the %s on your %s"; private static final float TIME_TO_INSCRIBE = 2; private static final String AC_INSCRIBE = "INSCRIBE"; { name = "arcane stylus"; image = ItemSpriteSheet.STYLUS; stackable = true; } @Override public ArrayList<String> actions( Hero hero ) { ArrayList<String> actions = super.actions( hero ); actions.add( AC_INSCRIBE ); return actions; } @Override public void execute( Hero hero, String action ) { if (action == AC_INSCRIBE) { curUser = hero; GameScene.selectItem( itemSelector, WndBag.Mode.ARMOR, TXT_SELECT_ARMOR ); } else { super.execute( hero, action ); } } @Override public boolean isUpgradable() { return false; } 
@Override public boolean isIdentified() { return true; } private void inscribe( Armor armor ) { detach( curUser.belongings.backpack ); Class<? extends Armor.Glyph> oldGlyphClass = armor.glyph != null ? armor.glyph.getClass() : null; Armor.Glyph glyph = Armor.Glyph.random(); while (glyph.getClass() == oldGlyphClass) { glyph = Armor.Glyph.random(); } GLog.w( TXT_INSCRIBED, glyph.name(), armor.name() ); armor.inscribe( glyph ); curUser.sprite.operate( curUser.pos ); curUser.sprite.centerEmitter().start( PurpleParticle.BURST, 0.05f, 10 ); Sample.INSTANCE.play( Assets.SND_BURNING ); curUser.spend( TIME_TO_INSCRIBE ); curUser.busy(); } @Override public int price() { return 50 * quantity; } @Override public String info() { return "This arcane stylus is made of some dark, very hard stone. Using it you can inscribe " + "a magical glyph on your armor, but you have no power over choosing what glyph it will be, " + "the stylus will decide it for you."; } private final WndBag.Listener itemSelector = new WndBag.Listener() { @Override public void onSelect( Item item ) { if (item != null) { Stylus.this.inscribe( (Armor)item ); } } }; }<|fim▁end|>
import com.watabou.legacy.Assets;
<|file_name|>borrowck-closures-unique-imm.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. struct Foo { x: int, } pub fn main() { let mut this = &mut Foo { x: 1, }; let r = || {<|fim▁hole|>}<|fim▁end|>
let p = &this.x; &mut this.x; //~ ERROR cannot borrow }; r()
<|file_name|>test_chart_date04.py<|end_file_name|><|fim▁begin|>############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2015, John McNamara, [email protected] # from datetime import date from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'chart_date04.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {'xl/charts/chart1.xml': ['<c:formatCode']} def test_create_file(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'line'}) date_format = workbook.add_format({'num_format': 14}) chart.axis_ids = [51761152, 51762688] worksheet.set_column('A:A', 12) dates = [date(2013, 1, 1), date(2013, 1, 2), date(2013, 1, 3), date(2013, 1, 4), date(2013, 1, 5), date(2013, 1, 6), date(2013, 1, 7), date(2013, 1, 8), date(2013, 1, 9),<|fim▁hole|> worksheet.write_column('A1', dates, date_format) worksheet.write_column('B1', values) chart.add_series({ 'categories': '=Sheet1!$A$1:$A$10', 'values': '=Sheet1!$B$1:$B$10', }) chart.set_x_axis({ 'date_axis': True, 'minor_unit': 1, 'major_unit': 1, 'minor_unit_type': 'months', 'major_unit_type': 'years', 'num_format': 'dd/mm/yyyy', 'num_format_linked': True, }) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()<|fim▁end|>
date(2013, 1, 10)] values = [10, 30, 20, 40, 20, 60, 50, 40, 30, 30]
<|file_name|>ioc.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from __future__ import unicode_literals import glob import imp import inspect import os import sys from mb.config.config import get_default_config_file from mb.lib import logger from mb.lib import process _log = logger.get_logger('[Ioc]') # plugin types from mb import build_context # BuildContext # NOQA from mb import command # Command # NOQA from mb import template_engine # TemplateEngine # NOQA from mb import version_scheme #VersionScheme # NOQA from mb.config.config import PluginConfig # NOQA def rchop(thestring, ending): if thestring.endswith(ending): return thestring[:-len(ending)] return thestring def _is_plugin_type(object_attr, plugin_type): try: if object_attr == plugin_type: return False return issubclass(object_attr, plugin_type) except: return False _plugin_modules = [build_context, command, template_engine, version_scheme] _plugin_types = [build_context.BuildContext, command.Command, template_engine.TemplateEngine, version_scheme.VersionScheme] _loaded_plugin_definitions = {} _plugin_instances = {} _config = get_default_config_file() if os.path.isdir(_config.plugin_dir): os.chdir(_config.plugin_dir) for file in glob.glob("*.py"): plugin_module_name_template = "silverbp_mb_plugin_" + os.path.splitext(file)[0] + "_%d" for plugin_name_suffix in range(len(sys.modules)): plugin_module_name = plugin_module_name_template % plugin_name_suffix if plugin_module_name not in sys.modules: break with open(file, "r") as plugin_file: _plugin_modules.append(imp.load_module(plugin_module_name, plugin_file, file, ("py", "r", imp.PY_SOURCE))) for module in _plugin_modules: for module_attr in (getattr(module, name) for name in dir(module)): for plugin_type in _plugin_types: if not _is_plugin_type(module_attr, plugin_type): continue _loaded_plugin_definitions[module_attr.__name__] = module_attr _defined_commands = _config.commands _defined_commands['_prerun'] = 
PluginConfig('MBPreRunCommand', {}, _config) command_plugins = [k for (k, v) in _loaded_plugin_definitions.items() if _is_plugin_type(v, command.Command)] for (k, v) in _config.commands.items(): if v.name not in command_plugins: _log.warn('The following Command: {0} was not found and will not be available'.format(k)) del _defined_commands[k] _log.debug('The following commands will be available: {0}'.format([k for (k, v) in _defined_commands.items() if not k.startswith('_')])) def _load_plugin(plugin): if plugin.name in _plugin_instances.keys(): return _plugin_instances[plugin.name] plugin_definition = _loaded_plugin_definitions[plugin.name] arguments = [] # if the plugin doesn't have a constructor, there's nothing to inject if '__init__' in getattr(plugin_definition, '__dict__', None).keys(): for arg in inspect.getargspec(plugin_definition.__init__)[0][1:]: arguments.append(load_dependency(arg)) instance = plugin_definition(*arguments) available_properties = [x for x, y in inspect.getmembers(instance.__class__, lambda x: isinstance(x, property))] for (key, value) in plugin.config.items(): if key in available_properties: try: setattr(instance, key, value) except Exception as err: _log.warn('There was a problem setting the plugin config: \'{0}\' on \'{1}\' with \'{2}\'.'.format(plugin.name, key, value)) _log.debug('Exception occured while trying to set a plugin config value: {0}'.format(err)) else: _log.warn('The following plugin config: {0}, is not an option to set on {1}'.format(key, plugin.name)) _plugin_instances[plugin.name] = instance return instance def load_dependency(name): if name == 'config': return _config if name == 'process': return process return _load_plugin(getattr(_config, name)) def get_commands(): return [k for (k, v) in _defined_commands.items() if not k.startswith('_')] def load_command(name):<|fim▁hole|> plugin = _defined_commands[name] else: raise StandardError('The following command: {0} is not available'.format(name)) return 
_load_plugin(plugin)<|fim▁end|>
if name in _defined_commands.keys():
<|file_name|>less.js<|end_file_name|><|fim▁begin|>/** * Compiles LESS files into CSS. * * --------------------------------------------------------------- * * Only the `assets/styles/importer.less` is compiled. * This allows you to control the ordering yourself, i.e. import your * dependencies, mixins, variables, resets, etc. before other stylesheets) * * For usage docs see: * https://github.com/gruntjs/grunt-contrib-less */ module.exports = function(grunt) { grunt.config.set('less', { dev: { files: [/*{ expand: true, cwd: 'assets/styles/', src: ['importer.less'],<|fim▁hole|> dest: '.tmp/public/styles/', ext: '.css' },*/ { expand: true, cwd: 'assets/styles/dashboard/', src: ['dashboard.less'], dest: '.tmp/public/styles/dashboard/', ext: '.css' }] } }); grunt.loadNpmTasks('grunt-contrib-less'); };<|fim▁end|>
<|file_name|>regress-441477-01.js<|end_file_name|><|fim▁begin|>/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ var gTestfile = 'regress-441477-01.js'; //----------------------------------------------------------------------------- var BUGNUMBER = 441477-01; var summary = ''; var actual = 'No Exception'; var expect = 'No Exception'; //----------------------------------------------------------------------------- test(); //----------------------------------------------------------------------------- <|fim▁hole|>function test() { enterFunc ('test'); printBugNumber(BUGNUMBER); printStatus (summary); try { for (i = 0; i < 5;) { if (i > 5) throw "bad"; i++; continue; } } catch(ex) { actual = ex + ''; } reportCompare(expect, actual, summary); exitFunc ('test'); }<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import urllib2 from lxml import etree class Scraper(object): # subclass must override this property url = None def scrape(self): stream = self.get_stream() doc = self.get_document(stream) return self.do_scrape(doc) def get_document(self, stream): doc = etree.parse(stream, etree.HTMLParser(remove_comments=True))<|fim▁hole|> def get_stream(self): return urllib2.urlopen(self.url) def do_scrape(self, doc): raise NotImplementedError()<|fim▁end|>
return doc
<|file_name|>json_merge.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0. use super::{Json, JsonRef, JsonType}; use crate::codec::{Error, Result}; use std::collections::BTreeMap; impl Json { /// `merge` is the implementation for JSON_MERGE in mysql /// <https://dev.mysql.com/doc/refman/5.7/en/json-modification-functions.html#function_json-merge> /// /// The merge rules are listed as following: /// 1. adjacent arrays are merged to a single array; /// 2. adjacent object are merged to a single object; /// 3. a scalar value is autowrapped as an array before merge; /// 4. an adjacent array and object are merged by autowrapping the object as an array. /// /// See `MergeBinary()` in TiDB `json/binary_function.go` #[allow(clippy::comparison_chain)] pub fn merge(bjs: Vec<JsonRef>) -> Result<Json> { let mut result = vec![]; let mut objects = vec![]; for j in bjs { if j.get_type() != JsonType::Object { if objects.len() == 1 { let o = objects.pop().unwrap(); result.push(MergeUnit::Ref(o)); } else if objects.len() > 1 { // We have adjacent JSON objects, merge them into a single object result.push(MergeUnit::Owned(merge_binary_object(&mut objects)?)); } result.push(MergeUnit::Ref(j)); } else { objects.push(j); } } // Resolve the possibly remained objects if !objects.is_empty() { result.push(MergeUnit::Owned(merge_binary_object(&mut objects)?)); } if result.len() == 1 { return Ok(result.pop().unwrap().into_owned()); } merge_binary_array(&result) } } enum MergeUnit<'a> { Ref(JsonRef<'a>), Owned(Json), } impl<'a> MergeUnit<'a> { fn as_ref(&self) -> JsonRef<'_> { match self { MergeUnit::Ref(r) => *r, MergeUnit::Owned(o) => o.as_ref(), } } fn into_owned(self) -> Json { match self { MergeUnit::Ref(r) => r.to_owned(), MergeUnit::Owned(o) => o, } } } // See `mergeBinaryArray()` in TiDB `json/binary_function.go` fn merge_binary_array(elems: &[MergeUnit]) -> Result<Json> { let mut buf = vec![]; for j in elems.iter() { let j = j.as_ref(); if 
j.get_type() != JsonType::Array { buf.push(j) } else { let child_count = j.get_elem_count(); for i in 0..child_count { buf.push(j.array_get_elem(i)?); } } } Json::from_ref_array(buf) } // See `mergeBinaryObject()` in TiDB `json/binary_function.go` fn merge_binary_object(objects: &mut Vec<JsonRef>) -> Result<Json> { let mut kv_map: BTreeMap<String, Json> = BTreeMap::new(); for j in objects.drain(..) { let elem_count = j.get_elem_count(); for i in 0..elem_count { let key = j.object_get_key(i); let val = j.object_get_val(i)?; let key = String::from_utf8(key.to_owned()).map_err(Error::from)?; if let Some(old) = kv_map.remove(&key) { let new = Json::merge(vec![old.as_ref(), val])?; kv_map.insert(key, new); } else { kv_map.insert(key, val.to_owned()); } } } Json::from_object(kv_map) } #[cfg(test)] mod tests { use super::*; #[test] fn test_merge() { let test_cases = vec![ vec![r#"{"a": 1}"#, r#"{"b": 2}"#, r#"{"a": 1, "b": 2}"#], vec![r#"{"a": 1}"#, r#"{"a": 2}"#, r#"{"a": [1, 2]}"#], vec![r#"{"a": 1}"#, r#"{"a": [2, 3]}"#, r#"{"a": [1, 2, 3]}"#], vec![ r#"{"a": 1}"#, r#"{"a": {"b": [2, 3]}}"#, r#"{"a": [1, {"b": [2, 3]}]}"#, ], vec![ r#"{"a": {"b": [2, 3]}}"#, r#"{"a": 1}"#, r#"{"a": [{"b": [2, 3]}, 1]}"#, ], vec![ r#"{"a": [1, 2]}"#, r#"{"a": {"b": [3, 4]}}"#, r#"{"a": [1, 2, {"b": [3, 4]}]}"#, ], vec![ r#"{"b": {"c": 2}}"#, r#"{"a": 1, "b": {"d": 1}}"#, r#"{"a": 1, "b": {"c": 2, "d": 1}}"#, ], vec![r#"[1]"#, r#"[2]"#, r#"[1, 2]"#], vec![r#"{"a": 1}"#, r#"[1]"#, r#"[{"a": 1}, 1]"#], vec![r#"[1]"#, r#"{"a": 1}"#, r#"[1, {"a": 1}]"#], vec![r#"{"a": 1}"#, r#"4"#, r#"[{"a": 1}, 4]"#], vec![r#"[1]"#, r#"4"#, r#"[1, 4]"#], vec![r#"4"#, r#"{"a": 1}"#, r#"[4, {"a": 1}]"#],<|fim▁hole|> vec![ r#"{"a": 1, "b": {"c": 3, "d": 4}, "e": [5, 6]}"#, r#"{"c": 7, "b": {"a": 8, "c": 9}, "f": [1, 2]}"#, r#"{"d": 9, "b": {"b": 10, "c": 11}, "e": 8}"#, r#"{ "a": 1, "b": {"a": 8, "b": 10, "c": [3, 9, 11], "d": 4}, "c": 7, "d": 9, "e": [5, 6, 8], "f": [1, 2] }"#, ], ]; for case in test_cases { 
let (to_be_merged, expect) = case.split_at(case.len() - 1); let jsons = to_be_merged .iter() .map(|s| s.parse::<Json>().unwrap()) .collect::<Vec<Json>>(); let refs = jsons.iter().map(|j| j.as_ref()).collect::<Vec<_>>(); let res = Json::merge(refs).unwrap(); let expect: Json = expect[0].parse().unwrap(); assert_eq!(res, expect); } } }<|fim▁end|>
vec![r#"1"#, r#"[4]"#, r#"[1, 4]"#], vec![r#"4"#, r#"1"#, r#"[4, 1]"#], vec!["1", "2", "3", "[1, 2, 3]"], vec!["[1, 2]", "3", "[4, 5, 6]", "[1, 2, 3, 4, 5, 6]"],
<|file_name|>comp-4309.component.spec.ts<|end_file_name|><|fim▁begin|>/** * @license<|fim▁hole|> * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { Comp4309Component } from './comp-4309.component'; describe('Comp4309Component', () => { let component: Comp4309Component; let fixture: ComponentFixture<Comp4309Component>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ Comp4309Component ] }) .compileComponents(); })); beforeEach(() => { fixture = TestBed.createComponent(Comp4309Component); component = fixture.componentInstance; fixture.detectChanges(); }); it('should create', () => { expect(component).toBeTruthy(); }); });<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import class Newsletter(object): __all__ = ('is_enabled', 'get_subscriptions', 'update_subscription', 'create_or_update_subscription') DEFAULT_LIST_ID = 1 enabled = False def is_enabled(self): return self.enabled def get_subscriptions(self, user): return None <|fim▁hole|> kwargs['create'] = True return self.update_subscription(user, **kwargs)<|fim▁end|>
def update_subscription(self, user, **kwargs): return None def create_or_update_subscription(self, user, **kwargs):
<|file_name|>wizard_nginx.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The go-ethereum Authors // This file is part of go-ethereum. // // go-ethereum is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // go-ethereum is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with go-ethereum. If not, see <http://www.gnu.org/licenses/>. package main<|fim▁hole|> "fmt" "github.com/energicryptocurrency/energi/log" ) // ensureVirtualHost checks whether a reverse-proxy is running on the specified // host machine, and if yes requests a virtual host from the user to host a // specific web service on. If no proxy exists, the method will offer to deploy // one. // // If the user elects not to use a reverse proxy, an empty hostname is returned! func (w *wizard) ensureVirtualHost(client *sshClient, port int, def string) (string, error) { proxy, _ := checkNginx(client, w.network) if proxy != nil { // Reverse proxy is running, if ports match, we need a virtual host if proxy.port == port { fmt.Println() fmt.Printf("Shared port, which domain to assign? (default = %s)\n", def) return w.readDefaultString(def), nil } } // Reverse proxy is not running, offer to deploy a new one fmt.Println() fmt.Println("Allow sharing the port with other services (y/n)? (default = yes)") if w.readDefaultYesNo(true) { nocache := false if proxy != nil { fmt.Println() fmt.Printf("Should the reverse-proxy be rebuilt from scratch (y/n)? 
(default = no)\n") nocache = w.readDefaultYesNo(false) } if out, err := deployNginx(client, w.network, port, nocache); err != nil { log.Error("Failed to deploy reverse-proxy", "err", err) if len(out) > 0 { fmt.Printf("%s\n", out) } return "", err } // Reverse proxy deployed, ask again for the virtual-host fmt.Println() fmt.Printf("Proxy deployed, which domain to assign? (default = %s)\n", def) return w.readDefaultString(def), nil } // Reverse proxy not requested, deploy as a standalone service return "", nil }<|fim▁end|>
import (
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Simple getopt alternative. //! //! Construct a vector of options, either by using `reqopt`, `optopt`, and `optflag` //! or by building them from components yourself, and pass them to `getopts`, //! along with a vector of actual arguments (not including `argv[0]`). You'll //! either get a failure code back, or a match. You'll have to verify whether //! the amount of 'free' arguments in the match is what you expect. Use `opt_*` //! accessors to get argument values out of the matches object. //! //! Single-character options are expected to appear on the command line with a //! single preceding dash; multiple-character options are expected to be //! proceeded by two dashes. Options that expect an argument accept their //! argument following either a space or an equals sign. Single-character //! options don't require the space. //! //! # Example //! //! The following example shows simple command line parsing for an application //! that requires an input file to be specified, accepts an optional output //! file name following `-o`, and accepts both `-h` and `--help` as optional flags. //! //! ~~~{.rust} //! extern crate getopts; //! use getopts::{optopt,optflag,getopts,OptGroup}; //! use std::os; //! //! fn do_work(inp: &str, out: Option<~str>) { //! println!("{}", inp); //! match out { //! Some(x) => println!("{}", x), //! None => println!("No Output"), //! } //! } //! //! fn print_usage(program: &str, _opts: &[OptGroup]) { //! 
println!("Usage: {} [options]", program); //! println!("-o\t\tOutput"); //! println!("-h --help\tUsage"); //! } //! //! fn main() { //! let args = os::args(); //! //! let program = args[0].clone(); //! //! let opts = [ //! optopt("o", "", "set output file name", "NAME"), //! optflag("h", "help", "print this help menu") //! ]; //! let matches = match getopts(args.tail(), opts) { //! Ok(m) => { m } //! Err(f) => { fail!(f.to_err_msg()) } //! }; //! if matches.opt_present("h") { //! print_usage(program, opts); //! return; //! } //! let output = matches.opt_str("o"); //! let input: &str = if !matches.free.is_empty() { //! (*matches.free.get(0)).clone() //! } else { //! print_usage(program, opts); //! return; //! }; //! do_work(input, output); //! } //! ~~~ #![crate_id = "getopts#0.11-pre"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![license = "MIT/ASL2"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://static.rust-lang.org/doc/master")] #![feature(globs, phase)] #![deny(missing_doc)] #![deny(deprecated_owned_vector)] #[cfg(test)] #[phase(syntax, link)] extern crate log; use std::cmp::Eq; use std::result::{Err, Ok}; use std::result; use std::strbuf::StrBuf; /// Name of an option. Either a string or a single char. #[deriving(Clone, Eq)] pub enum Name { /// A string representing the long name of an option. /// For example: "help" Long(~str), /// A char representing the short name of an option. /// For example: 'h' Short(char), } /// Describes whether an option has an argument. #[deriving(Clone, Eq)] pub enum HasArg { /// The option requires an argument. Yes, /// The option is just a flag, therefore no argument. No, /// The option argument is optional and it could or not exist. Maybe, } /// Describes how often an option may occur. #[deriving(Clone, Eq)] pub enum Occur { /// The option occurs once. Req, /// The option could or not occur. 
Optional, /// The option occurs once or multiple times. Multi, } /// A description of a possible option. #[deriving(Clone, Eq)] pub struct Opt { /// Name of the option pub name: Name, /// Whether it has an argument pub hasarg: HasArg, /// How often it can occur pub occur: Occur, /// Which options it aliases pub aliases: Vec<Opt> , } /// One group of options, e.g., both -h and --help, along with /// their shared description and properties. #[deriving(Clone, Eq)] pub struct OptGroup { /// Short Name of the `OptGroup` pub short_name: ~str, /// Long Name of the `OptGroup` pub long_name: ~str, /// Hint pub hint: ~str, /// Description pub desc: ~str, /// Whether it has an argument pub hasarg: HasArg, /// How often it can occur pub occur: Occur } /// Describes wether an option is given at all or has a value. #[deriving(Clone, Eq)] enum Optval { Val(~str), Given, } /// The result of checking command line arguments. Contains a vector /// of matches and a vector of free strings. #[deriving(Clone, Eq)] pub struct Matches { /// Options that matched opts: Vec<Opt> , /// Values of the Options that matched vals: Vec<Vec<Optval> > , /// Free string fragments pub free: Vec<~str>, } /// The type returned when the command line does not conform to the /// expected format. Call the `to_err_msg` method to retrieve the /// error as a string. #[deriving(Clone, Eq, Show)] pub enum Fail_ { /// The option requires an argument but none was passed. ArgumentMissing(~str), /// The passed option is not declared among the possible options. UnrecognizedOption(~str), /// A required option is not present. OptionMissing(~str), /// A single occurence option is being used multiple times. OptionDuplicated(~str), /// There's an argument being passed to a non-argument option. UnexpectedArgument(~str), } /// The type of failure that occurred. 
#[deriving(Eq)] #[allow(missing_doc)] pub enum FailType { ArgumentMissing_, UnrecognizedOption_, OptionMissing_, OptionDuplicated_, UnexpectedArgument_, } /// The result of parsing a command line with a set of options. pub type Result = result::Result<Matches, Fail_>; impl Name { fn from_str(nm: &str) -> Name { if nm.len() == 1u { Short(nm.char_at(0u)) } else { Long(nm.to_owned()) } } fn to_str(&self) -> ~str { match *self { Short(ch) => ch.to_str(), Long(ref s) => s.to_owned() } } } impl OptGroup { /// Translate OptGroup into Opt. /// (Both short and long names correspond to different Opts). pub fn long_to_short(&self) -> Opt { let OptGroup { short_name: short_name, long_name: long_name, hasarg: hasarg, occur: occur, .. } = (*self).clone(); match (short_name.len(), long_name.len()) { (0,0) => fail!("this long-format option was given no name"), (0,_) => Opt { name: Long((long_name)), hasarg: hasarg, occur: occur, aliases: Vec::new() }, (1,0) => Opt { name: Short(short_name.char_at(0)), hasarg: hasarg, occur: occur, aliases: Vec::new() }, (1,_) => Opt { name: Long((long_name)), hasarg: hasarg, occur: occur, aliases: vec!( Opt { name: Short(short_name.char_at(0)), hasarg: hasarg, occur: occur, aliases: Vec::new() } ) }, (_,_) => fail!("something is wrong with the long-form opt") } } } impl Matches { fn opt_vals(&self, nm: &str) -> Vec<Optval> { match find_opt(self.opts.as_slice(), Name::from_str(nm)) { Some(id) => (*self.vals.get(id)).clone(), None => fail!("No option '{}' defined", nm) } } fn opt_val(&self, nm: &str) -> Option<Optval> { let vals = self.opt_vals(nm); if vals.is_empty() { None } else { Some((*vals.get(0)).clone()) } } /// Returns true if an option was matched. pub fn opt_present(&self, nm: &str) -> bool { !self.opt_vals(nm).is_empty() } /// Returns the number of times an option was matched. pub fn opt_count(&self, nm: &str) -> uint { self.opt_vals(nm).len() } /// Returns true if any of several options were matched. 
pub fn opts_present(&self, names: &[~str]) -> bool { for nm in names.iter() { match find_opt(self.opts.as_slice(), Name::from_str(*nm)) { Some(id) if !self.vals.get(id).is_empty() => return true, _ => (), }; } false } /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[~str]) -> Option<~str> { for nm in names.iter() { match self.opt_val(*nm) { Some(Val(ref s)) => return Some(s.clone()), _ => () } } None } /// Returns a vector of the arguments provided to all matches of the given /// option. /// /// Used when an option accepts multiple values. pub fn opt_strs(&self, nm: &str) -> Vec<~str> { let mut acc: Vec<~str> = Vec::new(); let r = self.opt_vals(nm); for v in r.iter() { match *v { Val(ref s) => acc.push((*s).clone()), _ => () } } acc } /// Returns the string argument supplied to a matching option or `None`. pub fn opt_str(&self, nm: &str) -> Option<~str> { let vals = self.opt_vals(nm); if vals.is_empty() { return None::<~str>; } match vals.get(0) { &Val(ref s) => Some((*s).clone()), _ => None } } /// Returns the matching string, a default, or none. /// /// Returns none if the option was not present, `def` if the option was /// present but no argument was provided, and the argument if the option was /// present and an argument was provided. pub fn opt_default(&self, nm: &str, def: &str) -> Option<~str> { let vals = self.opt_vals(nm); if vals.is_empty() { return None; } match vals.get(0) { &Val(ref s) => Some((*s).clone()), _ => Some(def.to_owned()) } } } fn is_arg(arg: &str) -> bool { arg.len() > 1 && arg[0] == '-' as u8 } fn find_opt(opts: &[Opt], nm: Name) -> Option<uint> { // Search main options. let pos = opts.iter().position(|opt| opt.name == nm); if pos.is_some() { return pos } // Search in aliases. 
for candidate in opts.iter() { if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() { return opts.iter().position(|opt| opt.name == candidate.name); } } None } /// Create a long option that is required and takes an argument. pub fn reqopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: hint.to_owned(), desc: desc.to_owned(), hasarg: Yes, occur: Req } } /// Create a long option that is optional and takes an argument. pub fn optopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: hint.to_owned(), desc: desc.to_owned(), hasarg: Yes, occur: Optional } } /// Create a long option that is optional and does not take an argument. pub fn optflag(short_name: &str, long_name: &str, desc: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: ~"", desc: desc.to_owned(), hasarg: No, occur: Optional } } /// Create a long option that can occur more than once and does not /// take an argument. pub fn optflagmulti(short_name: &str, long_name: &str, desc: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: ~"", desc: desc.to_owned(), hasarg: No, occur: Multi } } /// Create a long option that is optional and takes an optional argument. 
pub fn optflagopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: hint.to_owned(), desc: desc.to_owned(), hasarg: Maybe, occur: Optional } } /// Create a long option that is optional, takes an argument, and may occur /// multiple times. pub fn optmulti(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: hint.to_owned(), desc: desc.to_owned(), hasarg: Yes, occur: Multi } } /// Create a generic option group, stating all parameters explicitly pub fn opt(short_name: &str, long_name: &str, desc: &str, hint: &str, hasarg: HasArg, occur: Occur) -> OptGroup { let len = short_name.len(); assert!(len == 1 || len == 0); OptGroup { short_name: short_name.to_owned(), long_name: long_name.to_owned(), hint: hint.to_owned(), desc: desc.to_owned(), hasarg: hasarg, occur: occur } } impl Fail_ { /// Convert a `Fail_` enum into an error string. pub fn to_err_msg(self) -> ~str { match self { ArgumentMissing(ref nm) => { format!("Argument to option '{}' missing.", *nm) } UnrecognizedOption(ref nm) => { format!("Unrecognized option: '{}'.", *nm) } OptionMissing(ref nm) => { format!("Required option '{}' missing.", *nm) } OptionDuplicated(ref nm) => { format!("Option '{}' given more than once.", *nm) } UnexpectedArgument(ref nm) => { format!("Option '{}' does not take an argument.", *nm) } } } } /// Parse command line arguments according to the provided options. /// /// On success returns `Ok(Opt)`. Use methods such as `opt_present` /// `opt_str`, etc. to interrogate results. Returns `Err(Fail_)` on failure. /// Use `to_err_msg` to get an error message. 
pub fn getopts(args: &[~str], optgrps: &[OptGroup]) -> Result { let opts: Vec<Opt> = optgrps.iter().map(|x| x.long_to_short()).collect(); let n_opts = opts.len(); fn f(_x: uint) -> Vec<Optval> { return Vec::new(); } let mut vals = Vec::from_fn(n_opts, f); let mut free: Vec<~str> = Vec::new(); let l = args.len(); let mut i = 0; while i < l { let cur = args[i].clone(); let curlen = cur.len(); if !is_arg(cur) { free.push(cur); } else if cur == ~"--" { let mut j = i + 1; while j < l { free.push(args[j].clone()); j += 1; } break; } else { let mut names; let mut i_arg = None; if cur[1] == '-' as u8 { let tail = cur.slice(2, curlen); let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_owned())); } else { names = vec!(Long((*tail_eq.get(0)).to_owned())); i_arg = Some((*tail_eq.get(1)).to_owned()); } } else { let mut j = 1; let mut last_valid_opt_id = None; names = Vec::new(); while j < curlen { let range = cur.char_range_at(j); let opt = Short(range.ch); /* In a series of potential options (eg. -aheJ), if we see one which takes an argument, we assume all subsequent characters make up the argument. 
This allows options such as -L/usr/local/lib/foo to be interpreted correctly */ match find_opt(opts.as_slice(), opt.clone()) { Some(id) => last_valid_opt_id = Some(id), None => { let arg_follows = last_valid_opt_id.is_some() && match opts.get(last_valid_opt_id.unwrap()) .hasarg { Yes | Maybe => true, No => false }; if arg_follows && j < curlen { i_arg = Some(cur.slice(j, curlen).to_owned()); break; } else { last_valid_opt_id = None; } } } names.push(opt); j = range.next; } } let mut name_pos = 0; for nm in names.iter() { name_pos += 1; let optid = match find_opt(opts.as_slice(), (*nm).clone()) { Some(id) => id, None => return Err(UnrecognizedOption(nm.to_str())) }; match opts.get(optid).hasarg { No => { if !i_arg.is_none() { return Err(UnexpectedArgument(nm.to_str())); } vals.get_mut(optid).push(Given); } Maybe => { if !i_arg.is_none() { vals.get_mut(optid) .push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || is_arg(args[i + 1]) { vals.get_mut(optid).push(Given); } else { i += 1; vals.get_mut(optid).push(Val(args[i].clone())); } } Yes => { if !i_arg.is_none() { vals.get_mut(optid).push(Val(i_arg.clone().unwrap())); } else if i + 1 == l { return Err(ArgumentMissing(nm.to_str())); } else { i += 1; vals.get_mut(optid).push(Val(args[i].clone())); } } } } } i += 1; } i = 0u; while i < n_opts { let n = vals.get(i).len(); let occ = opts.get(i).occur; if occ == Req { if n == 0 { return Err(OptionMissing(opts.get(i).name.to_str())); } } if occ != Multi { if n > 1 { return Err(OptionDuplicated(opts.get(i).name.to_str())); } } i += 1; } Ok(Matches { opts: opts, vals: vals, free: free }) } /// Derive a usage message from a set of long options. 
pub fn usage(brief: &str, opts: &[OptGroup]) -> ~str { let desc_sep = "\n" + " ".repeat(24); let mut rows = opts.iter().map(|optref| { let OptGroup{short_name: short_name, long_name: long_name, hint: hint, desc: desc, hasarg: hasarg, ..} = (*optref).clone(); let mut row = StrBuf::from_owned_str(" ".repeat(4)); // short option match short_name.len() { 0 => {} 1 => { row.push_char('-'); row.push_str(short_name); row.push_char(' '); } _ => fail!("the short name should only be 1 ascii char long"), } // long option match long_name.len() { 0 => {} _ => { row.push_str("--"); row.push_str(long_name); row.push_char(' '); } } // arg match hasarg { No => {} Yes => row.push_str(hint), Maybe => { row.push_char('['); row.push_str(hint); row.push_char(']'); } } // FIXME: #5516 should be graphemes not codepoints // here we just need to indent the start of the description let rowlen = row.as_slice().char_len(); if rowlen < 24 { for _ in range(0, 24 - rowlen) { row.push_char(' '); } } else { row.push_str(desc_sep) } // Normalize desc to contain words separated by one space character let mut desc_normalized_whitespace = StrBuf::new(); for word in desc.words() { desc_normalized_whitespace.push_str(word); desc_normalized_whitespace.push_char(' '); } // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); each_split_within(desc_normalized_whitespace.as_slice(), 54, |substr| { desc_rows.push(substr.to_owned()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description row.push_str(desc_rows.connect(desc_sep)); row.into_owned() }); format!("{}\n\nOptions:\n{}\n", brief, rows.collect::<Vec<~str> >().connect("\n")) } fn format_option(opt: &OptGroup) -> ~str { let mut line = StrBuf::new(); if opt.occur != Req { line.push_char('['); } // Use short_name is possible, but fallback to long_name. 
if opt.short_name.len() > 0 { line.push_char('-'); line.push_str(opt.short_name); } else { line.push_str("--"); line.push_str(opt.long_name); } if opt.hasarg != No { line.push_char(' '); if opt.hasarg == Maybe { line.push_char('['); } line.push_str(opt.hint); if opt.hasarg == Maybe { line.push_char(']'); } } if opt.occur != Req { line.push_char(']'); } if opt.occur == Multi { line.push_str(".."); } line.into_owned() } /// Derive a short one-line usage summary from a set of long options. pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> ~str { let mut line = StrBuf::from_str("Usage: " + program_name + " "); line.push_str(opts.iter().map(format_option).collect::<Vec<~str>>().connect(" ")); line.into_owned() } /// Splits a string into substrings with possibly internal whitespace, /// each of them at most `lim` bytes long. The substrings have leading and trailing /// whitespace removed, and are only cut at whitespace boundaries. /// /// Note: Function was moved here from `std::str` because this module is the only place that /// uses it, and because it was to specific for a general string function. /// /// #Failure: /// /// Fails during iteration if the string contains a non-whitespace /// sequence longer than the limit. 
fn each_split_within<'a>(ss: &'a str, lim: uint, it: |&'a str| -> bool) -> bool { // Just for fun, let's write this as a state machine: enum SplitWithinState { A, // leading whitespace, initial state B, // words C, // internal and trailing whitespace }<|fim▁hole|> enum Whitespace { Ws, // current char is whitespace Cr // current char is not whitespace } enum LengthLimit { UnderLim, // current char makes current substring still fit in limit OverLim // current char makes current substring no longer fit in limit } let mut slice_start = 0; let mut last_start = 0; let mut last_end = 0; let mut state = A; let mut fake_i = ss.len(); let mut lim = lim; let mut cont = true; // if the limit is larger than the string, lower it to save cycles if lim >= fake_i { lim = fake_i; } let machine: |&mut bool, (uint, char)| -> bool = |cont, (i, c)| { let whitespace = if ::std::char::is_whitespace(c) { Ws } else { Cr }; let limit = if (i - slice_start + 1) <= lim { UnderLim } else { OverLim }; state = match (state, whitespace, limit) { (A, Ws, _) => { A } (A, Cr, _) => { slice_start = i; last_start = i; B } (B, Cr, UnderLim) => { B } (B, Cr, OverLim) if (i - last_start + 1) > lim => fail!("word starting with {} longer than limit!", ss.slice(last_start, i + 1)), (B, Cr, OverLim) => { *cont = it(ss.slice(slice_start, last_end)); slice_start = last_start; B } (B, Ws, UnderLim) => { last_end = i; C } (B, Ws, OverLim) => { last_end = i; *cont = it(ss.slice(slice_start, last_end)); A } (C, Cr, UnderLim) => { last_start = i; B } (C, Cr, OverLim) => { *cont = it(ss.slice(slice_start, last_end)); slice_start = i; last_start = i; last_end = i; B } (C, Ws, OverLim) => { *cont = it(ss.slice(slice_start, last_end)); A } (C, Ws, UnderLim) => { C } }; *cont }; ss.char_indices().advance(|x| machine(&mut cont, x)); // Let the automaton 'run out' by supplying trailing whitespace while cont && match state { B | C => true, A => false } { machine(&mut cont, (fake_i, ' ')); fake_i += 1; } return cont; } 
#[test] fn test_split_within() { fn t(s: &str, i: uint, u: &[~str]) { let mut v = Vec::new(); each_split_within(s, i, |s| { v.push(s.to_owned()); true }); assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b)); } t("", 0, []); t("", 15, []); t("hello", 15, [~"hello"]); t("\nMary had a little lamb\nLittle lamb\n", 15, [~"Mary had a", ~"little lamb", ~"Little lamb"]); t("\nMary had a little lamb\nLittle lamb\n", ::std::uint::MAX, [~"Mary had a little lamb\nLittle lamb"]); } #[cfg(test)] mod tests { use super::*; use std::result::{Err, Ok}; use std::result; fn check_fail_type(f: Fail_, ft: FailType) { match f { ArgumentMissing(_) => assert!(ft == ArgumentMissing_), UnrecognizedOption(_) => assert!(ft == UnrecognizedOption_), OptionMissing(_) => assert!(ft == OptionMissing_), OptionDuplicated(_) => assert!(ft == OptionDuplicated_), UnexpectedArgument(_) => assert!(ft == UnexpectedArgument_) } } // Tests for reqopt #[test] fn test_reqopt() { let long_args = vec!(~"--test=20"); let opts = vec!(reqopt("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(m.opt_present("test")); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!(m.opt_present("t")); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => { fail!("test_reqopt failed (long arg)"); } } let short_args = vec!(~"-t", ~"20"); match getopts(short_args.as_slice(), opts.as_slice()) { Ok(ref m) => { assert!((m.opt_present("test"))); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!((m.opt_present("t"))); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => { fail!("test_reqopt failed (short arg)"); } } } #[test] fn test_reqopt_missing() { let args = vec!(~"blah"); let opts = vec!(reqopt("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, OptionMissing_), _ => fail!() } } #[test] fn test_reqopt_no_arg() { let long_args = vec!(~"--test"); let opts = vec!(reqopt("t", "test", 
"testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } let short_args = vec!(~"-t"); match getopts(short_args.as_slice(), opts.as_slice()) { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } } #[test] fn test_reqopt_multi() { let args = vec!(~"--test=20", ~"-t", ~"30"); let opts = vec!(reqopt("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, OptionDuplicated_), _ => fail!() } } // Tests for optopt #[test] fn test_optopt() { let long_args = vec!(~"--test=20"); let opts = vec!(optopt("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(m.opt_present("test")); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!((m.opt_present("t"))); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => fail!() } let short_args = vec!(~"-t", ~"20"); match getopts(short_args.as_slice(), opts.as_slice()) { Ok(ref m) => { assert!((m.opt_present("test"))); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!((m.opt_present("t"))); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => fail!() } } #[test] fn test_optopt_missing() { let args = vec!(~"blah"); let opts = vec!(optopt("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(!m.opt_present("test")); assert!(!m.opt_present("t")); } _ => fail!() } } #[test] fn test_optopt_no_arg() { let long_args = vec!(~"--test"); let opts = vec!(optopt("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } let short_args = vec!(~"-t"); match getopts(short_args.as_slice(), opts.as_slice()) { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } } #[test] fn test_optopt_multi() { let args = vec!(~"--test=20", ~"-t", 
~"30"); let opts = vec!(optopt("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, OptionDuplicated_), _ => fail!() } } // Tests for optflag #[test] fn test_optflag() { let long_args = vec!(~"--test"); let opts = vec!(optflag("t", "test", "testing")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(m.opt_present("test")); assert!(m.opt_present("t")); } _ => fail!() } let short_args = vec!(~"-t"); match getopts(short_args.as_slice(), opts.as_slice()) { Ok(ref m) => { assert!(m.opt_present("test")); assert!(m.opt_present("t")); } _ => fail!() } } #[test] fn test_optflag_missing() { let args = vec!(~"blah"); let opts = vec!(optflag("t", "test", "testing")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(!m.opt_present("test")); assert!(!m.opt_present("t")); } _ => fail!() } } #[test] fn test_optflag_long_arg() { let args = vec!(~"--test=20"); let opts = vec!(optflag("t", "test", "testing")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Err(f) => { error!("{:?}", f.clone().to_err_msg()); check_fail_type(f, UnexpectedArgument_); } _ => fail!() } } #[test] fn test_optflag_multi() { let args = vec!(~"--test", ~"-t"); let opts = vec!(optflag("t", "test", "testing")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, OptionDuplicated_), _ => fail!() } } #[test] fn test_optflag_short_arg() { let args = vec!(~"-t", ~"20"); let opts = vec!(optflag("t", "test", "testing")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { // The next variable after the flag is just a free argument assert!(*m.free.get(0) == ~"20"); } _ => fail!() } } // Tests for optflagmulti #[test] fn test_optflagmulti_short1() { let args = vec!(~"-v"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match 
rs { Ok(ref m) => { assert_eq!(m.opt_count("v"), 1); } _ => fail!() } } #[test] fn test_optflagmulti_short2a() { let args = vec!(~"-v", ~"-v"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert_eq!(m.opt_count("v"), 2); } _ => fail!() } } #[test] fn test_optflagmulti_short2b() { let args = vec!(~"-vv"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert_eq!(m.opt_count("v"), 2); } _ => fail!() } } #[test] fn test_optflagmulti_long1() { let args = vec!(~"--verbose"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert_eq!(m.opt_count("verbose"), 1); } _ => fail!() } } #[test] fn test_optflagmulti_long2() { let args = vec!(~"--verbose", ~"--verbose"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert_eq!(m.opt_count("verbose"), 2); } _ => fail!() } } #[test] fn test_optflagmulti_mix() { let args = vec!(~"--verbose", ~"-v", ~"-vv", ~"verbose"); let opts = vec!(optflagmulti("v", "verbose", "verbosity")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert_eq!(m.opt_count("verbose"), 4); assert_eq!(m.opt_count("v"), 4); } _ => fail!() } } // Tests for optmulti #[test] fn test_optmulti() { let long_args = vec!(~"--test=20"); let opts = vec!(optmulti("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!((m.opt_present("test"))); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!((m.opt_present("t"))); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => fail!() } let short_args = vec!(~"-t", ~"20"); match getopts(short_args.as_slice(), opts.as_slice()) { Ok(ref m) => { 
assert!((m.opt_present("test"))); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!((m.opt_present("t"))); assert_eq!(m.opt_str("t").unwrap(), ~"20"); } _ => fail!() } } #[test] fn test_optmulti_missing() { let args = vec!(~"blah"); let opts = vec!(optmulti("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(!m.opt_present("test")); assert!(!m.opt_present("t")); } _ => fail!() } } #[test] fn test_optmulti_no_arg() { let long_args = vec!(~"--test"); let opts = vec!(optmulti("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } let short_args = vec!(~"-t"); match getopts(short_args.as_slice(), opts.as_slice()) { Err(f) => check_fail_type(f, ArgumentMissing_), _ => fail!() } } #[test] fn test_optmulti_multi() { let args = vec!(~"--test=20", ~"-t", ~"30"); let opts = vec!(optmulti("t", "test", "testing", "TEST")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(m.opt_present("test")); assert_eq!(m.opt_str("test").unwrap(), ~"20"); assert!(m.opt_present("t")); assert_eq!(m.opt_str("t").unwrap(), ~"20"); let pair = m.opt_strs("test"); assert!(*pair.get(0) == ~"20"); assert!(*pair.get(1) == ~"30"); } _ => fail!() } } #[test] fn test_unrecognized_option() { let long_args = vec!(~"--untest"); let opts = vec!(optmulti("t", "test", "testing", "TEST")); let rs = getopts(long_args.as_slice(), opts.as_slice()); match rs { Err(f) => check_fail_type(f, UnrecognizedOption_), _ => fail!() } let short_args = vec!(~"-u"); match getopts(short_args.as_slice(), opts.as_slice()) { Err(f) => check_fail_type(f, UnrecognizedOption_), _ => fail!() } } #[test] fn test_combined() { let args = vec!(~"prog", ~"free1", ~"-s", ~"20", ~"free2", ~"--flag", ~"--long=30", ~"-f", ~"-m", ~"40", ~"-m", ~"50", ~"-n", ~"-A B", ~"-n", ~"-60 70"); let opts = vec!(optopt("s", "something", 
"something", "SOMETHING"), optflag("", "flag", "a flag"), reqopt("", "long", "hi", "LONG"), optflag("f", "", "another flag"), optmulti("m", "", "mmmmmm", "YUM"), optmulti("n", "", "nothing", "NOTHING"), optopt("", "notpresent", "nothing to see here", "NOPE")); let rs = getopts(args.as_slice(), opts.as_slice()); match rs { Ok(ref m) => { assert!(*m.free.get(0) == ~"prog"); assert!(*m.free.get(1) == ~"free1"); assert_eq!(m.opt_str("s").unwrap(), ~"20"); assert!(*m.free.get(2) == ~"free2"); assert!((m.opt_present("flag"))); assert_eq!(m.opt_str("long").unwrap(), ~"30"); assert!((m.opt_present("f"))); let pair = m.opt_strs("m"); assert!(*pair.get(0) == ~"40"); assert!(*pair.get(1) == ~"50"); let pair = m.opt_strs("n"); assert!(*pair.get(0) == ~"-A B"); assert!(*pair.get(1) == ~"-60 70"); assert!((!m.opt_present("notpresent"))); } _ => fail!() } } #[test] fn test_multi() { let opts = vec!(optopt("e", "", "encrypt", "ENCRYPT"), optopt("", "encrypt", "encrypt", "ENCRYPT"), optopt("f", "", "flag", "FLAG")); let args_single = vec!(~"-e", ~"foo"); let matches_single = &match getopts(args_single.as_slice(), opts.as_slice()) { result::Ok(m) => m, result::Err(_) => fail!() }; assert!(matches_single.opts_present([~"e"])); assert!(matches_single.opts_present([~"encrypt", ~"e"])); assert!(matches_single.opts_present([~"e", ~"encrypt"])); assert!(!matches_single.opts_present([~"encrypt"])); assert!(!matches_single.opts_present([~"thing"])); assert!(!matches_single.opts_present([])); assert_eq!(matches_single.opts_str([~"e"]).unwrap(), ~"foo"); assert_eq!(matches_single.opts_str([~"e", ~"encrypt"]).unwrap(), ~"foo"); assert_eq!(matches_single.opts_str([~"encrypt", ~"e"]).unwrap(), ~"foo"); let args_both = vec!(~"-e", ~"foo", ~"--encrypt", ~"foo"); let matches_both = &match getopts(args_both.as_slice(), opts.as_slice()) { result::Ok(m) => m, result::Err(_) => fail!() }; assert!(matches_both.opts_present([~"e"])); assert!(matches_both.opts_present([~"encrypt"])); 
assert!(matches_both.opts_present([~"encrypt", ~"e"])); assert!(matches_both.opts_present([~"e", ~"encrypt"])); assert!(!matches_both.opts_present([~"f"])); assert!(!matches_both.opts_present([~"thing"])); assert!(!matches_both.opts_present([])); assert_eq!(matches_both.opts_str([~"e"]).unwrap(), ~"foo"); assert_eq!(matches_both.opts_str([~"encrypt"]).unwrap(), ~"foo"); assert_eq!(matches_both.opts_str([~"e", ~"encrypt"]).unwrap(), ~"foo"); assert_eq!(matches_both.opts_str([~"encrypt", ~"e"]).unwrap(), ~"foo"); } #[test] fn test_nospace() { let args = vec!(~"-Lfoo", ~"-M."); let opts = vec!(optmulti("L", "", "library directory", "LIB"), optmulti("M", "", "something", "MMMM")); let matches = &match getopts(args.as_slice(), opts.as_slice()) { result::Ok(m) => m, result::Err(_) => fail!() }; assert!(matches.opts_present([~"L"])); assert_eq!(matches.opts_str([~"L"]).unwrap(), ~"foo"); assert!(matches.opts_present([~"M"])); assert_eq!(matches.opts_str([~"M"]).unwrap(), ~"."); } #[test] fn test_long_to_short() { let mut short = Opt { name: Long(~"banana"), hasarg: Yes, occur: Req, aliases: Vec::new(), }; short.aliases = vec!(Opt { name: Short('b'), hasarg: Yes, occur: Req, aliases: Vec::new() }); let verbose = reqopt("b", "banana", "some bananas", "VAL"); assert!(verbose.long_to_short() == short); } #[test] fn test_aliases_long_and_short() { let opts = vec!( optflagmulti("a", "apple", "Desc")); let args = vec!(~"-a", ~"--apple", ~"-a"); let matches = getopts(args.as_slice(), opts.as_slice()).unwrap(); assert_eq!(3, matches.opt_count("a")); assert_eq!(3, matches.opt_count("apple")); } #[test] fn test_usage() { let optgroups = vec!( reqopt("b", "banana", "Desc", "VAL"), optopt("a", "012345678901234567890123456789", "Desc", "VAL"), optflag("k", "kiwi", "Desc"), optflagopt("p", "", "Desc", "VAL"), optmulti("l", "", "Desc", "VAL")); let expected = ~"Usage: fruits Options: -b --banana VAL Desc -a --012345678901234567890123456789 VAL Desc -k --kiwi Desc -p [VAL] Desc -l VAL 
Desc "; let generated_usage = usage("Usage: fruits", optgroups.as_slice()); debug!("expected: <<{}>>", expected); debug!("generated: <<{}>>", generated_usage); assert_eq!(generated_usage, expected); } #[test] fn test_usage_description_wrapping() { // indentation should be 24 spaces // lines wrap after 78: or rather descriptions wrap after 54 let optgroups = vec!( optflag("k", "kiwi", "This is a long description which won't be wrapped..+.."), // 54 optflag("a", "apple", "This is a long description which _will_ be wrapped..+..")); let expected = ~"Usage: fruits Options: -k --kiwi This is a long description which won't be wrapped..+.. -a --apple This is a long description which _will_ be wrapped..+.. "; let usage = usage("Usage: fruits", optgroups.as_slice()); debug!("expected: <<{}>>", expected); debug!("generated: <<{}>>", usage); assert!(usage == expected) } #[test] fn test_usage_description_multibyte_handling() { let optgroups = vec!( optflag("k", "k\u2013w\u2013", "The word kiwi is normally spelled with two i's"), optflag("a", "apple", "This \u201Cdescription\u201D has some characters that could \ confuse the line wrapping; an apple costs 0.51€ in some parts of Europe.")); let expected = ~"Usage: fruits Options: -k --k–w– The word kiwi is normally spelled with two i's -a --apple This “description” has some characters that could confuse the line wrapping; an apple costs 0.51€ in some parts of Europe. 
"; let usage = usage("Usage: fruits", optgroups.as_slice()); debug!("expected: <<{}>>", expected); debug!("generated: <<{}>>", usage); assert!(usage == expected) } #[test] fn test_short_usage() { let optgroups = vec!( reqopt("b", "banana", "Desc", "VAL"), optopt("a", "012345678901234567890123456789", "Desc", "VAL"), optflag("k", "kiwi", "Desc"), optflagopt("p", "", "Desc", "VAL"), optmulti("l", "", "Desc", "VAL")); let expected = ~"Usage: fruits -b VAL [-a VAL] [-k] [-p [VAL]] [-l VAL].."; let generated_usage = short_usage("fruits", optgroups.as_slice()); debug!("expected: <<{}>>", expected); debug!("generated: <<{}>>", generated_usage); assert_eq!(generated_usage, expected); } }<|fim▁end|>
<|file_name|>csv_table.py<|end_file_name|><|fim▁begin|># -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*- # vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 # # RecSQL -- a simple mash-up of sqlite and numpy.recsql # Copyright (C) 2007-2016 Oliver Beckstein <[email protected]> # Released under the GNU Public License, version 3 or higher (your choice) """ :mod:`recsql.csv_table` --- Parse a simple CSV table ==================================================== Turn a CSV table into a numpy array. Uses :mod:`csv` (requires python 2.6 or better). .. autoclass:: Table2array :members: __init__, recarray .. autofunction:: make_python_name """ from __future__ import with_statement, absolute_import # notes on csv (from http://farmdev.com/talks/unicode/) # encode temp. to utf-8 # s_bytes = s_uni.encode('utf-8') # do stuff # s_bytes.decode('utf-8') try: # needs python >= 2.6 import csv except ImportError: import warnings warnings.warn("csv module not available (needs python >=2.6)", category=ImportWarning) # ... just go ahead and fail later miserably ... import numpy import re from .convert import Autoconverter # from the csv examples: http://docs.python.org/library/csv.html#csv-examples import codecs class UTF8Recoder(object): """ Iterator that reads an encoded stream and reencodes the input to UTF-8<|fim▁hole|> """ def __init__(self, f, encoding): self.reader = codecs.getreader(encoding)(f) def __iter__(self): return self def next(self): return self.reader.next().encode("utf-8") class UnicodeReader(object): """ A CSV reader which will iterate over lines in the CSV file "f", which is encoded in the given encoding. 
""" def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds): f = UTF8Recoder(f, encoding) self.reader = csv.reader(f, dialect=dialect, **kwds) def next(self): row = self.reader.next() return [unicode(s, "utf-8") for s in row] def __iter__(self): return self def make_python_name(s, default=None, number_prefix='N',encoding="utf-8"): """Returns a unicode string that can be used as a legal python identifier. :Arguments: *s* string *default* use *default* if *s* is ``None`` *number_prefix* string to prepend if *s* starts with a number """ if s in ('', None): s = default s = str(s) s = re.sub("[^a-zA-Z0-9_]", "_", s) if not re.match('\d', s) is None: s = number_prefix+s return unicode(s, encoding) class Table2array(object): """Read a csv file and provide conversion to a :class:`numpy.recarray`. * Depending on the arguments, autoconversion of values can take place. See :class:`recsql.convert.Autoconverter` for details. * Table column headers are always read from the first row of the file. * Empty rows are discarded. """ def __init__(self, filename=None, tablename="CSV", encoding="utf-8", **kwargs): """Initialize the class. :Arguments: *filename* CSV file (encoded with *encoding*) *name* name of the table *autoconvert* EXPERIMENTAL. ``True``: replace certain values with special python values (see :class:`convert.Autoconverter`) and possibly split values into lists (see *sep*). ``False``: leave everything as it is (numbers as numbers and strings as strings). 
*mode* mode of the :class:`~convert.Autoconverter` """ if filename is None: raise TypeError("filename is actually required") self.tablename = tablename self.autoconvert = Autoconverter(**kwargs).convert csvtab = UnicodeReader(open(filename, "rb"), encoding=encoding) self.names = [make_python_name(s,default=n,encoding=encoding) for n,s in enumerate(csvtab.next())] # read the rest after the column headers self.records = [tuple(map(self.autoconvert, line)) for line in csvtab \ if len(line) > 0 and not numpy.all(numpy.array(line) == '')] def recarray(self): """Returns data as :class:`numpy.recarray`.""" return numpy.rec.fromrecords(self.records, names=self.names)<|fim▁end|>
<|file_name|>GCEDescriptionParser.java<|end_file_name|><|fim▁begin|>package minerGCE; import org.xml.sax.SAXException; <|fim▁hole|>import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.XMLIntegerRangeStringProcessor; import ru.ispras.modis.NetBlox.parser.xmlParser.CommonXMLParser; import ru.ispras.modis.NetBlox.parser.xmlParser.XMLStringValueProcessor; import ru.ispras.modis.NetBlox.scenario.RangeOfValues; public class GCEDescriptionParser extends CommonXMLParser { class SupplementaryAlgosIdsProcessor extends XMLStringValueProcessor { @Override public void closeElement() { super.closeElement(); String stringOfIds = getText(); String[] ids = stringOfIds.split(Utils.DELIMITER); for (String stringId : ids) { Utils.checkWhetherIsWordInScenario(stringId, TAG_SUPPLEMENTARY_ALGOS_IDS, "algorithm"); minerDescription.addSupplementaryAlgorithmId(stringId); } } } class LaunchesProcessor extends XMLIntegerRangeStringProcessor { @Override public void closeElement() { super.closeElement(); RangeOfValues<Integer> launchNumbers = getValues(); if (launchNumbers != null && !launchNumbers.isEmpty()) { minerDescription.setLaunchNumbers(launchNumbers); } } } private static final String TAG_SUPPLEMENTARY_ALGOS_IDS = "supplementaryAlgosIds"; private static final String TAG_LAUNCH_NUMBERS = "launchNumbers"; private static final String TAG_MINIMAL_CLIQUE_SIZE = "minimalCliqueSize"; private final XMLStringValueProcessor minimalCliqueSizeParser; private DescriptionGCD_GCE minerDescription; public GCEDescriptionParser() { super(); add(TAG_SUPPLEMENTARY_ALGOS_IDS, new SupplementaryAlgosIdsProcessor()); add(TAG_LAUNCH_NUMBERS, new LaunchesProcessor()); add(TAG_MINIMAL_CLIQUE_SIZE, minimalCliqueSizeParser = new XMLStringValueProcessor()); } @Override public void startDocument() throws SAXException { super.startDocument(); minerDescription = new DescriptionGCD_GCE(); } @Override public void endDocument() throws SAXException { super.endDocument(); String text = 
minimalCliqueSizeParser.getText(); if (text != null && !text.isEmpty()) { minerDescription.setMinimalCliqueSize(Integer.parseInt(text)); } } public DescriptionGCD_GCE getParsedDescription() { return minerDescription; } }<|fim▁end|>
import ru.ispras.modis.NetBlox.parser.basicParsersAndUtils.Utils;
<|file_name|>tkinter_gui.py<|end_file_name|><|fim▁begin|>from Tkinter import * root = Tk() root.title('first test window') #root.geometry('300x200') frm = Frame(root) frm_l = Frame(frm) Label(frm_l, text='left_top').pack(side=TOP) Label(frm_l, text='left_bottom').pack(side=BOTTOM) frm_l.pack(side=LEFT) frm_r = Frame(frm) Label(frm_r, text='right_top').pack(side=TOP) Label(frm_r, text='right_bottom').pack(side=BOTTOM) frm_r.pack(side=RIGHT) frm.pack(side=TOP) ########################################################## frm1 = Frame(root) var = StringVar() Entry(frm1, textvariable=var).pack(side=TOP) var.set('entry text') t = Text(frm1) t.pack(side=TOP) def print_entry(): t.insert(END, var.get()) Button(frm1, text='copy', command=print_entry).pack(side=TOP) frm1.pack(side=TOP) ########################################################## frm2 = Frame(root) redbutton = Button(frm2, text="Red", fg="red") redbutton.pack( side = LEFT) greenbutton = Button(frm2, text="Brown", fg="brown") greenbutton.pack( side = LEFT ) bluebutton = Button(frm2, text="Blue", fg="blue") bluebutton.pack( side = LEFT ) blackbutton = Button(frm2, text="Black", fg="black") blackbutton.pack( side = BOTTOM) frm2.pack(side=TOP) ###################################################### frm3 = Frame(root) b = Button(frm3, text='move') b.place(bordermode=OUTSIDE, height=100, width=100, x=50, y=50) b.pack() <|fim▁hole|><|fim▁end|>
frm3.pack(side=TOP) root.mainloop()
<|file_name|>exc.py<|end_file_name|><|fim▁begin|>#***************************************************************************** # Copyright 2004-2008 Steve Menard # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software <|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # #***************************************************************************** from jpype import JException, java, JavaException, JProxy, JPackage import unittest, common import traceback def throwIOException() : raise java.io.IOException.PYEXC("Test throw") def throwByJavaException() : JPackage('jpype').exc.ExceptionTest.throwIOException() def suite() : return unittest.makeSuite(ExceptionTestCase) class ExceptionTestCase(common.JPypeTestCase) : def testExceptionThrown(self) : try : self.jpype.exc.ExceptionTest.throwRuntime() assert False except JavaException, ex : print 'Caught a Java exception ...' 
if ex.javaClass() is java.lang.RuntimeException : print "Caught the exception", ex.message() print ex.stacktrace() else: assert False except Exception, ex: print ex.__class__, isinstance(ex, JavaException) print ex.__class__.__bases__[0].__bases__[0].__bases__ print JavaException assert False print 'if here, everything is fine' def testExceptionByJavaClass(self) : try : self.jpype.exc.ExceptionTest.throwRuntime() assert False except JException(java.lang.RuntimeException), ex : print "Caught the exception", ex.message(), "->", ex.javaClass() print ex.stacktrace() except Exception, ex: print ex assert False # def testThrowException(self) : # d = {"throwIOException" : throwIOException, } # p = JProxy(self.jpype.exc.ExceptionThrower, dict=d) # # assert self.jpype.exc.ExceptionTest.delegateThrow(p) def testThrowException3(self) : d = {"throwIOException" : throwByJavaException, } p = JProxy(self.jpype.exc.ExceptionThrower, dict=d) assert self.jpype.exc.ExceptionTest.delegateThrow(p)<|fim▁end|>
<|file_name|>update_api_docs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 from __future__ import print_function from __future__ import division from __future__ import unicode_literals from __future__ import absolute_import import os import platform import sys import subprocess DIR_OF_THIS_SCRIPT = os.path.dirname( os.path.abspath( __file__ ) ) DIR_OF_DOCS = os.path.join( DIR_OF_THIS_SCRIPT, 'docs' ) def OnWindows():<|fim▁hole|> # On Windows, distutils.spawn.find_executable only works for .exe files # but .bat and .cmd files are also executables, so we use our own # implementation. def FindExecutable( executable ): # Executable extensions used on Windows WIN_EXECUTABLE_EXTS = [ '.exe', '.bat', '.cmd' ] paths = os.environ[ 'PATH' ].split( os.pathsep ) base, extension = os.path.splitext( executable ) if OnWindows() and extension.lower() not in WIN_EXECUTABLE_EXTS: extensions = WIN_EXECUTABLE_EXTS else: extensions = [ '' ] for extension in extensions: executable_name = executable + extension if not os.path.isfile( executable_name ): for path in paths: executable_path = os.path.join( path, executable_name ) if os.path.isfile( executable_path ): return executable_path else: return executable_name return None def GenerateApiDocs(): npm = FindExecutable( 'npm' ) if not npm: sys.exit( 'ERROR: NPM is required to generate API docs.' ) os.chdir( os.path.join( DIR_OF_DOCS ) ) subprocess.call( [ npm, 'install', '--production' ] ) bootprint = FindExecutable( os.path.join( DIR_OF_DOCS, 'node_modules', '.bin', 'bootprint' ) ) api = os.path.join( DIR_OF_DOCS, 'openapi.yml' ) subprocess.call( [ bootprint, 'openapi', api, DIR_OF_DOCS ] ) if __name__ == '__main__': GenerateApiDocs()<|fim▁end|>
return platform.system() == 'Windows'
<|file_name|>ParamHelper.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * $Header:$ */ package org.apache.beehive.netui.util; import java.util.Map; import java.util.List; import java.lang.reflect.Array; import org.apache.beehive.netui.util.logging.Logger; /** * This class is used by NetUI tags that use parameters. */ public class ParamHelper { private static final Logger logger = Logger.getInstance(ParamHelper.class); /** * Add a new parameter or update an existing parameter's list of values. * <p/> * <em>Implementation Note:</em> in the case that a Map was provided for * the <code>value</code> parameter, the this returns without doing * anything; in any other case, params is updated (even in * <code>value</code> is null). * </p> * <p/> * If value is some object (not an array or list), the string * representation of that object is added as a value for name. If the * value is a list (or array) of objects, then the string representation * of each element is added as a value for name. When there are multiple * values for a name, then an array of Strings is used in Map. 
* </p> * * @param params an existing Map of names and values to update * @param name the name of the parameter to add or update * @param value an item or list of items to put into the map * @throws IllegalArgumentException in the case that either the params * <p/> * or name given was null */ public static void addParam(Map params, String name, Object value) { if (params == null) throw new IllegalArgumentException("Parameter map cannot be null"); if (name == null) throw new IllegalArgumentException("Parameter name cannot be null"); if (value instanceof Map) { logger.warn(Bundle.getString("Tags_BadParameterType", name)); return; } if (value == null) value = ""; // check to see if we are adding a new element // or if this is an existing element Object o = params.get(name); int length = 0; if (o != null) { assert (o instanceof String || o instanceof String[]); if (o.getClass().isArray()) { length = Array.getLength(o); } else { length++; } } // check how much size the output needs to be if (value.getClass().isArray()) { length += Array.getLength(value); } else if (value instanceof List) { length += ((List) value).size(); } else {<|fim▁hole|> return; //System.err.println("Number of vaues:" + length); // if there is only a single value push it to the parameter table if (length == 1) { if (value.getClass().isArray()) { Object val = Array.get(value, 0); if (val != null) params.put(name,val.toString()); else params.put(name,""); } else if (value instanceof List) { List list = (List) value; Object val = list.get(0); if (val != null) params.put(name,val.toString()); else params.put(name,""); } else params.put(name,value.toString()); return; } // allocate the string for the multiple values String[] values = new String[length]; int offset = 0; // if we had old values, push them to the new array if (o != null) { if (o.getClass().isArray()) { String[] obs = (String[]) o; for (;offset<obs.length;offset++) { values[offset] = obs[offset]; } } else { values[0] = o.toString(); offset = 
1; } } // now move the new values to the array starting at the offset // position if (value.getClass().isArray()) { //need to convert this array into a String[] int size = Array.getLength(value); for (int i=0; i < size; i++) { Object val = Array.get(value, i); if (val != null) values[i+offset] = val.toString(); else values[i+offset] = ""; } } else if (value instanceof List) { List list = (List) value; int size = list.size(); for (int i=0; i < size; i++) { if (list.get(i) != null) values[i+offset] = list.get(i).toString(); else values[i+offset] = ""; } } else { values[offset] = value.toString(); } // store the new values array params.put(name, values); } }<|fim▁end|>
length++; } if (length == 0)
<|file_name|>initializers.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python # -*- coding: utf-8 -*- import numpy as np import tensorflow as tf __all__ = [ 'Initializer', 'Zeros', 'Ones', 'Constant', 'RandomUniform', 'RandomNormal', 'TruncatedNormal', 'deconv2d_bilinear_upsampling_initializer' ] class Initializer(object): """Initializer base class: all initializers inherit from this class. """ def __call__(self, shape, dtype=None): """Returns a tensor object initialized as specified by the initializer. Parameters ---------- shape : tuple of int. The shape of the tensor. dtype : Optional dtype of the tensor. If not provided will return tensor of `tf.float32`. Returns ------- """ raise NotImplementedError def get_config(self): """Returns the configuration of the initializer as a JSON-serializable dict. Returns ------- A JSON-serializable Python dict. """ return {} @classmethod def from_config(cls, config): """Instantiates an initializer from a configuration dictionary. Parameters ---------- config : A python dictionary. It will typically be the output of `get_config`. Returns ------- An Initializer instance. """ if 'dtype' in config: config.pop('dtype') return cls(**config) class Zeros(Initializer): """Initializer that generates tensors initialized to 0. """ def __call__(self, shape, dtype=tf.float32): return tf.zeros(shape, dtype=dtype) class Ones(Initializer): """Initializer that generates tensors initialized to 1. """ def __call__(self, shape, dtype=tf.float32): return tf.ones(shape, dtype=dtype) class Constant(Initializer): """Initializer that generates tensors initialized to a constant value. Parameters ---------- value : A python scalar or a numpy array. The assigned value. 
""" def __init__(self, value=0): self.value = value def __call__(self, shape, dtype=None): return tf.constant(self.value, shape=shape, dtype=dtype) def get_config(self): return {"value": self.value} class RandomUniform(Initializer): """Initializer that generates tensors with a uniform distribution. Parameters ---------- minval : A python scalar or a scalar tensor. Lower bound of the range of random values to generate. maxval : A python scalar or a scalar tensor. Upper bound of the range of random values to generate. seed : A Python integer. Used to seed the random generator. """ def __init__(self, minval=-0.05, maxval=0.05, seed=None): self.minval = minval self.maxval = maxval self.seed = seed def __call__(self, shape, dtype=tf.float32): return tf.random.uniform(shape, self.minval, self.maxval, dtype=dtype, seed=self.seed) def get_config(self): return {"minval": self.minval, "maxval": self.maxval, "seed": self.seed} class RandomNormal(Initializer): """Initializer that generates tensors with a normal distribution. Parameters ---------- mean : A python scalar or a scalar tensor. Mean of the random values to generate. stddev : A python scalar or a scalar tensor. Standard deviation of the random values to generate. seed : A Python integer. Used to seed the random generator. """ def __init__(self, mean=0.0, stddev=0.05, seed=None): self.mean = mean self.stddev = stddev self.seed = seed def __call__(self, shape, dtype=tf.float32): return tf.random.normal(shape, self.mean, self.stddev, dtype=dtype, seed=self.seed) <|fim▁hole|>class TruncatedNormal(Initializer): """Initializer that generates a truncated normal distribution. These values are similar to values from a `RandomNormal` except that values more than two standard deviations from the mean are discarded and re-drawn. This is the recommended initializer for neural network weights and filters. Parameters ---------- mean : A python scalar or a scalar tensor. Mean of the random values to generate. 
stddev : A python scalar or a scalar tensor. Standard deviation of the andom values to generate. seed : A Python integer. Used to seed the random generator. """ def __init__(self, mean=0.0, stddev=0.05, seed=None): self.mean = mean self.stddev = stddev self.seed = seed def __call__(self, shape, dtype=tf.float32): return tf.random.truncated_normal(shape, self.mean, self.stddev, dtype=dtype, seed=self.seed) def get_config(self): return {"mean": self.mean, "stddev": self.stddev, "seed": self.seed} def deconv2d_bilinear_upsampling_initializer(shape): """Returns the initializer that can be passed to DeConv2dLayer for initializing the weights in correspondence to channel-wise bilinear up-sampling. Used in segmentation approaches such as [FCN](https://arxiv.org/abs/1605.06211) Parameters ---------- shape : tuple of int The shape of the filters, [height, width, output_channels, in_channels]. It must match the shape passed to DeConv2dLayer. Returns ------- ``tf.constant_initializer`` A constant initializer with weights set to correspond to per channel bilinear upsampling when passed as W_int in DeConv2dLayer """ if shape[0] != shape[1]: raise Exception('deconv2d_bilinear_upsampling_initializer only supports symmetrical filter sizes') if shape[3] < shape[2]: raise Exception( 'deconv2d_bilinear_upsampling_initializer behaviour is not defined for num_in_channels < num_out_channels ' ) filter_size = shape[0] num_out_channels = shape[2] num_in_channels = shape[3] # Create bilinear filter kernel as numpy array bilinear_kernel = np.zeros([filter_size, filter_size], dtype=np.float32) scale_factor = (filter_size + 1) // 2 if filter_size % 2 == 1: center = scale_factor - 1 else: center = scale_factor - 0.5 for x in range(filter_size): for y in range(filter_size): bilinear_kernel[x, y] = (1 - abs(x - center) / scale_factor) * (1 - abs(y - center) / scale_factor) weights = np.zeros((filter_size, filter_size, num_out_channels, num_in_channels), dtype=np.float32) for i in 
range(num_out_channels): weights[:, :, i, i] = bilinear_kernel # assign numpy array to constant_initalizer and pass to get_variable return tf.constant_initializer(value=weights) # Alias zeros = Zeros ones = Ones constant = Constant random_uniform = RandomUniform random_normal = RandomNormal truncated_normal = TruncatedNormal<|fim▁end|>
def get_config(self): return {"mean": self.mean, "stddev": self.stddev, "seed": self.seed}
<|file_name|>glib_gobject.rs<|end_file_name|><|fim▁begin|>// GObject Introspection Rust bindings. // Copyright (C) 2014 Luis Araujo <[email protected]> // This library is free software; you can redistribute it and/or // modify it under the terms of the GNU Lesser General Public // License as published by the Free Software Foundation; either // version 2.1 of the License, or (at your option) any later version. <|fim▁hole|>// This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU // Lesser General Public License for more details. // You should have received a copy of the GNU Lesser General Public // License along with this library; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA extern crate libc; use glib_gobject::libc ::{c_void, c_char, c_int, c_uint, c_float, c_long, c_ulong, c_double, size_t}; /* GObject */ pub type GType = size_t; pub type GBoolean = c_int; pub type GPointer = *c_void; pub type GQuark = c_uint; pub enum GData {} #[deriving(Show, FromPrimitive)] pub enum GParamFlags { GParamReadable = 1 << 0, GParamWritable = 1 << 1, GParamConstruct = 1 << 2, GParamConstructOnly = 1 << 3, GParamLaxValidation = 1 << 4, GParamStaticName = 1 << 5, GParamStaticNick = 1 << 6, GParamSTaticBlurb = 1 << 7, /* User defined flags go up to 30 */ GParamDeprecated = 1 << 31 } #[deriving(Show, FromPrimitive)] pub enum GSignalFlags { GSignalRunFirst = 1 << 0, GSignalRunLast = 1 << 1, GSignalRunCleanup = 1 << 2, GSignalNoRecurse = 1 << 3, GSignalDetailed = 1 << 4, GSignalAction = 1 << 5, GSignalNoHooks = 1 << 6, GSignalMustCollect = 1 << 7, GSignalDeprecated = 1 << 8 } pub struct GObject { g_type_instance: GTypeInstance, /*< private >*/ // volatile guint ref_count; ref_count: c_uint, qdata: *GData } struct GTypeInstance { /*< private >*/ g_class: *GTypeClass } struct 
GTypeClass { /*< private >*/ g_type: GType } pub enum GValueData { GValueDataVInt(c_int), GValueDataVUInt(c_uint), GValueDataVLong(c_long), GValueDataVULong(c_ulong), GValueDataVInt64(i64), GValueDataVUInt64(u64), GValueDataVFloat(c_float), GValueDataVDouble(c_double), GValueDataVPointer(GPointer) } pub struct GValue { /*< private >*/ g_type: GType, /* public for GTypeValueTable methods */ data: [GValueData, ..2] } /* GLib */ pub enum GOptionGroup {} pub enum GMappedFile {} /* TODO: Get higher level structs for lists using generics */ pub struct GSList { data: GPointer, next: *GSList } pub struct GList { data: GPointer, next: *GList, prev: *GList } pub struct GError { domain: GQuark, code: c_int, message: *c_char }<|fim▁end|>
<|file_name|>mask.rs<|end_file_name|><|fim▁begin|>/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree.<|fim▁hole|> FragmentDefinition, FragmentSpread, InlineFragment, OperationDefinition, Program, ScalarField, Selection, Transformed, Transformer, VariableDefinition, }; use indexmap::map::Entry; use intern::string_key::StringKeyIndexMap; use schema::Schema; use std::{ops::RangeFull, sync::Arc}; /// Transform to inline fragment spreads with @relay(mask:false) pub fn mask(program: &Program) -> Program { let mut transform = Mask::new(program); transform .transform_program(program) .replace_or_else(|| program.clone()) } type JoinedArguments<'s> = StringKeyIndexMap<&'s VariableDefinition>; struct Mask<'s> { program: &'s Program, current_reachable_arguments: Vec<&'s VariableDefinition>, } impl<'s> Mask<'s> { fn new(program: &'s Program) -> Self { Self { program, current_reachable_arguments: vec![], } } fn join_current_arguments_to_fragment(&mut self, fragment: &mut FragmentDefinition) { let mut joined_arguments = JoinedArguments::default(); for variable in &fragment.used_global_variables { joined_arguments.insert(variable.name.item, variable); } for arg in self.current_reachable_arguments.drain(..) 
{ match joined_arguments.entry(arg.name.item) { Entry::Vacant(entry) => { entry.insert(arg); } Entry::Occupied(mut entry) => { let prev_arg = entry.get(); if self .program .schema .is_type_subtype_of(&arg.type_, &prev_arg.type_) { entry.insert(arg); } } } } let range = RangeFull; fragment.used_global_variables = joined_arguments .drain(range) .map(|(_, v)| v) .cloned() .collect(); } } impl<'s> Transformer for Mask<'s> { const NAME: &'static str = "MaskTransform"; const VISIT_ARGUMENTS: bool = false; const VISIT_DIRECTIVES: bool = false; fn transform_operation( &mut self, operation: &OperationDefinition, ) -> Transformed<OperationDefinition> { let result = self.default_transform_operation(operation); self.current_reachable_arguments.clear(); result } fn transform_fragment( &mut self, fragment: &FragmentDefinition, ) -> Transformed<FragmentDefinition> { let result = self.default_transform_fragment(fragment); if self.current_reachable_arguments.is_empty() { result } else { Transformed::Replace(match result { Transformed::Keep => { let mut new_fragment = fragment.clone(); self.join_current_arguments_to_fragment(&mut new_fragment); new_fragment } Transformed::Replace(mut new_fragment) => { self.join_current_arguments_to_fragment(&mut new_fragment); new_fragment } Transformed::Delete => { panic!("Unexpected fragment deletion in mask transform."); } }) } } fn transform_fragment_spread(&mut self, spread: &FragmentSpread) -> Transformed<Selection> { if RelayDirective::is_unmasked_fragment_spread(spread) { let fragment = self.program.fragment(spread.fragment.item).unwrap(); self.current_reachable_arguments .extend(&fragment.used_global_variables); Transformed::Replace(Selection::InlineFragment(Arc::new(InlineFragment { type_condition: Some(fragment.type_condition), directives: vec![], selections: self .transform_selections(&fragment.selections) .replace_or_else(|| fragment.selections.to_vec()), }))) } else { Transformed::Keep } } fn transform_scalar_field(&mut self, _field: 
&ScalarField) -> Transformed<Selection> { Transformed::Keep } }<|fim▁end|>
*/ use crate::relay_directive::RelayDirective; use graphql_ir::{
<|file_name|>memfd.rs<|end_file_name|><|fim▁begin|>use libc; use std::os::unix::io::RawFd; use {Errno, Result};<|fim▁hole|>use std::ffi::CStr; bitflags!( pub struct MemFdCreateFlag: libc::c_uint { const MFD_CLOEXEC = 0x0001; const MFD_ALLOW_SEALING = 0x0002; } ); pub fn memfd_create(name: &CStr, flags: MemFdCreateFlag) -> Result<RawFd> { use sys::syscall::{syscall, MEMFD_CREATE}; let res = unsafe { syscall(MEMFD_CREATE, name.as_ptr(), flags.bits()) }; Errno::result(res).map(|r| r as RawFd) }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># This package will contain the spiders of your Scrapy project # # Please refer to the documentation for information on how to create and manage # your spiders. import json from urllib import urlencode from scrapy import log from scrapy.http import Request from scrapy.selector import Selector from scrapy.contrib.loader import ItemLoader from scrapy.contrib.loader.processor import Identity, TakeFirst from torabot.spy.spiders.redis import RedisSpider from torabot.spy.items import Result from ..items import ( Bangumi, User, Post, SearchResult, SearchResultPost, Recommendation, QueryResult, ) class Bilibili(RedisSpider): name = 'bilibili' def __init__(self, life=60, *args, **kargs): super(Bilibili, self).__init__(*args, life=life, **kargs) def make_requests_from_query(self, query): query = json.loads(query) for req in { 'bangumi': self.make_bangumi_requests, 'user': self.make_user_requests, 'username': self.make_username_requests, 'query': self.make_query_requests, }[query['method']](query): yield req def make_username_requests(self, query): yield Request( make_username_search_uri(query['username']), callback=self.parse_username_prepare, meta=dict(query=query), dont_filter=True, ) def make_query_requests(self, query): yield Request( make_query_uri(query['query']), callback=self.parse_query, meta=dict(query=query), dont_filter=True, ) def make_bangumi_requests(self, query): yield Request( 'http://www.bilibili.tv/index/bangumi.json', callback=self.parse_bangumi, meta=dict(query=query), dont_filter=True, ) def make_user_requests(self, query): yield Request( 'http://space.bilibili.tv/' + query['user_id'], callback=self.parse_user, meta=dict(query=query), dont_filter=True, ) def parse_bangumi(self, response): query = response.meta['query'] try: return Bangumi( query=query, content=json.loads(response.body_as_unicode()) ) except: log.msg('parse failed', level=log.ERROR) return Result(ok=False, query=query) def 
parse_user(self, response): query = response.meta['query'] try: sel = Selector(response) return User( user_uri=response.url, query=query, posts=[make_post(sub) for sub in sel.xpath('//div[@class="main_list"]/ul/li')] ) except Exception as e: return failed(query, str(e)) def parse_query(self, response): query = response.meta['query'] try: sel = Selector(response) return QueryResult(<|fim▁hole|> query=query, posts=[make_search_post(sub) for sub in sel.xpath('//ul[@class="result"]/li')] ) except Exception as e: return failed(query, str(e)) def parse_username_prepare(self, response): query = response.meta['query'] try: sel = Selector(response) posts = [] for li in sel.xpath('//ul[@class="result"]/li'): post = make_search_post(li) if query['username'] == post['upper']: return Request( post['user_uri'], callback=self.parse_user, meta=dict(query=query), dont_filter=True, ) posts.append(post) return SearchResult( query=query, posts=[], recommendations=make_recommendations(posts), ) except Exception as e: return failed(query, str(e)) def make_recommendations(posts): def gen(): names = {} for p in posts: r = make_recommendation(p) if r['username'] not in names: yield r names[r['username']] = 1 return list(gen()) def make_recommendation(post): return Recommendation( user_uri=post['user_uri'], username=post['upper'], ) def failed(query, message): log.msg('parse failed: %s' % message, level=log.ERROR) return Result(ok=False, query=query, message=message) class SearchResultPostLoader(ItemLoader): default_item_class = SearchResultPost default_input_processor = Identity() default_output_processor = TakeFirst() def date_in(self, values): for s in values: yield s.strip() def make_search_post(sel): loader = SearchResultPostLoader(selector=sel) loader.add_xpath('title', 'string(.//div[@class="t"])') loader.add_xpath('upper', 'string(.//a[@class="upper"])') loader.add_xpath('kind', 'string(.//div[@class="t"]/span)') loader.add_xpath('date', 'string(.//i[@class="date"])') 
loader.add_xpath('intro', 'string(.//i[@class="intro"])') # mylist don't have title a, use first a instead # loader.add_xpath('uri', './/a[@class="title"]/@href') loader.add_xpath('uri', './/a/@href') loader.add_xpath('user_uri', './/a[@class="upper"]/@href') loader.add_xpath('cover', './/a[@class="title"]//img/@src') post = loader.load_item() if post.get('title', '') and post['title'].startswith(post.get('kind', '')): post['title'] = post['title'][len(post.get('kind', '')):] return post class PostLoader(ItemLoader): default_item_class = Post default_input_processor = Identity() default_output_processor = TakeFirst() def ctime_in(self, values): for s in values: yield s[5:] def make_post(sel): loader = PostLoader(selector=sel) loader.add_xpath('title', 'string(.//a[@class="title"])') loader.add_xpath('uri', './/a[@class="title"]/@href') loader.add_xpath('cover', './/img/@src') loader.add_xpath('kind', 'string(.//a[@class="l"])') loader.add_xpath('ctime', 'string(.//div[@class="c"])') loader.add_xpath('desc', 'string(.//div[@class="q"])') return loader.load_item() def make_username_search_uri(username): return make_query_uri(u'@author %s' % username) def make_query_uri(query): return 'http://www.bilibili.tv/search?' + urlencode({ 'keyword': query.encode('utf-8'), 'orderby': 'senddate', })<|fim▁end|>
uri=response.url,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""The Energy integration.""" from __future__ import annotations from homeassistant.components import frontend from homeassistant.core import HomeAssistant from homeassistant.helpers import discovery from homeassistant.helpers.typing import ConfigType from . import websocket_api from .const import DOMAIN from .data import async_get_manager async def is_configured(hass: HomeAssistant) -> bool: """Return a boolean to indicate if energy is configured.""" manager = await async_get_manager(hass) if manager.data is None: return False return bool(manager.data != manager.default_preferences()) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up Energy.""" websocket_api.async_setup(hass) frontend.async_register_built_in_panel(hass, DOMAIN, DOMAIN, "mdi:lightning-bolt") hass.async_create_task( discovery.async_load_platform(hass, "sensor", DOMAIN, {}, config) ) hass.data[DOMAIN] = {<|fim▁hole|> } return True<|fim▁end|>
"cost_sensors": {},
<|file_name|>fork_jit_tool.cpp<|end_file_name|><|fim▁begin|>/*BEGIN_LEGAL Intel Open Source License Copyright (c) 2002-2013 Intel Corporation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of the Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. END_LEGAL */ /*! @file * A test for callbacks around fork in jit mode. 
*/ #include <stdio.h> #include <sys/types.h> #include <unistd.h> #include <stdlib.h> #include "pin.H" #include <iostream> #include <fstream> using namespace std;<|fim▁hole|>INT32 Usage() { cerr << "This pin tool registers callbacks around fork().\n" "\n"; cerr << KNOB_BASE::StringKnobSummary(); cerr << endl; return -1; } pid_t parent_pid; PIN_LOCK lock; VOID BeforeFork(THREADID threadid, const CONTEXT* ctxt, VOID * arg) { PIN_GetLock(&lock, threadid+1); cerr << "TOOL: Before fork." << endl; PIN_ReleaseLock(&lock); parent_pid = PIN_GetPid(); } VOID AfterForkInParent(THREADID threadid, const CONTEXT* ctxt, VOID * arg) { PIN_GetLock(&lock, threadid+1); cerr << "TOOL: After fork in parent." << endl; PIN_ReleaseLock(&lock); if (PIN_GetPid() != parent_pid) { cerr << "PIN_GetPid() fails in parent process" << endl; exit(-1); } } VOID AfterForkInChild(THREADID threadid, const CONTEXT* ctxt, VOID * arg) { PIN_GetLock(&lock, threadid+1); cerr << "TOOL: After fork in child." << endl; PIN_ReleaseLock(&lock); if ((PIN_GetPid() == parent_pid) || (getppid() != parent_pid)) { cerr << "PIN_GetPid() fails in child process" << endl; exit(-1); } } int main(INT32 argc, CHAR **argv) { PIN_InitSymbols(); if( PIN_Init(argc,argv) ) { return Usage(); } // Initialize the pin lock PIN_InitLock(&lock); // Register a notification handler that is called when the application // forks a new process. PIN_AddForkFunction(FPOINT_BEFORE, BeforeFork, 0); PIN_AddForkFunction(FPOINT_AFTER_IN_PARENT, AfterForkInParent, 0); PIN_AddForkFunction(FPOINT_AFTER_IN_CHILD, AfterForkInChild, 0); // Never returns PIN_StartProgram(); return 0; }<|fim▁end|>
<|file_name|>465-generate-image-assets.py<|end_file_name|><|fim▁begin|><|fim▁hole|> def run(): for chunk in chunked(Webapp.objects.all(), 50): for app in chunk: try: generate_image_assets.delay(app) except Exception: pass<|fim▁end|>
from amo.utils import chunked from mkt.developers.tasks import generate_image_assets from mkt.webapps.models import Webapp
<|file_name|>282 Expression Add Operators.py<|end_file_name|><|fim▁begin|>""" Given a string that contains only digits 0-9 and a target value, return all possibilities to add binary operators (not unary) +, -, or * between the digits so they evaluate to the target value. Examples: "123", 6 -> ["1+2+3", "1*2*3"] "232", 8 -> ["2*3+2", "2+3*2"] "105", 5 -> ["1*0+5","10-5"] "00", 0 -> ["0+0", "0-0", "0*0"] "3456237490", 9191 -> [] """ __author__ = 'Daniel' class Solution(object): def addOperators(self, num, target): """ Adapted from https://leetcode.com/discuss/58614/java-standard-backtrace-ac-solutoin-short-and-clear Algorithm: 1. DFS 2. Special handling for multiplication 3. Detect invalid number with leading 0's :type num: str :type target: int :rtype: List[str] """ ret = [] self.dfs(num, target, 0, "", 0, 0, ret) return ret def dfs(self, num, target, pos, cur_str, cur_val, mul, ret): if pos >= len(num): if cur_val == target: ret.append(cur_str) else: for i in xrange(pos, len(num)): if i != pos and num[pos] == "0": continue nxt_val = int(num[pos:i+1]) if not cur_str: self.dfs(num, target, i+1, "%d"%nxt_val, nxt_val, nxt_val, ret)<|fim▁hole|> else: self.dfs(num, target, i+1, cur_str+"+%d"%nxt_val, cur_val+nxt_val, nxt_val, ret) self.dfs(num, target, i+1, cur_str+"-%d"%nxt_val, cur_val-nxt_val, -nxt_val, ret) self.dfs(num, target, i+1, cur_str+"*%d"%nxt_val, cur_val-mul+mul*nxt_val, mul*nxt_val, ret) if __name__ == "__main__": assert Solution().addOperators("232", 8) == ["2+3*2", "2*3+2"]<|fim▁end|>
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from sympy.core.numbers import comp, Rational from sympy.physics.optics.utils import (refraction_angle, fresnel_coefficients, deviation, brewster_angle, critical_angle, lens_makers_formula, mirror_formula, lens_formula, hyperfocal_distance, transverse_magnification) from sympy.physics.optics.medium import Medium from sympy.physics.units import e0 from sympy import symbols, sqrt, Matrix, oo from sympy.geometry.point import Point3D from sympy.geometry.line import Ray3D from sympy.geometry.plane import Plane from sympy.utilities.pytest import raises ae = lambda a, b, n: comp(a, b, 10**-n) def test_refraction_angle(): n1, n2 = symbols('n1, n2') m1 = Medium('m1') m2 = Medium('m2') r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0)) i = Matrix([1, 1, 1]) n = Matrix([0, 0, 1]) normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1)) P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1]) assert refraction_angle(r1, 1, 1, n) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle([1, 1, 1], 1, 1, n) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle((1, 1, 1), 1, 1, n) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle(i, 1, 1, [0, 0, 1]) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle(i, 1, 1, (0, 0, 1)) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle(i, 1, 1, normal_ray) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle(i, 1, 1, plane=P) == Matrix([ [ 1], [ 1], [-1]]) assert refraction_angle(r1, 1, 1, plane=P) == \ Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1)) assert refraction_angle(r1, m1, 1.33, plane=P) == \ Ray3D(Point3D(0, 0, 0), Point3D(Rational(100, 133), Rational(100, 133), -789378201649271*sqrt(3)/1000000000000000)) assert refraction_angle(r1, 1, m2, plane=P) == \ Ray3D(Point3D(0, 0, 0), Point3D(1, 1, -1)) assert refraction_angle(r1, n1, n2, plane=P) == \ Ray3D(Point3D(0, 0, 0), Point3D(n1/n2, n1/n2, -sqrt(3)*sqrt(-2*n1**2/(3*n2**2) + 1))) assert refraction_angle(r1, 1.33, 1, plane=P) == 0 # 
TIR assert refraction_angle(r1, 1, 1, normal_ray) == \ Ray3D(Point3D(0, 0, 0), direction_ratio=[1, 1, -1]) assert ae(refraction_angle(0.5, 1, 2), 0.24207, 5) assert ae(refraction_angle(0.5, 2, 1), 1.28293, 5) raises(ValueError, lambda: refraction_angle(r1, m1, m2, normal_ray, P)) raises(TypeError, lambda: refraction_angle(m1, m1, m2)) # can add other values for arg[0] raises(TypeError, lambda: refraction_angle(r1, m1, m2, None, i)) raises(TypeError, lambda: refraction_angle(r1, m1, m2, m2)) def test_fresnel_coefficients(): assert all(ae(i, j, 5) for i, j in zip( fresnel_coefficients(0.5, 1, 1.33), [0.11163, -0.17138, 0.83581, 0.82862])) assert all(ae(i, j, 5) for i, j in zip( fresnel_coefficients(0.5, 1.33, 1),<|fim▁hole|> [-0.07726, 0.20482, 1.22724, 1.20482])) m1 = Medium('m1') m2 = Medium('m2', n=2) assert all(ae(i, j, 5) for i, j in zip( fresnel_coefficients(0.3, m1, m2), [0.31784, -0.34865, 0.65892, 0.65135])) ans = [[-0.23563, -0.97184], [0.81648, -0.57738]] got = fresnel_coefficients(0.6, m2, m1) for i, j in zip(got, ans): for a, b in zip(i.as_real_imag(), j): assert ae(a, b, 5) def test_deviation(): n1, n2 = symbols('n1, n2') r1 = Ray3D(Point3D(-1, -1, 1), Point3D(0, 0, 0)) n = Matrix([0, 0, 1]) i = Matrix([-1, -1, -1]) normal_ray = Ray3D(Point3D(0, 0, 0), Point3D(0, 0, 1)) P = Plane(Point3D(0, 0, 0), normal_vector=[0, 0, 1]) assert deviation(r1, 1, 1, normal=n) == 0 assert deviation(r1, 1, 1, plane=P) == 0 assert deviation(r1, 1, 1.1, plane=P).evalf(3) + 0.119 < 1e-3 assert deviation(i, 1, 1.1, normal=normal_ray).evalf(3) + 0.119 < 1e-3 assert deviation(r1, 1.33, 1, plane=P) is None # TIR assert deviation(r1, 1, 1, normal=[0, 0, 1]) == 0 assert deviation([-1, -1, -1], 1, 1, normal=[0, 0, 1]) == 0 assert ae(deviation(0.5, 1, 2), -0.25793, 5) assert ae(deviation(0.5, 2, 1), 0.78293, 5) def test_brewster_angle(): m1 = Medium('m1', n=1) m2 = Medium('m2', n=1.33) assert ae(brewster_angle(m1, m2), 0.93, 2) m1 = Medium('m1', permittivity=e0, n=1) m2 = 
Medium('m2', permittivity=e0, n=1.33) assert ae(brewster_angle(m1, m2), 0.93, 2) assert ae(brewster_angle(1, 1.33), 0.93, 2) def test_critical_angle(): m1 = Medium('m1', n=1) m2 = Medium('m2', n=1.33) assert ae(critical_angle(m2, m1), 0.85, 2) def test_lens_makers_formula(): n1, n2 = symbols('n1, n2') m1 = Medium('m1', permittivity=e0, n=1) m2 = Medium('m2', permittivity=e0, n=1.33) assert lens_makers_formula(n1, n2, 10, -10) == 5*n2/(n1 - n2) assert ae(lens_makers_formula(m1, m2, 10, -10), -20.15, 2) assert ae(lens_makers_formula(1.33, 1, 10, -10), 15.15, 2) def test_mirror_formula(): u, v, f = symbols('u, v, f') assert mirror_formula(focal_length=f, u=u) == f*u/(-f + u) assert mirror_formula(focal_length=f, v=v) == f*v/(-f + v) assert mirror_formula(u=u, v=v) == u*v/(u + v) assert mirror_formula(u=oo, v=v) == v assert mirror_formula(u=oo, v=oo) is oo assert mirror_formula(focal_length=oo, u=u) == -u assert mirror_formula(u=u, v=oo) == u assert mirror_formula(focal_length=oo, v=oo) is oo assert mirror_formula(focal_length=f, v=oo) == f assert mirror_formula(focal_length=oo, v=v) == -v assert mirror_formula(focal_length=oo, u=oo) is oo assert mirror_formula(focal_length=f, u=oo) == f assert mirror_formula(focal_length=oo, u=u) == -u raises(ValueError, lambda: mirror_formula(focal_length=f, u=u, v=v)) def test_lens_formula(): u, v, f = symbols('u, v, f') assert lens_formula(focal_length=f, u=u) == f*u/(f + u) assert lens_formula(focal_length=f, v=v) == f*v/(f - v) assert lens_formula(u=u, v=v) == u*v/(u - v) assert lens_formula(u=oo, v=v) == v assert lens_formula(u=oo, v=oo) is oo assert lens_formula(focal_length=oo, u=u) == u assert lens_formula(u=u, v=oo) == -u assert lens_formula(focal_length=oo, v=oo) is -oo assert lens_formula(focal_length=oo, v=v) == v assert lens_formula(focal_length=f, v=oo) == -f assert lens_formula(focal_length=oo, u=oo) is oo assert lens_formula(focal_length=oo, u=u) == u assert lens_formula(focal_length=f, u=oo) == f raises(ValueError, 
lambda: lens_formula(focal_length=f, u=u, v=v)) def test_hyperfocal_distance(): f, N, c = symbols('f, N, c') assert hyperfocal_distance(f=f, N=N, c=c) == f**2/(N*c) assert ae(hyperfocal_distance(f=0.5, N=8, c=0.0033), 9.47, 2) def test_transverse_magnification(): si, so = symbols('si, so') assert transverse_magnification(si, so) == -si/so assert transverse_magnification(30, 15) == -2<|fim▁end|>
<|file_name|>scale_gradient.py<|end_file_name|><|fim▁begin|># Copyright 2019 The Sonnet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ """TensorFlow op that scales gradient for backwards pass.""" from typing import Tuple from sonnet.src import types import tensorflow as tf @tf.custom_gradient def scale_gradient( t: tf.Tensor, scale: types.FloatLike ) -> Tuple[tf.Tensor, types.GradFn]: """Scales gradients for the backwards pass. Args: t: A Tensor. scale: The scale factor for the gradient on the backwards pass. <|fim▁hole|> def grad(dy: tf.Tensor) -> Tuple[tf.Tensor, None]: """Scaled gradient.""" return scale * dy, None return t, grad<|fim▁end|>
Returns: A Tensor same as input, with scaled backward gradient. """
<|file_name|>test-stats-exporter.ts<|end_file_name|><|fim▁begin|>/** * Copyright 2018 OpenCensus Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import { Logger, MeasureUnit, globalStats, Measurement, AggregationType } from '@opencensus/core'; import { AzureStatsExporter, } from '../src/azure-stats'; import { AzureStatsExporterOptions, IllegalOptionsError } from '../src/types'; import { describe, it } from 'mocha'; import { assert } from 'chai'; import * as sinon from 'sinon'; class MockLogger implements Logger { level?: string; // tslint:disable-next-line:no-any debugBuffer: any[] = []; errorMessagesBuffer: any[] = []; infoBuffer: any[] = []; cleanAll() { this.debugBuffer = []; this.errorMessagesBuffer = []; } // tslint:disable-next-line:no-any debug(message: string, ...args: any[]) { this.debugBuffer.push(...args); } // tslint:disable-next-line:no-any error(message: string, ...args: any[]) { this.errorMessagesBuffer.push(message); } // tslint:disable-next-line:no-any warn(...args: any[]) {} // tslint:disable-next-line:no-any info(message: string, ...args: any[]) { this.infoBuffer.push(message); } } /** * Tests construction of the exporter. * Specifically, that instrumentation keys are valid. 
*/ describe('Exporter Construction', () => { const INVALID_INSTRUMENTATION_KEY_ERROR_MSG = 'You must provide a valid instrumentation key.'; let exporter: AzureStatsExporter; const mockLogger = new MockLogger(); afterEach(() => { if (exporter) exporter.stop(); mockLogger.cleanAll(); }); it('Throws an error if no instrumentation key is provided.', () => { const options: AzureStatsExporterOptions = { instrumentationKey: undefined, logger: mockLogger }; assert.throws(() => { // This should throw an error. exporter = new AzureStatsExporter(options); }, IllegalOptionsError, INVALID_INSTRUMENTATION_KEY_ERROR_MSG) assert(mockLogger.errorMessagesBuffer.length === 1, 'There was not exactly one error log.'); assert(mockLogger.errorMessagesBuffer[0] === INVALID_INSTRUMENTATION_KEY_ERROR_MSG, 'Incorrect message given.'); }); it('Throws an error if the provided instrumentation key is an empty string.', () => { const options: AzureStatsExporterOptions = { instrumentationKey: '', logger: mockLogger }; assert.throws(() => { // This should throw an error. exporter = new AzureStatsExporter(options); }, IllegalOptionsError, INVALID_INSTRUMENTATION_KEY_ERROR_MSG); assert(mockLogger.errorMessagesBuffer.length === 1, 'There was not exactly one error log.'); assert(mockLogger.errorMessagesBuffer[0] === INVALID_INSTRUMENTATION_KEY_ERROR_MSG, 'Incorrect message given.'); }); it('Attempts to start the exporter if a seemingly valid instrumentation key is provided.', () => { const options: AzureStatsExporterOptions = { instrumentationKey: 'seemingly-valid', logger: mockLogger }; assert }); }); //Sends simple metric to onRecord method //Checks Logger and hopefully telemetry object describe('Single-Value Stats Exporting', () => { // Define dependencies for the exporter. const mockLogger = new MockLogger(); let exporterOptions: AzureStatsExporterOptions; // Define the exporter itself. let exporter: AzureStatsExporter; // Define the test metric. 
const measure = globalStats.createMeasureDouble( 'opencensus.io/test/double', MeasureUnit.UNIT, 'Measure Double' ); const measurement: Measurement = { measure: measure, value: 25 }; let stub; before(() => { exporterOptions = { instrumentationKey: 'fake-instrumentation-key', logger: mockLogger, }; exporter = new AzureStatsExporter(exporterOptions); stub = sinon.stub(exporter, "exportSingleMetric"); }); afterEach(() => { exporter.stop(); mockLogger.cleanAll(); globalStats.clear();<|fim▁hole|> stub.resetBehavior(); }); it('Should export a simple metric.', () => { exporter.onRecord(undefined, measurement, undefined); assert(stub.called, 'Application Insights SDk was not called'); }); }); describe('Batch Functionality', () => { const mockLogger = new MockLogger(); let exporterOptions: AzureStatsExporterOptions; let exporter: AzureStatsExporter; const measure = globalStats.createMeasureDouble( 'opencensus.io/test/doule', MeasureUnit.UNIT, 'Measure Double' ); const view = globalStats.createView( 'test/view', measure, AggregationType.COUNT, null, 'This is a test view.' ); before(() => { exporterOptions = { instrumentationKey: 'fake-instrumentation-key', logger: mockLogger }; exporter = new AzureStatsExporter(exporterOptions); }); afterEach(() => { exporter.stop(); mockLogger.cleanAll(); globalStats.clear(); }); it('Should register the metric contained within a view, when a new view is registered.', () => { exporter.onRegisterView(view); assert(mockLogger.infoBuffer.length === 1, 'There was not an info log message.'); assert(mockLogger.infoBuffer[0] === 'Now tracking measure: ' + measure.name) }); });<|fim▁end|>
<|file_name|>praha_premierecinemas.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from .base_premierecinemas import BasePremierecinemasCinemaSpider <|fim▁hole|>class Spider(BasePremierecinemasCinemaSpider): name = 'praha-premierecinemas' calendar_url = 'http://www.premierecinemas.cz/'<|fim▁end|>
<|file_name|>development.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from .base import * # memcache<|fim▁hole|> 'default' : { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache' } }<|fim▁end|>
CACHES = {
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import Blueprint, request, render_template from ..load import processing_results from ..abbr import get_abbr_map abbr_map = get_abbr_map() liner_mod = Blueprint('liner', __name__, template_folder='templates', static_folder='static') @liner_mod.route('/liner', methods=['GET', 'POST']) def liner(): if request.method == 'POST': query = request.form['liner-text'] text = query.split('.')[:-1] if len(text) == 0: return render_template('projects/line.html', message='Please separate each line with "."') abbr_expanded_text = "" for word in query.split(): if word in abbr_map: abbr_expanded_text += abbr_map[word] else: abbr_expanded_text += word<|fim▁hole|> abbr_expanded_text += " " data, emotion_sents, score, line_sentiment, text, length = processing_results(text) return render_template('projects/line.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length, abbr_expanded_text]) else: return render_template('projects/line.html')<|fim▁end|>
<|file_name|>timing_diagram.py<|end_file_name|><|fim▁begin|>import pdb class TimingDiagram: def print_diagram(self, xtsm_object): pdb.set_trace() seq = xtsm_object.XTSM.getActiveSequence() cMap=seq.getOwnerXTSM().getDescendentsByType("ChannelMap")[0] #channelHeir=cMap.createTimingGroupHeirarchy() #channelRes=cMap.findTimingGroupResolutions() #Parser out put node. Use TimingProffer #Control arrays hold what is actually coming out. seq.collectTimingProffers() edge_timings = seq.TimingProffer.data['Edge'] class Edge: def __init__(self, timing_group, channel_number, time, value, tag, name, initial_value, holding_value): self.timing_group = timing_group self.channel_number = channel_number self.time = time self.value = value self.tag = tag self.max = 0 self.min = 0 self.name = name self.holding_value = holding_value self.initial_value = initial_value def is_same(self,edge): if ((self.timing_group == edge.timing_group) and (self.channel_number == edge.channel_number) and (self.time == edge.time) and (self.value == edge.value) and (self.tag == edge.tag)): return True else: return False edges = [] longest_name = 0 for edge in edge_timings: for channel in cMap.Channel: tgroup = int(channel.TimingGroup.PCDATA) tgroupIndex = int(channel.TimingGroupIndex.PCDATA) if tgroup == int(edge[0]) and tgroupIndex == int(edge[1]): name = channel.ChannelName.PCDATA init_val = '' hold_val = '' try: init_val = channel.InitialValue.PCDATA except AttributeError: init_val = 'None ' try: hold_val = channel.HoldingValue.PCDATA except AttributeError: hold_val = 'None ' if len(name) > longest_name: longest_name = len(name) edges.append(Edge(edge[0],edge[1],edge[2],edge[3],edge[4], name, init_val,hold_val)) #pdb.set_trace() unique_group_channels = [] for edge in edges: is_found = False for ugc in unique_group_channels: if edge.is_same(ugc): is_found = True if not is_found: unique_group_channels.append(edge) from operator import itemgetter edge_timings_by_group = sorted(edge_timings, 
key=itemgetter(2)) edge_timings_by_group_list = [] for edge in edge_timings_by_group: edge_timings_by_group_list.append(edge.tolist()) #print edge_timings for p in edge_timings_by_group_list: print p unique_times = [] for edge in edges: is_found = False for t in unique_times: if edge.time == t.time: is_found = True if not is_found: unique_times.append(edge) #pdb.set_trace() for ugc in unique_group_channels: s = ugc.name.rjust(longest_name) current_edge = edges[0] previous_edge = edges[0] is_first = True for t in unique_times: is_found = False for edge in edges:<|fim▁hole|> current_edge = edge if is_first: s = s + '|' + str('%7s' % str(current_edge.initial_value)) is_first = False previous_edge.value = current_edge.initial_value if previous_edge.value == 'None ': previous_edge.value = 0 if is_found: if current_edge.value > previous_edge.value: s += '^' + str('%7s' % str(current_edge.value)) else: s += 'v' + str('%7s' % str(current_edge.value)) previous_edge = current_edge else: s += '|' + '.'*7 s = s + '|' + str('%7s' % str(current_edge.holding_value)) print s s = "Time (ms)".rjust(longest_name) + '|' + str('%7s' % str("Initial")) for t in unique_times: s += '|' + str('%7s' % str(t.time)) s = s + '|' + str('%7s' % str("Holding")) print s<|fim▁end|>
if edge.timing_group == ugc.timing_group and edge.channel_number == ugc.channel_number and edge.time == t.time: is_found = True
<|file_name|>Benchmark.ts<|end_file_name|><|fim▁begin|>namespace ZincDB { export class Benchmark { benchmarkContext: BenchmarkContext; sampleResults: number[]; defaultOptions: BenchmarkOptions; constructor(benchmarkContext: BenchmarkContext, options?: BenchmarkOptions) { this.benchmarkContext = benchmarkContext; if (options) this.defaultOptions = options; else options = { maximumSamples: 20, maximumTime: 100 }; this.sampleResults = []; } run(benchmarkedFunction: Action, testTitle: string, options?: BenchmarkOptions): number { this.sampleResults.length = 0; if (!options) options = this.defaultOptions; let sampleCount = 0; <|fim▁hole|> if (this.benchmarkContext.beforeEach) this.benchmarkContext.beforeEach(); // Actual run const sampleStartTime = Timer.getTimestamp(); benchmarkedFunction.call(this.benchmarkContext); const sampleEndTime = Timer.getTimestamp(); // // Teardown if (this.benchmarkContext.afterEach) this.benchmarkContext.afterEach(); // Calcs const sampleElapsedTime = sampleEndTime - sampleStartTime; this.sampleResults.push(sampleElapsedTime); //console.log("Iteration " + iterationCount + ": " + iterationElapsedTime.toFixed(3)); sampleCount++; } while (sampleCount < options.maximumSamples && Timer.getTimestamp() - testStartTime < options.maximumTime); // calculate result time const result = this.getResult(); const message = `${testTitle}: ${result.toFixed(3)}ms (${(1000 / result).toFixed(0)} runs/s, ${sampleCount} sampled)`; log(message, true); return result; } runAll(excludeList: any[]) { let excludedFunctions = ["beforeEach", "afterEach", "constructor"]; excludedFunctions = excludedFunctions.concat(excludeList); const propertyList = Object.getOwnPropertyNames(Object.getPrototypeOf(this.benchmarkContext)); for (const propertyName of propertyList) if ((typeof this.benchmarkContext[propertyName] === "function") && excludedFunctions.indexOf(propertyName) === -1 && excludedFunctions.indexOf(this.benchmarkContext[propertyName]) === -1) 
this.run(this.benchmarkContext[propertyName], propertyName); } getResult(): number { this.sampleResults.sort(Comparers.ascendingNumberComparer); return this.sampleResults[Math.floor(this.sampleResults.length / 2)]; } static run(testFunction: Action, testTitle: string, context: BenchmarkContext = {}, options?: BenchmarkOptions): number { const benchmark = new Benchmark(context); return benchmark.run(testFunction, testTitle, options); } } export interface BenchmarkContext { beforeEach?: Action; afterEach?: Action; [memberName: string]: any; } export interface BenchmarkOptions { maximumTime: number; maximumSamples: number; logToDocument?: boolean; } }<|fim▁end|>
const testStartTime = Timer.getTimestamp(); do { // Setup
<|file_name|>oEmbed.js<|end_file_name|><|fim▁begin|>/* @flow */ import oEmbedStorage from './oEmbedStorage'; import getContentType from './getContentType'; import regexes from './regexes'; import providers from './providers'; import type { Embed } from './oEmbedTypes'; function getProperty(prop: string, type: ?string): RegExp { if (typeof type === 'string') { return new RegExp("<meta[^>]*property[ ]*=[ ]*['|\"]og:" + type + ':' + prop + "['|\"][^>]*[>]", 'i'); } return new RegExp("<meta[^>]*property[ ]*=[ ]*['|\"]og:" + prop + "['|\"][^>]*[>]", 'i'); } function getContent(regex) { return regex[0].match(regexes.content)[0].match(/['|"].*/)[0].slice(1); } function decodeText(text) { return text .replace(/&lt;/g, '<') .replace(/&gt;/g, '>') .replace(/&amp;/g, '&') .replace(/&quot;/g, '"') .replace(/&nbsp;/g, ' ') .replace(/&#(x?)(\d+);/g, (m, p1, p2) => String.fromCharCode(((p1 === 'x') ? parseInt(p2, 16) : p2))); } function parseHTML(body): ?Embed { const data: Embed = { type: 'link', }; const props = [ 'title', 'description' ]; for (let i = 0; i < props.length; i++) { const match = body.match(getProperty(props[i])); if (match && match.length) { data[props[i]] = decodeText(getContent(match)); } } const propsWithType = [ 'width', 'height' ]; for (let i = 0; i < propsWithType.length; i++) { const types = [ 'video', 'image' ]; for (let j = 0; j < types.length; j++) { const match = body.match(getProperty(propsWithType[i], types[j])); if (match && match.length) { data['thumbnail_' + propsWithType[i]] = parseInt(getContent(match), 10); } } } const imageUrl = body.match(regexes.image); if (imageUrl) { data.thumbnail_url = getContent(imageUrl); } if (!data.title) { const matches = body.match(regexes.title); if (matches && matches.length) { const title = matches[0].match(/[>][^<]*/); if (title && title.length) { data.title = decodeText(title[0].slice(1)); } } } if (!data.description) { const matches = body.match(regexes.description); if (matches && matches.length) { const 
description = matches[0].match(regexes.content)[0].match(/['|"][^'|^"]*/); if (description && description.length) { data.description = decodeText(description[0].slice(1)); } } } if (Object.keys(data).length > 1) {<|fim▁hole|> return null; } function extractLink(body) { const res = body.match(regexes.link); if (res && res.length) { return res[0].match(/http[s]?:\/\/[^"']*/i)[0].replace(/&amp;/g, '&'); } return null; } async function fetchData(url: string): Promise<Embed> { const body = await fetch(url).then(res => res.text()).then(text => text.replace(/(\r\n|\n|\r)/g, '')); const dataUrl = extractLink(body); let data; if (dataUrl) { data = await (await fetch(dataUrl)).json(); } else { data = parseHTML(body); } if (data) { oEmbedStorage.set(url, data); return data; } else { throw new Error('Failed to get data from HTML'); } } export default async function(url: string): Promise<Embed> { if (typeof url !== 'string') { throw new TypeError('URL must be a string'); } if (!/^https?:\/\//i.test(url)) { throw new Error("URL must start with 'http://' or 'https://'"); } const json = await oEmbedStorage.get(url); if (json) { return json; } let endpoint; for (let i = 0, l = providers.length; i < l; i++) { const provider = providers[i]; if (provider[0].test(url)) { endpoint = provider[1] + '?format=json&maxheight=240&url=' + encodeURIComponent(url); } } if (endpoint) { const data = await fetch(endpoint).then(res => res.json()); oEmbedStorage.set(url, data); return data; } const contentType = await getContentType(url); if (contentType) { if (contentType.indexOf('image') > -1) { return { type: 'link', thumbnail_url: url, }; } else if (contentType.indexOf('text/html') > -1) { return fetchData(url); } } throw new Error('No oEmbed data found for ' + url); }<|fim▁end|>
return data; }
<|file_name|>helloworld_grpc_pb.js<|end_file_name|><|fim▁begin|>// GENERATED CODE -- DO NOT EDIT! // Original file comments: // Copyright 2015, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
// 'use strict'; var grpc = require('grpc'); var helloworld_pb = require('./helloworld_pb.js'); function serialize_HelloReply(arg) { if (!(arg instanceof helloworld_pb.HelloReply)) { throw new Error('Expected argument of type HelloReply'); } return new Buffer(arg.serializeBinary()); } function deserialize_HelloReply(buffer_arg) { return helloworld_pb.HelloReply.deserializeBinary(new Uint8Array(buffer_arg)); } function serialize_HelloRequest(arg) { if (!(arg instanceof helloworld_pb.HelloRequest)) { throw new Error('Expected argument of type HelloRequest'); } return new Buffer(arg.serializeBinary()); } <|fim▁hole|>function deserialize_HelloRequest(buffer_arg) { return helloworld_pb.HelloRequest.deserializeBinary(new Uint8Array(buffer_arg)); } // The greeting service definition. var GreeterService = exports.GreeterService = { // Sends a greeting sayHello: { path: '/helloworld.Greeter/SayHello', requestStream: false, responseStream: false, requestType: helloworld_pb.HelloRequest, responseType: helloworld_pb.HelloReply, requestSerialize: serialize_HelloRequest, requestDeserialize: deserialize_HelloRequest, responseSerialize: serialize_HelloReply, responseDeserialize: deserialize_HelloReply, }, }; exports.GreeterClient = grpc.makeGenericClientConstructor(GreeterService);<|fim▁end|>
<|file_name|>learning-experiment.py<|end_file_name|><|fim▁begin|><|fim▁hole|># This file is part of Lerot. # # Lerot is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Lerot is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Lerot. If not, see <http://www.gnu.org/licenses/>. try: from include import * except: pass from experiment import GenericExperiment if __name__ == "__main__": experiment = GenericExperiment() experiment.run()<|fim▁end|>
#!/usr/bin/python
<|file_name|>chats.py<|end_file_name|><|fim▁begin|>import asyncio import inspect import itertools import string import typing from .. import helpers, utils, hints from ..requestiter import RequestIter from ..tl import types, functions, custom if typing.TYPE_CHECKING: from .telegramclient import TelegramClient _MAX_PARTICIPANTS_CHUNK_SIZE = 200 _MAX_ADMIN_LOG_CHUNK_SIZE = 100 _MAX_PROFILE_PHOTO_CHUNK_SIZE = 100 class _ChatAction: _str_mapping = { 'typing': types.SendMessageTypingAction(), 'contact': types.SendMessageChooseContactAction(), 'game': types.SendMessageGamePlayAction(), 'location': types.SendMessageGeoLocationAction(), 'record-audio': types.SendMessageRecordAudioAction(), 'record-voice': types.SendMessageRecordAudioAction(), # alias 'record-round': types.SendMessageRecordRoundAction(), 'record-video': types.SendMessageRecordVideoAction(), 'audio': types.SendMessageUploadAudioAction(1), 'voice': types.SendMessageUploadAudioAction(1), # alias 'song': types.SendMessageUploadAudioAction(1), # alias 'round': types.SendMessageUploadRoundAction(1), 'video': types.SendMessageUploadVideoAction(1), 'photo': types.SendMessageUploadPhotoAction(1), 'document': types.SendMessageUploadDocumentAction(1), 'file': types.SendMessageUploadDocumentAction(1), # alias 'cancel': types.SendMessageCancelAction() } def __init__(self, client, chat, action, *, delay, auto_cancel): self._client = client self._chat = chat self._action = action self._delay = delay self._auto_cancel = auto_cancel self._request = None self._task = None self._running = False async def __aenter__(self): self._chat = await self._client.get_input_entity(self._chat) # Since `self._action` is passed by reference we can avoid # recreating the request all the time and still modify # `self._action.progress` directly in `progress`. 
self._request = functions.messages.SetTypingRequest( self._chat, self._action) self._running = True self._task = self._client.loop.create_task(self._update()) return self async def __aexit__(self, *args): self._running = False if self._task: self._task.cancel() try: await self._task except asyncio.CancelledError: pass self._task = None __enter__ = helpers._sync_enter __exit__ = helpers._sync_exit async def _update(self): try: while self._running: await self._client(self._request) await asyncio.sleep(self._delay) except ConnectionError: pass except asyncio.CancelledError: if self._auto_cancel: await self._client(functions.messages.SetTypingRequest( self._chat, types.SendMessageCancelAction())) def progress(self, current, total): if hasattr(self._action, 'progress'): self._action.progress = 100 * round(current / total) class _ParticipantsIter(RequestIter): async def _init(self, entity, filter, search, aggressive): if isinstance(filter, type): if filter in (types.ChannelParticipantsBanned, types.ChannelParticipantsKicked, types.ChannelParticipantsSearch, types.ChannelParticipantsContacts): # These require a `q` parameter (support types for convenience) filter = filter('') else: filter = filter() entity = await self.client.get_input_entity(entity) ty = helpers._entity_type(entity) if search and (filter or ty != helpers._EntityType.CHANNEL): # We need to 'search' ourselves unless we have a PeerChannel search = search.casefold() self.filter_entity = lambda ent: ( search in utils.get_display_name(ent).casefold() or search in (getattr(ent, 'username', None) or '').casefold() ) else: self.filter_entity = lambda ent: True # Only used for channels, but we should always set the attribute self.requests = [] if ty == helpers._EntityType.CHANNEL: self.total = (await self.client( functions.channels.GetFullChannelRequest(entity) )).full_chat.participants_count if self.limit <= 0: raise StopAsyncIteration self.seen = set() if aggressive and not filter: 
self.requests.extend(functions.channels.GetParticipantsRequest( channel=entity, filter=types.ChannelParticipantsSearch(x), offset=0, limit=_MAX_PARTICIPANTS_CHUNK_SIZE, hash=0 ) for x in (search or string.ascii_lowercase)) else: self.requests.append(functions.channels.GetParticipantsRequest( channel=entity, filter=filter or types.ChannelParticipantsSearch(search), offset=0, limit=_MAX_PARTICIPANTS_CHUNK_SIZE, hash=0 )) elif ty == helpers._EntityType.CHAT: full = await self.client( functions.messages.GetFullChatRequest(entity.chat_id)) if not isinstance( full.full_chat.participants, types.ChatParticipants): # ChatParticipantsForbidden won't have ``.participants`` self.total = 0 raise StopAsyncIteration self.total = len(full.full_chat.participants.participants) users = {user.id: user for user in full.users} for participant in full.full_chat.participants.participants: user = users[participant.user_id] if not self.filter_entity(user): continue user = users[participant.user_id] user.participant = participant self.buffer.append(user) return True else: self.total = 1 if self.limit != 0: user = await self.client.get_entity(entity) if self.filter_entity(user): user.participant = None self.buffer.append(user) return True async def _load_next_chunk(self): if not self.requests: return True # Only care about the limit for the first request # (small amount of people, won't be aggressive). # # Most people won't care about getting exactly 12,345 # members so it doesn't really matter not to be 100% # precise with being out of the offset/limit here. 
self.requests[0].limit = min( self.limit - self.requests[0].offset, _MAX_PARTICIPANTS_CHUNK_SIZE) if self.requests[0].offset > self.limit: return True results = await self.client(self.requests) for i in reversed(range(len(self.requests))): participants = results[i] if not participants.users: self.requests.pop(i) continue self.requests[i].offset += len(participants.participants) users = {user.id: user for user in participants.users} for participant in participants.participants: user = users[participant.user_id] if not self.filter_entity(user) or user.id in self.seen: continue self.seen.add(participant.user_id) user = users[participant.user_id] user.participant = participant self.buffer.append(user) class _AdminLogIter(RequestIter): async def _init( self, entity, admins, search, min_id, max_id, join, leave, invite, restrict, unrestrict, ban, unban, promote, demote, info, settings, pinned, edit, delete ): if any((join, leave, invite, restrict, unrestrict, ban, unban, promote, demote, info, settings, pinned, edit, delete)): events_filter = types.ChannelAdminLogEventsFilter( join=join, leave=leave, invite=invite, ban=restrict, unban=unrestrict, kick=ban, unkick=unban, promote=promote, demote=demote, info=info, settings=settings, pinned=pinned, edit=edit, delete=delete ) else: events_filter = None self.entity = await self.client.get_input_entity(entity) admin_list = [] if admins: if not utils.is_list_like(admins): admins = (admins,) for admin in admins: admin_list.append(await self.client.get_input_entity(admin)) self.request = functions.channels.GetAdminLogRequest( self.entity, q=search or '', min_id=min_id, max_id=max_id, limit=0, events_filter=events_filter, admins=admin_list or None ) async def _load_next_chunk(self): self.request.limit = min(self.left, _MAX_ADMIN_LOG_CHUNK_SIZE) r = await self.client(self.request) entities = {utils.get_peer_id(x): x for x in itertools.chain(r.users, r.chats)} self.request.max_id = min((e.id for e in r.events), default=0) for ev in 
r.events: if isinstance(ev.action, types.ChannelAdminLogEventActionEditMessage): ev.action.prev_message._finish_init( self.client, entities, self.entity) ev.action.new_message._finish_init( self.client, entities, self.entity) elif isinstance(ev.action, types.ChannelAdminLogEventActionDeleteMessage): ev.action.message._finish_init( self.client, entities, self.entity) self.buffer.append(custom.AdminLogEvent(ev, entities)) if len(r.events) < self.request.limit: return True class _ProfilePhotoIter(RequestIter): async def _init( self, entity, offset, max_id ): entity = await self.client.get_input_entity(entity) ty = helpers._entity_type(entity) if ty == helpers._EntityType.USER: self.request = functions.photos.GetUserPhotosRequest( entity, offset=offset, max_id=max_id, limit=1 ) else: self.request = functions.messages.SearchRequest( peer=entity, q='', filter=types.InputMessagesFilterChatPhotos(), min_date=None, max_date=None, offset_id=0, add_offset=offset, limit=1, max_id=max_id, min_id=0, hash=0 ) if self.limit == 0: self.request.limit = 1 result = await self.client(self.request) if isinstance(result, types.photos.Photos): self.total = len(result.photos) elif isinstance(result, types.messages.Messages): self.total = len(result.messages) else: # Luckily both photosSlice and messages have a count for total self.total = getattr(result, 'count', None) async def _load_next_chunk(self): self.request.limit = min(self.left, _MAX_PROFILE_PHOTO_CHUNK_SIZE) result = await self.client(self.request) if isinstance(result, types.photos.Photos): self.buffer = result.photos self.left = len(self.buffer) self.total = len(self.buffer) elif isinstance(result, types.messages.Messages): self.buffer = [x.action.photo for x in result.messages if isinstance(x.action, types.MessageActionChatEditPhoto)] self.left = len(self.buffer) self.total = len(self.buffer) elif isinstance(result, types.photos.PhotosSlice): self.buffer = result.photos self.total = result.count if len(self.buffer) < 
self.request.limit: self.left = len(self.buffer) else: self.request.offset += len(result.photos) else: self.buffer = [x.action.photo for x in result.messages if isinstance(x.action, types.MessageActionChatEditPhoto)] self.total = getattr(result, 'count', None) if len(result.messages) < self.request.limit: self.left = len(self.buffer) elif result.messages: self.request.add_offset = 0 self.request.offset_id = result.messages[-1].id class ChatMethods: # region Public methods def iter_participants( self: 'TelegramClient', entity: 'hints.EntityLike', limit: float = None, *, search: str = '', filter: 'types.TypeChannelParticipantsFilter' = None, aggressive: bool = False) -> _ParticipantsIter: """ Iterator over the participants belonging to the specified chat. The order is unspecified. Arguments entity (`entity`): The entity from which to retrieve the participants list. limit (`int`): Limits amount of participants fetched. search (`str`, optional): Look for participants with this string in name/username. If ``aggressive is True``, the symbols from this string will be used. filter (:tl:`ChannelParticipantsFilter`, optional): The filter to be used, if you want e.g. only admins Note that you might not have permissions for some filter. This has no effect for normal chats or users. .. note:: The filter :tl:`ChannelParticipantsBanned` will return *restricted* users. If you want *banned* users you should use :tl:`ChannelParticipantsKicked` instead. aggressive (`bool`, optional): Aggressively looks for all participants in the chat. This is useful for channels since 20 July 2018, Telegram added a server-side limit where only the first 200 members can be retrieved. With this flag set, more than 200 will be often be retrieved. This has no effect if a ``filter`` is given. 
Yields The :tl:`User` objects returned by :tl:`GetParticipantsRequest` with an additional ``.participant`` attribute which is the matched :tl:`ChannelParticipant` type for channels/megagroups or :tl:`ChatParticipants` for normal chats. Example .. code-block:: python # Show all user IDs in a chat async for user in client.iter_participants(chat): print(user.id) # Search by name async for user in client.iter_participants(chat, search='name'): print(user.username) # Filter by admins from telethon.tl.types import ChannelParticipantsAdmins async for user in client.iter_participants(chat, filter=ChannelParticipantsAdmins): print(user.first_name) """ return _ParticipantsIter( self, limit, entity=entity, filter=filter, search=search, aggressive=aggressive ) async def get_participants( self: 'TelegramClient', *args, **kwargs) -> 'hints.TotalList': """ Same as `iter_participants()`, but returns a `TotalList <telethon.helpers.TotalList>` instead. Example .. code-block:: python users = await client.get_participants(chat) print(users[0].first_name) for user in users: if user.username is not None: print(user.username) """ return await self.iter_participants(*args, **kwargs).collect() get_participants.__signature__ = inspect.signature(iter_participants) def iter_admin_log( self: 'TelegramClient', entity: 'hints.EntityLike', limit: float = None, *, max_id: int = 0, min_id: int = 0, search: str = None, admins: 'hints.EntitiesLike' = None, join: bool = None, leave: bool = None, invite: bool = None, restrict: bool = None, unrestrict: bool = None, ban: bool = None, unban: bool = None, promote: bool = None, demote: bool = None, info: bool = None, settings: bool = None, pinned: bool = None, edit: bool = None, delete: bool = None) -> _AdminLogIter: """ Iterator over the admin log for the specified channel. The default order is from the most recent event to to the oldest. Note that you must be an administrator of it to use this method. If none of the filters are present (i.e. 
they all are `None`), *all* event types will be returned. If at least one of them is `True`, only those that are true will be returned. Arguments entity (`entity`): The channel entity from which to get its admin log. limit (`int` | `None`, optional): Number of events to be retrieved. The limit may also be `None`, which would eventually return the whole history. max_id (`int`): All the events with a higher (newer) ID or equal to this will be excluded. min_id (`int`): All the events with a lower (older) ID or equal to this will be excluded. search (`str`): The string to be used as a search query. admins (`entity` | `list`): If present, the events will be filtered by these admins (or single admin) and only those caused by them will be returned. join (`bool`): If `True`, events for when a user joined will be returned. leave (`bool`): If `True`, events for when a user leaves will be returned. invite (`bool`): If `True`, events for when a user joins through an invite link will be returned. restrict (`bool`): If `True`, events with partial restrictions will be returned. This is what the API calls "ban". unrestrict (`bool`): If `True`, events removing restrictions will be returned. This is what the API calls "unban". ban (`bool`): If `True`, events applying or removing all restrictions will be returned. This is what the API calls "kick" (restricting all permissions removed is a ban, which kicks the user). unban (`bool`): If `True`, events removing all restrictions will be returned. This is what the API calls "unkick". promote (`bool`): If `True`, events with admin promotions will be returned. demote (`bool`): If `True`, events with admin demotions will be returned. info (`bool`): If `True`, events changing the group info will be returned. settings (`bool`): If `True`, events changing the group settings will be returned. pinned (`bool`): If `True`, events of new pinned messages will be returned. edit (`bool`): If `True`, events of message edits will be returned. 
delete (`bool`): If `True`, events of message deletions will be returned. Yields Instances of `AdminLogEvent <telethon.tl.custom.adminlogevent.AdminLogEvent>`. Example .. code-block:: python async for event in client.iter_admin_log(channel): if event.changed_title: print('The title changed from', event.old, 'to', event.new) """ return _AdminLogIter( self, limit, entity=entity, admins=admins, search=search, min_id=min_id, max_id=max_id, join=join, leave=leave, invite=invite, restrict=restrict, unrestrict=unrestrict, ban=ban, unban=unban, promote=promote, demote=demote, info=info, settings=settings, pinned=pinned, edit=edit, delete=delete ) async def get_admin_log( self: 'TelegramClient', *args, **kwargs) -> 'hints.TotalList': """ Same as `iter_admin_log()`, but returns a ``list`` instead. Example .. code-block:: python # Get a list of deleted message events which said "heck" events = await client.get_admin_log(channel, search='heck', delete=True) # Print the old message before it was deleted print(events[0].old) """ return await self.iter_admin_log(*args, **kwargs).collect() get_admin_log.__signature__ = inspect.signature(iter_admin_log) def iter_profile_photos( self: 'TelegramClient', entity: 'hints.EntityLike', limit: int = None, *, offset: int = 0, max_id: int = 0) -> _ProfilePhotoIter: """ Iterator over a user's profile photos or a chat's photos. The order is from the most recent photo to the oldest. Arguments entity (`entity`): The entity from which to get the profile or chat photos. limit (`int` | `None`, optional): Number of photos to be retrieved. The limit may also be `None`, which would eventually all the photos that are still available. offset (`int`): How many photos should be skipped before returning the first one. max_id (`int`): The maximum ID allowed when fetching photos. Yields Instances of :tl:`Photo`. Example .. 
code-block:: python # Download all the profile photos of some user async for photo in client.iter_profile_photos(user): await client.download_media(photo) """ return _ProfilePhotoIter( self, limit, entity=entity, offset=offset, max_id=max_id ) async def get_profile_photos( self: 'TelegramClient', *args, **kwargs) -> 'hints.TotalList': """ Same as `iter_profile_photos()`, but returns a `TotalList <telethon.helpers.TotalList>` instead. Example .. code-block:: python # Get the photos of a channel photos = await client.get_profile_photos(channel) # Download the oldest photo await client.download_media(photos[-1]) """ return await self.iter_profile_photos(*args, **kwargs).collect() get_profile_photos.__signature__ = inspect.signature(iter_profile_photos) def action( self: 'TelegramClient', entity: 'hints.EntityLike', action: 'typing.Union[str, types.TypeSendMessageAction]', *, delay: float = 4, auto_cancel: bool = True) -> 'typing.Union[_ChatAction, typing.Coroutine]': """ Returns a context-manager object to represent a "chat action". Chat actions indicate things like "user is typing", "user is uploading a photo", etc. If the action is ``'cancel'``, you should just ``await`` the result, since it makes no sense to use a context-manager for it. See the example below for intended usage. Arguments entity (`entity`): The entity where the action should be showed in. action (`str` | :tl:`SendMessageAction`): The action to show. You can either pass a instance of :tl:`SendMessageAction` or better, a string used while: * ``'typing'``: typing a text message. * ``'contact'``: choosing a contact. * ``'game'``: playing a game. * ``'location'``: choosing a geo location. * ``'record-audio'``: recording a voice note. You may use ``'record-voice'`` as alias. * ``'record-round'``: recording a round video. * ``'record-video'``: recording a normal video. * ``'audio'``: sending an audio file (voice note or song). You may use ``'voice'`` and ``'song'`` as aliases. 
* ``'round'``: uploading a round video. * ``'video'``: uploading a video file. * ``'photo'``: uploading a photo. * ``'document'``: uploading a document file. You may use ``'file'`` as alias. * ``'cancel'``: cancel any pending action in this chat. Invalid strings will raise a ``ValueError``. delay (`int` | `float`): The delay, in seconds, to wait between sending actions. For example, if the delay is 5 and it takes 7 seconds to do something, three requests will be made at 0s, 5s, and 7s to cancel the action. auto_cancel (`bool`): Whether the action should be cancelled once the context manager exists or not. The default is `True`, since you don't want progress to be shown when it has already completed. Returns Either a context-manager object or a coroutine. Example .. code-block:: python # Type for 2 seconds, then send a message async with client.action(chat, 'typing'): await asyncio.sleep(2) await client.send_message(chat, 'Hello world! I type slow ^^') # Cancel any previous action await client.action(chat, 'cancel') # Upload a document, showing its progress (most clients ignore this) async with client.action(chat, 'document') as action: await client.send_file(chat, zip_file, progress_callback=action.progress) """ if isinstance(action, str): try: action = _ChatAction._str_mapping[action.lower()] except KeyError: raise ValueError('No such action "{}"'.format(action)) from None elif not isinstance(action, types.TLObject) or action.SUBCLASS_OF_ID != 0x20b2cc21: # 0x20b2cc21 = crc32(b'SendMessageAction') if isinstance(action, type): raise ValueError('You must pass an instance, not the class') else: raise ValueError('Cannot use {} as action'.format(action)) if isinstance(action, types.SendMessageCancelAction): # ``SetTypingRequest.resolve`` will get input peer of ``entity``. 
return self(functions.messages.SetTypingRequest( entity, types.SendMessageCancelAction())) return _ChatAction( self, entity, action, delay=delay, auto_cancel=auto_cancel) async def edit_admin( self: 'TelegramClient', entity: 'hints.EntityLike', user: 'hints.EntityLike', *, change_info: bool = None, post_messages: bool = None, edit_messages: bool = None, delete_messages: bool = None, ban_users: bool = None, invite_users: bool = None, pin_messages: bool = None, add_admins: bool = None, is_admin: bool = None, title: str = None) -> types.Updates: """ Edits admin permissions for someone in a chat. Raises an error if a wrong combination of rights are given (e.g. you don't have enough permissions to grant one). Unless otherwise stated, permissions will work in channels and megagroups. Arguments entity (`entity`): The channel, megagroup or chat where the promotion should happen. user (`entity`): The user to be promoted. change_info (`bool`, optional): Whether the user will be able to change info. post_messages (`bool`, optional): Whether the user will be able to post in the channel. This will only work in broadcast channels. edit_messages (`bool`, optional): Whether the user will be able to edit messages in the channel. This will only work in broadcast channels. delete_messages (`bool`, optional): Whether the user will be able to delete messages. ban_users (`bool`, optional): Whether the user will be able to ban users. invite_users (`bool`, optional): Whether the user will be able to invite users. Needs some testing. pin_messages (`bool`, optional): Whether the user will be able to pin messages. add_admins (`bool`, optional): Whether the user will be able to add admins. <|fim▁hole|> Whether the user will be an admin in the chat. This is the only permission available in small group chats, and when used in megagroups, all non-explicitly set permissions will have this value. 
Essentially, only passing ``is_admin=True`` will grant all permissions, but you can still disable those you need. title (`str`, optional): The custom title (also known as "rank") to show for this admin. This text will be shown instead of the "admin" badge. This will only work in channels and megagroups. When left unspecified or empty, the default localized "admin" badge will be shown. Returns The resulting :tl:`Updates` object. Example .. code-block:: python # Allowing `user` to pin messages in `chat` await client.edit_admin(chat, user, pin_messages=True) # Granting all permissions except for `add_admins` await client.edit_admin(chat, user, is_admin=True, add_admins=False) """ entity = await self.get_input_entity(entity) user = await self.get_input_entity(user) ty = helpers._entity_type(user) if ty != helpers._EntityType.USER: raise ValueError('You must pass a user entity') perm_names = ( 'change_info', 'post_messages', 'edit_messages', 'delete_messages', 'ban_users', 'invite_users', 'pin_messages', 'add_admins' ) ty = helpers._entity_type(entity) if ty == helpers._EntityType.CHANNEL: # If we try to set these permissions in a megagroup, we # would get a RIGHT_FORBIDDEN. However, it makes sense # that an admin can post messages, so we want to avoid the error if post_messages or edit_messages: # TODO get rid of this once sessions cache this information if entity.channel_id not in self._megagroup_cache: full_entity = await self.get_entity(entity) self._megagroup_cache[entity.channel_id] = full_entity.megagroup if self._megagroup_cache[entity.channel_id]: post_messages = None edit_messages = None perms = locals() return await self(functions.channels.EditAdminRequest(entity, user, types.ChatAdminRights(**{ # A permission is its explicit (not-None) value or `is_admin`. # This essentially makes `is_admin` be the default value. 
name: perms[name] if perms[name] is not None else is_admin for name in perm_names }), rank=title or '')) elif ty == helpers._EntityType.CHAT: # If the user passed any permission in a small # group chat, they must be a full admin to have it. if is_admin is None: is_admin = any(locals()[x] for x in perm_names) return await self(functions.messages.EditChatAdminRequest( entity, user, is_admin=is_admin)) else: raise ValueError('You can only edit permissions in groups and channels') async def edit_permissions( self: 'TelegramClient', entity: 'hints.EntityLike', user: 'typing.Optional[hints.EntityLike]' = None, until_date: 'hints.DateLike' = None, *, view_messages: bool = True, send_messages: bool = True, send_media: bool = True, send_stickers: bool = True, send_gifs: bool = True, send_games: bool = True, send_inline: bool = True, send_polls: bool = True, change_info: bool = True, invite_users: bool = True, pin_messages: bool = True) -> types.Updates: """ Edits user restrictions in a chat. Set an argument to `False` to apply a restriction (i.e. remove the permission), or omit them to use the default `True` (i.e. don't apply a restriction). Raises an error if a wrong combination of rights are given (e.g. you don't have enough permissions to revoke one). By default, each boolean argument is `True`, meaning that it is true that the user has access to the default permission and may be able to make use of it. If you set an argument to `False`, then a restriction is applied regardless of the default permissions. It is important to note that `True` does *not* mean grant, only "don't restrict", and this is where the default permissions come in. A user may have not been revoked the ``pin_messages`` permission (it is `True`) but they won't be able to use it if the default permissions don't allow it either. Arguments entity (`entity`): The channel or megagroup where the restriction should happen. 
user (`entity`, optional): If specified, the permission will be changed for the specific user. If left as `None`, the default chat permissions will be updated. until_date (`DateLike`, optional): When the user will be unbanned. If the due date or duration is longer than 366 days or shorter than 30 seconds, the ban will be forever. Defaults to ``0`` (ban forever). view_messages (`bool`, optional): Whether the user is able to view messages or not. Forbidding someone from viewing messages equals to banning them. This will only work if ``user`` is set. send_messages (`bool`, optional): Whether the user is able to send messages or not. send_media (`bool`, optional): Whether the user is able to send media or not. send_stickers (`bool`, optional): Whether the user is able to send stickers or not. send_gifs (`bool`, optional): Whether the user is able to send animated gifs or not. send_games (`bool`, optional): Whether the user is able to send games or not. send_inline (`bool`, optional): Whether the user is able to use inline bots or not. send_polls (`bool`, optional): Whether the user is able to send polls or not. change_info (`bool`, optional): Whether the user is able to change info or not. invite_users (`bool`, optional): Whether the user is able to invite other users or not. pin_messages (`bool`, optional): Whether the user is able to pin messages or not. Returns The resulting :tl:`Updates` object. Example .. 
code-block:: python from datetime import timedelta # Banning `user` from `chat` for 1 minute await client.edit_permissions(chat, user, timedelta(minutes=1), view_messages=False) # Banning `user` from `chat` forever await client.edit_permissions(chat, user, view_messages=False) # Kicking someone (ban + un-ban) await client.edit_permissions(chat, user, view_messages=False) await client.edit_permissions(chat, user) """ entity = await self.get_input_entity(entity) ty = helpers._entity_type(entity) if ty != helpers._EntityType.CHANNEL: raise ValueError('You must pass either a channel or a supergroup') rights = types.ChatBannedRights( until_date=until_date, view_messages=not view_messages, send_messages=not send_messages, send_media=not send_media, send_stickers=not send_stickers, send_gifs=not send_gifs, send_games=not send_games, send_inline=not send_inline, send_polls=not send_polls, change_info=not change_info, invite_users=not invite_users, pin_messages=not pin_messages ) if user is None: return await self(functions.messages.EditChatDefaultBannedRightsRequest( peer=entity, banned_rights=rights )) user = await self.get_input_entity(user) ty = helpers._entity_type(user) if ty != helpers._EntityType.USER: raise ValueError('You must pass a user entity') if isinstance(user, types.InputPeerSelf): raise ValueError('You cannot restrict yourself') return await self(functions.channels.EditBannedRequest( channel=entity, user_id=user, banned_rights=rights )) async def kick_participant( self: 'TelegramClient', entity: 'hints.EntityLike', user: 'typing.Optional[hints.EntityLike]' ): """ Kicks a user from a chat. Kicking yourself (``'me'``) will result in leaving the chat. .. note:: Attempting to kick someone who was banned will remove their restrictions (and thus unbanning them), since kicking is just ban + unban. Arguments entity (`entity`): The channel or chat where the user should be kicked from. user (`entity`, optional): The user to kick. Example .. 
code-block:: python # Kick some user from some chat await client.kick_participant(chat, user) # Leaving chat await client.kick_participant(chat, 'me') """ entity = await self.get_input_entity(entity) user = await self.get_input_entity(user) if helpers._entity_type(user) != helpers._EntityType.USER: raise ValueError('You must pass a user entity') ty = helpers._entity_type(entity) if ty == helpers._EntityType.CHAT: await self(functions.messages.DeleteChatUserRequest(entity.chat_id, user)) elif ty == helpers._EntityType.CHANNEL: if isinstance(user, types.InputPeerSelf): await self(functions.channels.LeaveChannelRequest(entity)) else: await self(functions.channels.EditBannedRequest( channel=entity, user_id=user, banned_rights=types.ChatBannedRights(until_date=None, view_messages=True) )) await asyncio.sleep(0.5) await self(functions.channels.EditBannedRequest( channel=entity, user_id=user, banned_rights=types.ChatBannedRights(until_date=None) )) else: raise ValueError('You must pass either a channel or a chat') # endregion<|fim▁end|>
is_admin (`bool`, optional): Whether the user will be an admin in the chat. This will only work in small group chats.
<|file_name|>ModelDestination.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2007, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that<|fim▁hole|> * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package com.sun.media.sound; /** * This class is used to identify destinations in connection blocks, * see ModelConnectionBlock. 
* * @author Karl Helgason */ public final class ModelDestination { public static final ModelIdentifier DESTINATION_NONE = null; public static final ModelIdentifier DESTINATION_KEYNUMBER = new ModelIdentifier("noteon", "keynumber"); public static final ModelIdentifier DESTINATION_VELOCITY = new ModelIdentifier("noteon", "velocity"); public static final ModelIdentifier DESTINATION_PITCH = new ModelIdentifier("osc", "pitch"); // cent public static final ModelIdentifier DESTINATION_GAIN = new ModelIdentifier("mixer", "gain"); // cB public static final ModelIdentifier DESTINATION_PAN = new ModelIdentifier("mixer", "pan"); // 0.1 % public static final ModelIdentifier DESTINATION_REVERB = new ModelIdentifier("mixer", "reverb"); // 0.1 % public static final ModelIdentifier DESTINATION_CHORUS = new ModelIdentifier("mixer", "chorus"); // 0.1 % public static final ModelIdentifier DESTINATION_LFO1_DELAY = new ModelIdentifier("lfo", "delay", 0); // timecent public static final ModelIdentifier DESTINATION_LFO1_FREQ = new ModelIdentifier("lfo", "freq", 0); // cent public static final ModelIdentifier DESTINATION_LFO2_DELAY = new ModelIdentifier("lfo", "delay", 1); // timecent public static final ModelIdentifier DESTINATION_LFO2_FREQ = new ModelIdentifier("lfo", "freq", 1); // cent public static final ModelIdentifier DESTINATION_EG1_DELAY = new ModelIdentifier("eg", "delay", 0); // timecent public static final ModelIdentifier DESTINATION_EG1_ATTACK = new ModelIdentifier("eg", "attack", 0); // timecent public static final ModelIdentifier DESTINATION_EG1_HOLD = new ModelIdentifier("eg", "hold", 0); // timecent public static final ModelIdentifier DESTINATION_EG1_DECAY = new ModelIdentifier("eg", "decay", 0); // timecent public static final ModelIdentifier DESTINATION_EG1_SUSTAIN = new ModelIdentifier("eg", "sustain", 0); // 0.1 % (I want this to be value not %) public static final ModelIdentifier DESTINATION_EG1_RELEASE = new ModelIdentifier("eg", "release", 0); // timecent public 
static final ModelIdentifier DESTINATION_EG1_SHUTDOWN = new ModelIdentifier("eg", "shutdown", 0); // timecent public static final ModelIdentifier DESTINATION_EG2_DELAY = new ModelIdentifier("eg", "delay", 1); // timecent public static final ModelIdentifier DESTINATION_EG2_ATTACK = new ModelIdentifier("eg", "attack", 1); // timecent public static final ModelIdentifier DESTINATION_EG2_HOLD = new ModelIdentifier("eg", "hold", 1); // 0.1 % public static final ModelIdentifier DESTINATION_EG2_DECAY = new ModelIdentifier("eg", "decay", 1); // timecent public static final ModelIdentifier DESTINATION_EG2_SUSTAIN = new ModelIdentifier("eg", "sustain", 1); // 0.1 % ( I want this to be value not %) public static final ModelIdentifier DESTINATION_EG2_RELEASE = new ModelIdentifier("eg", "release", 1); // timecent public static final ModelIdentifier DESTINATION_EG2_SHUTDOWN = new ModelIdentifier("eg", "shutdown", 1); // timecent public static final ModelIdentifier DESTINATION_FILTER_FREQ = new ModelIdentifier("filter", "freq", 0); // cent public static final ModelIdentifier DESTINATION_FILTER_Q = new ModelIdentifier("filter", "q", 0); // cB private ModelIdentifier destination = DESTINATION_NONE; private ModelTransform transform = new ModelStandardTransform(); public ModelDestination() { } public ModelDestination(ModelIdentifier id) { destination = id; } public ModelIdentifier getIdentifier() { return destination; } public void setIdentifier(ModelIdentifier destination) { this.destination = destination; } public ModelTransform getTransform() { return transform; } public void setTransform(ModelTransform transform) { this.transform = transform; } }<|fim▁end|>
* accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation,
<|file_name|>ResponseCode.java<|end_file_name|><|fim▁begin|>/* * Copyright 1999-2020 Alibaba Group Holding Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.nacos.api.remote.response; /** * ResponseCode. * * @author liuzunfei * @version $Id: ResponseCode.java, v 0.1 2020年07月14日 2:04 PM liuzunfei Exp $ */ public enum ResponseCode { /** * Request success. */ SUCCESS(200, "Response ok"), /** * Request failed. */ FAIL(500, "Response fail"); int code; String desc; ResponseCode(int code, String desc) { this.code = code; this.desc = desc; } /** * Getter method for property <tt>code</tt>. * * @return property value of code */ public int getCode() { return code; } /** * Setter method for property <tt>code</tt>. * * @param code value to be assigned to property code */<|fim▁hole|> public void setCode(int code) { this.code = code; } /** * Getter method for property <tt>desc</tt>. * * @return property value of desc */ public String getDesc() { return desc; } /** * Setter method for property <tt>desc</tt>. * * @param desc value to be assigned to property desc */ public void setDesc(String desc) { this.desc = desc; } }<|fim▁end|>
<|file_name|>loops-reject-labels-shadowing-lifetimes.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Issue #21633: reject duplicate loop labels in function bodies. // This is testing interaction between lifetime-params and labels. // compile-pass #![allow(dead_code, unused_variables)] fn foo() { fn foo<'a>() { 'a: loop { break 'a; } //~^ WARN label name `'a` shadows a lifetime name that is already in scope } struct Struct<'b, 'c> { _f: &'b i8, _g: &'c i8 } enum Enum<'d, 'e> { A(&'d i8), B(&'e i8) } impl<'d, 'e> Struct<'d, 'e> { fn meth_okay() { 'a: loop { break 'a; } 'b: loop { break 'b; } 'c: loop { break 'c; } } } impl <'d, 'e> Enum<'d, 'e> { fn meth_okay() { 'a: loop { break 'a; } 'b: loop { break 'b; } 'c: loop { break 'c; } } } impl<'bad, 'c> Struct<'bad, 'c> { fn meth_bad(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } impl<'b, 'bad> Struct<'b, 'bad> { fn meth_bad2(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } impl<'b, 'c> Struct<'b, 'c> { fn meth_bad3<'bad>(x: &'bad i8) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } fn meth_bad4<'a,'bad>(x: &'a i8, y: &'bad i8) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } impl <'bad, 'e> Enum<'bad, 'e> { fn meth_bad(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already 
in scope } } impl <'d, 'bad> Enum<'d, 'bad> { fn meth_bad2(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } impl <'d, 'e> Enum<'d, 'e> { fn meth_bad3<'bad>(x: &'bad i8) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } fn meth_bad4<'a,'bad>(x: &'bad i8) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } trait HasDefaultMethod1<'bad> { fn meth_okay() { 'c: loop { break 'c; } } fn meth_bad(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } trait HasDefaultMethod2<'a,'bad> {<|fim▁hole|> 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } trait HasDefaultMethod3<'a,'b> { fn meth_bad<'bad>(&self) { 'bad: loop { break 'bad; } //~^ WARN label name `'bad` shadows a lifetime name that is already in scope } } } pub fn main() { foo(); }<|fim▁end|>
fn meth_bad(&self) {
<|file_name|>TileWideSmallImageAndText01-success.js<|end_file_name|><|fim▁begin|>exports.setupMockScopes = function (nock) { var scopes = []; var scope; scope = nock('https://login.live.com:443') .post('/accesstoken.srf', "grant_type=client_credentials&client_id=ms-app%3A%2F%2Fs-1-15-2-145565886-1510793020-2797717260-1526195933-3912359816-44086043-2211002316&client_secret=FF9yfJLxSH3uI32wNKGye643bAZ4zBz7&scope=notify.windows.com") .reply(200, "{\"token_type\":\"bearer\",\"access_token\":\"EgAaAQMAAAAEgAAACoAAnbulrnfYRgTGFe6tudBvFCf/ng+gabEI1++PZpSEktmcoU3/Tj9tEKuZj4Q1eV+UJhR4DWtNgnpVgBq9CkTQfEyCqZ/kUWtlAk98dwXfsmFJMI5FL/AvPnD2CCNiXtuTNCs/HB10Hbr1ZTemjbdby5Ht8AIhQqr9Cz7KI6sZM5eJAFoAiQAAAAAAaoEORO9LJFHvSyRR60gEAA0ANjcuMTg1LjE0OC44AAAAAABcAG1zLWFwcDovL3MtMS0xNS0yLTE0NTU2NTg4Ni0xNTEwNzkzMDIwLTI3OTc3MTcyNjAtMTUyNjE5NTkzMy0zOTEyMzU5ODE2LTQ0MDg2MDQzLTIyMTEwMDIzMTYA\",\"expires_in\":86400}", { 'cache-control': 'no-store', 'content-length': '436', 'content-type': 'application/json', server: 'Microsoft-IIS/7.5', ppserver: 'PPV: 30 H: BAYIDSLGN1I33 V: 0', date: 'Wed, 20 Feb 2013 04:07:10 GMT', connection: 'close' }); scopes.push(scope);scope = nock('https://bn1.notify.windows.com:443') .post('/?token=AgYAAACFGdWBiRCTypHebfvngI7DuNBXWuGjdiczDOZ7bSgkbCRrD2M1b10CpzCmipzknHbU4nLzapQbooXzJ%2fVwHAfSl%2fWMk8OsetohEVMlsIicoLP99rDg7g2AdENA99DZoAU%3d', "<tile><visual><binding template=\"TileWideSmallImageAndText01\"><image id=\"1\" src=\"http://textParam1.com\" alt=\"http://textParam2.com\"/><text id=\"1\">http://textParam3.com</text></binding></visual></tile>") .reply(200, "", { 'content-length': '0', 'x-wns-notificationstatus': 'received',<|fim▁hole|> 'x-wns-debug-trace': 'BN1WNS1011837', date: 'Wed, 20 Feb 2013 04:07:12 GMT' }); scopes.push(scope);return scopes; };<|fim▁end|>
'x-wns-msg-id': '72C92964293B8020',
<|file_name|>message_value.cpp<|end_file_name|><|fim▁begin|>#include "message_value.h" //#include <iostream> MessageValue::MessageValue() : message_array(NULL), sequence_name(NULL) {} MessageValue::MessageValue(char type, char* name) : message_type(type) { sequence_name = name; length = 1 + strlen(sequence_name) + 1; if (message_array != NULL) { delete[] message_array; message_array = NULL; } message_array = new char[length]; message_array[0] = message_type; strcpy(&message_array[1], sequence_name); } MessageValue::MessageValue(char* message) { if (message[0] == CURRENT_LABEL || message[0] == INCOMING_LABEL || message[0] == NEIGHBOR_NODE) { message_type = message[0]; sequence_name = &message[1]; } else { message_type = NEIGHBOR_NODE; sequence_name = message; } length = 1 + strlen(sequence_name) + 1; if (message_array != NULL) { delete[] message_array; message_array = NULL; } message_array = new char[length]; message_array[0] = message_type; strcpy(&message_array[1], sequence_name); } MessageValue::~MessageValue() { if (message_array) { //std::cout << "deleting array " << std::endl; //std::cout << message_array << std::endl; delete[] message_array; message_array = NULL; //std::cout << "it's now deleted" << std::endl; } //std::cout << "exiting destructor" << std::endl; } MessageValue::MessageValue(const MessageValue& rhs) {<|fim▁hole|> delete[] (this -> message_array); this -> message_array = NULL; } this -> message_array = new char[length]; memcpy(this -> message_array, rhs.message_array, length); } char* MessageValue::get_message_array() { return message_array; } void MessageValue::set_name(char* name) { strcpy(sequence_name, name); } char* MessageValue::get_name() { return sequence_name; } void MessageValue::set_type(char type) { message_type = type; if (message_array != NULL) { message_array[0] = type; } } char MessageValue::get_type() { return message_type; } MessageValue& MessageValue::operator=( const MessageValue& rhs ) { if (this -> message_array != NULL) { 
delete[] (this -> message_array); this -> message_array = NULL; } this -> length = rhs.length; this -> sequence_name = rhs.sequence_name; this -> message_type = rhs.message_type; this -> message_array = new char[length]; memcpy(this -> message_array, rhs.message_array, length); } bool MessageValue::operator==(MessageValue& rhs) { return ( !strcmp( this -> sequence_name, rhs.get_name() ) && this -> message_type == rhs.get_type() ); } void MessageValue::assign(MessageValue* rhs) { this -> message_type = rhs -> get_type(); this -> sequence_name = rhs -> get_name(); if (this -> message_array != NULL) { delete[] message_array; message_array = NULL; } length = rhs -> get_length(); message_array = new char[length]; strcpy( this -> message_array, rhs -> get_message_array()); } int MessageValue::get_length() { return length; }<|fim▁end|>
this -> length = rhs.length; this -> sequence_name = rhs.sequence_name; this -> message_type = rhs.message_type; if (this -> message_array != NULL) {
<|file_name|>bs_erf_numba_guvec_par.py<|end_file_name|><|fim▁begin|># Copyright (C) 2017-2018 Intel Corporation # # SPDX-License-Identifier: MIT import base_bs_erf import numba as nb from math import log, sqrt, exp, erf def black_scholes_numba_opt(price, strike, t, mr, sig_sig_two, vol, call, put): P = float( price [0] ) S = strike [0] T = t [0] a = log(P / S) b = T * mr[0] z = T * sig_sig_two[0] c = 0.25 * z y = 1./sqrt(z) w1 = (a - b + c) * y w2 = (a - b - c) * y d1 = 0.5 + 0.5 * erf(w1) d2 = 0.5 + 0.5 * erf(w2) Se = exp(b) * S res = P * d1 - Se * d2 call [0] = res put [0] = res - P + Se black_scholes_numba_opt_vec = nb.guvectorize('(f8[::1],f8[::1],f8[::1],f8[:],f8[:],f8[:],f8[::1],f8[::1])', '(),(),(),(),(),()->(),()', nopython=True, target="parallel", fastmath=False)(black_scholes_numba_opt) @nb.jit def black_scholes(nopt, price, strike, t, rate, vol, call, put): sig_sig_two = vol*vol*2 mr = -rate black_scholes_numba_opt_vec(price, strike, t, mr, sig_sig_two, vol, call, put) <|fim▁hole|><|fim▁end|>
base_bs_erf.run("Numba@guvec-par", black_scholes, pass_args=True)
<|file_name|>ObservedMethodsResouce.java<|end_file_name|><|fim▁begin|>package org.valuereporter.observation; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; import org.valuereporter.QueryOperations; import org.valuereporter.WriteOperations; import javax.ws.rs.*; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.ArrayList; import java.util.List; /** * @author <a href="mailto:[email protected]">Erik Drolshammer</a> */ @Component @Path("/observedmethods") public class ObservedMethodsResouce { private static final Logger log = LoggerFactory.getLogger(ObservedMethodsResouce.class); private final QueryOperations queryOperations; private final WriteOperations writeOperations; private final ObjectMapper mapper; /** @Autowired public ObservedMethodsResouce(QueryOperations queryOperations, WriteOperations writeOperations, ObjectMapper mapper) { this.queryOperations = queryOperations; this.writeOperations = writeOperations; this.mapper = mapper; } **/ @Autowired public ObservedMethodsResouce(ObservationsService observationsService, ObjectMapper mapper) { this.queryOperations = observationsService; this.writeOperations = observationsService; this.mapper = mapper; } //http://localhost:4901/reporter/observe/observedmethods/{prefix}/{name} /** * A request with no filtering parameters should return a list of all observations. 
* * @param prefix prefix used to identify running process * @param name package.classname.method * @return List of observations */ @GET @Path("/{prefix}/{name}") @Produces(MediaType.APPLICATION_JSON) public Response findObservationsByName(@PathParam("prefix") String prefix,@PathParam("name") String name) { final List<ObservedMethod> observedMethods; //Should also support no queryParams -> findAll if (name != null ) { log.trace("findObservationsByName name={}", name); observedMethods = queryOperations.findObservationsByName(prefix, name); } else { throw new UnsupportedOperationException("You must supply a name. <package.classname.method>"); } Writer strWriter = new StringWriter(); try { mapper.writeValue(strWriter, observedMethods); <|fim▁hole|> return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build(); } return Response.ok(strWriter.toString()).build(); } //http://localhost:4901/reporter/observe/observedmethods/{prefix} @POST @Path("/{prefix}") @Produces(MediaType.APPLICATION_JSON) public Response addObservationMethod(@PathParam("prefix") String prefix, String jsonBody){ log.trace("addObservationMethod prefix {} , jsonBody {}.", prefix, jsonBody); List<ObservedMethod> observedMethods = null; try { observedMethods = mapper.readValue(jsonBody, new TypeReference<ArrayList<ObservedMethodJson>>(){ }); if (observedMethods != null) { for (ObservedMethod observedMethod : observedMethods) { observedMethod.setPrefix(prefix); } } } catch (IOException e) { log.warn("Unexpected error trying to produce list of ObservedMethod from \n prefix {} \n json {}, \n Reason {}",prefix, jsonBody, e.getMessage()); return Response.status(Response.Status.NOT_ACCEPTABLE).entity("Error converting to requested format.").build(); } long updatedCount = writeOperations.addObservations(prefix,observedMethods); String message = "added " + updatedCount + " observedMethods."; Writer strWriter = new StringWriter(); try { 
mapper.writeValue(strWriter, message); } catch (IOException e) { log.error("Could not convert {} to JSON.", updatedCount, e); return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity("Error converting to requested format.").build(); } return Response.ok(strWriter.toString()).build(); } }<|fim▁end|>
} catch (IOException e) { log.error("Could not convert {} ObservedMethod to JSON.", observedMethods.size(), e);
<|file_name|>GFitsBinTable.hpp<|end_file_name|><|fim▁begin|>/*************************************************************************** * GFitsBinTable.hpp - FITS binary table class * * ----------------------------------------------------------------------- * * copyright (C) 2008-2018 by Juergen Knoedlseder * * ----------------------------------------------------------------------- * * * * This program is free software: you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation, either version 3 of the License, or * * (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, * * but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * * GNU General Public License for more details. * * * * You should have received a copy of the GNU General Public License * * along with this program. If not, see <http://www.gnu.org/licenses/>. 
* * * ***************************************************************************/ /** * @file GFitsBinTable.hpp * @brief FITS binary table class definition * @author Juergen Knoedlseder */ #ifndef GFITSBINTABLE_HPP #define GFITSBINTABLE_HPP /* __ Includes ___________________________________________________________ */ #include "GFitsTable.hpp" /***********************************************************************//** * @class GFitsBinTable * * @brief FITS binary table class ***************************************************************************/ class GFitsBinTable : public GFitsTable { public: // Constructors and destructors GFitsBinTable(void); explicit GFitsBinTable(const int& nrows); GFitsBinTable(const GFitsBinTable& table); virtual ~GFitsBinTable(void); // Operators GFitsBinTable& operator=(const GFitsBinTable& table); <|fim▁hole|> virtual GFitsBinTable* clone(void) const; virtual std::string classname(void) const; HDUType exttype(void) const; private: // Private methods void init_members(void); void copy_members(const GFitsBinTable& table); void free_members(void); void init_table_header(void); }; /***********************************************************************//** * @brief Return class name * * @return String containing the class name ("GFitsBinTable"). ***************************************************************************/ inline std::string GFitsBinTable::classname(void) const { return ("GFitsBinTable"); } /***********************************************************************//** * @brief Return extension type * * @return Extension type (HT_BIN_TABLE). ***************************************************************************/ inline GFitsHDU::HDUType GFitsBinTable::exttype(void) const { return (HT_BIN_TABLE); } #endif /* GFITSBINTABLE_HPP */<|fim▁end|>
// Implemented pure virtual methods virtual void clear(void);
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import sys,os import numpy as np #os.environ["EPICS_CA_AUTO_ADDR_LIST"] = "NO" #os.environ["EPICS_CA_ADDR_LIST"] = "192.168.82.10"<|fim▁hole|> a = VIMC.velaINJMagnetController(True,False) print( np.array(a.getQuadNames()))<|fim▁end|>
#os.environ["EPICS_CA_MAX_ARRAY_BYTES"] = "10000000000" import velaINJMagnetControl as VIMC
<|file_name|>gcc_preprocess.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import optparse import os import subprocess import sys from util import build_utils def DoGcc(options): build_utils.MakeDirectory(os.path.dirname(options.output)) gcc_cmd = [ 'gcc', # invoke host gcc. '-E', # stop after preprocessing. '-D', 'ANDROID', # Specify ANDROID define for pre-processor. '-x', 'c-header', # treat sources as C header files '-P', # disable line markers, i.e. '#line 309' '-I', options.include_path, '-o', options.output, options.template ] build_utils.CheckCallDie(gcc_cmd) def main(argv): parser = optparse.OptionParser() parser.add_option('--include-path', help='Include path for gcc.') parser.add_option('--template', help='Path to template.') parser.add_option('--output', help='Path for generated file.') parser.add_option('--stamp', help='Path to touch on success.') # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja. parser.add_option('--ignore', help='Ignored.') options, _ = parser.parse_args() DoGcc(options) if options.stamp: build_utils.Touch(options.stamp)<|fim▁hole|> if __name__ == '__main__': sys.exit(main(sys.argv))<|fim▁end|>
<|file_name|>unpack_pak_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import unpack_pak import unittest class UnpackPakTest(unittest.TestCase): def testMapFileLine(self): self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH}')) def testGzippedMapFileLine(self): self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, false}')) self.assertTrue(unpack_pak.ParseLine(' {"path.js", IDR_PATH, true}')) <|fim▁hole|> def testGetFileAndDirName(self): (f, d) = unpack_pak.GetFileAndDirName( 'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', 'a/b.js') self.assertEquals('b.js', f) self.assertEquals('out/build/gen/foo/foo.unpak/a', d) def testGetFileAndDirNameForGeneratedResource(self): (f, d) = unpack_pak.GetFileAndDirName( 'out/build/gen/foo/foo.unpak', 'out/build/gen/foo', '@out_folder@/out/build/gen/foo/a/b.js') self.assertEquals('b.js', f) self.assertEquals('out/build/gen/foo/foo.unpak/a', d) if __name__ == '__main__': unittest.main()<|fim▁end|>
<|file_name|>p24.rs<|end_file_name|><|fim▁begin|>use rand::{weak_rng, Rng, SeedableRng}; use util::{MT19937Rng, prng_crypt}; <|fim▁hole|>#[test] fn run() { let mut wk_rng = weak_rng(); // rng seeded from weak 16-bit seed let seed = wk_rng.next_u32() & 0xFFFF; let mut ks_rng = MT19937Rng::from_seed(seed); let attack_input = "AAAAAAAAAAAAAA".as_bytes(); let rand_prefix: [u8; 2] = wk_rng.gen(); let mut input = Vec::new(); input.extend_from_slice(&rand_prefix); input.extend_from_slice(attack_input); let ctxt = prng_crypt(&mut ks_rng, &input); let test_input = "__AAAAAAAAAAAAAA".as_bytes(); // brute force seeds let rec_seed = (0..(1 << 16)) .find(|&s: &u32| { ks_rng.reseed(s); let test_ctxt = prng_crypt(&mut ks_rng, &test_input); &ctxt[2..] == &test_ctxt[2..] }); assert!(rec_seed.is_some()); assert_eq!(seed, rec_seed.unwrap()); }<|fim▁end|>
<|file_name|>0260_auto_20180416_1839.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.10 on 2018-04-16 16:39 from __future__ import unicode_literals import base.models.learning_unit_year import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('base', '0259_auto_20180416_1404'), ] operations = [ migrations.RemoveField( model_name='learningunit', name='acronym', ), migrations.RemoveField( model_name='learningunit', name='title', ), migrations.AlterField( model_name='learningcontaineryear', name='common_title', field=models.CharField(blank=True, max_length=255, null=True, verbose_name='common_official_title'), ), migrations.AlterField( model_name='learningcontaineryear', name='common_title_english', field=models.CharField(blank=True, max_length=250, null=True, verbose_name='common_official_english_title'), ), migrations.AlterField( model_name='learningcontaineryear', name='container_type', field=models.CharField(choices=[('COURSE', 'COURSE'), ('INTERNSHIP', 'INTERNSHIP'), ('DISSERTATION', 'DISSERTATION'), ('OTHER_COLLECTIVE', 'OTHER_COLLECTIVE'), ('OTHER_INDIVIDUAL', 'OTHER_INDIVIDUAL'), ('MASTER_THESIS', 'MASTER_THESIS'), ('EXTERNAL', 'EXTERNAL')], max_length=20, verbose_name='type'),<|fim▁hole|> migrations.AlterField( model_name='learningcontaineryear', name='language', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='reference.Language', verbose_name='language'), ), migrations.AlterField( model_name='learningunit', name='faculty_remark', field=models.TextField(blank=True, null=True, verbose_name='faculty_remark'), ), migrations.AlterField( model_name='learningunit', name='other_remark', field=models.TextField(blank=True, null=True, verbose_name='other_remark'), ), migrations.AlterField( model_name='learningunit', name='periodicity', field=models.CharField(choices=[('ANNUAL', 'ANNUAL'), 
('BIENNIAL_EVEN', 'BIENNIAL_EVEN'), ('BIENNIAL_ODD', 'BIENNIAL_ODD')], default='ANNUAL', max_length=20, verbose_name='periodicity'), ), migrations.AlterField( model_name='learningunityear', name='academic_year', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.AcademicYear', validators=[base.models.learning_unit_year.academic_year_validator], verbose_name='academic_year'), ), migrations.AlterField( model_name='learningunityear', name='acronym', field=models.CharField(db_index=True, max_length=15, validators=[django.core.validators.RegexValidator('^[BLMW][A-Z]{2,4}\\d{4}[A-Z0-9]{0,1}$')], verbose_name='code'), ), migrations.AlterField( model_name='learningunityear', name='internship_subtype', field=models.CharField(blank=True, choices=[('TEACHING_INTERNSHIP', 'TEACHING_INTERNSHIP'), ('CLINICAL_INTERNSHIP', 'CLINICAL_INTERNSHIP'), ('PROFESSIONAL_INTERNSHIP', 'PROFESSIONAL_INTERNSHIP'), ('RESEARCH_INTERNSHIP', 'RESEARCH_INTERNSHIP')], max_length=250, null=True, verbose_name='internship_subtype'), ), migrations.AlterField( model_name='learningunityear', name='quadrimester', field=models.CharField(blank=True, choices=[('Q1', 'Q1'), ('Q2', 'Q2'), ('Q1&2', 'Q1&2'), ('Q1|2', 'Q1|2'), ('Q3', 'Q3')], max_length=4, null=True, verbose_name='quadrimester'), ), migrations.AlterField( model_name='learningunityear', name='specific_title', field=models.CharField(blank=True, max_length=255, null=True, verbose_name='official_title_proper_to_UE'), ), migrations.AlterField( model_name='learningunityear', name='specific_title_english', field=models.CharField(blank=True, max_length=250, null=True, verbose_name='official_english_title_proper_to_UE'), ), migrations.AlterField( model_name='learningunityear', name='status', field=models.BooleanField(default=False, verbose_name='active_title'), ), migrations.AlterField( model_name='learningunityear', name='subtype', field=models.CharField(choices=[('FULL', 'FULL'), ('PARTIM', 'PARTIM')], default='FULL', 
max_length=50), ), ]<|fim▁end|>
),
<|file_name|>test_babymaker.py<|end_file_name|><|fim▁begin|>from babymaker import BabyMaker, EnumType, IntType, StringType, UUIDType, FieldType, DatetimeType, FloatType, EmbedType import unittest import string import sys from datetime import datetime, timedelta class TestMakeSomeBabies(unittest.TestCase): def test_make_one(self): fields = { "id": UUIDType() } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() self.assertTrue("id" in one) def test_make_some(self): fields = { "id": UUIDType() } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(8)) self.assertEquals(len(some), 8) for one in some: self.assertTrue("id" in one) def test_uuid_field_hex_format(self): fields = { "id": UUIDType(format="hex_str") } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertEquals(len(the_id), 32) for char in the_id: self.assertTrue(char in string.hexdigits) def test_uuid_field_default_format(self): fields = { "id": UUIDType() } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertEquals(len(the_id), 36) for char in the_id: self.assertTrue(char in string.hexdigits + "-") def test_uuid_field_int_format(self): fields = { "id": UUIDType("int") } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, int) def test_uuid_field_int_str_format(self): fields = { "id": UUIDType("int_str") } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one()<|fim▁hole|> will_it_blend = int(the_id) def test_int_field(self): fields = { "id": IntType(min_value=10, max_value=11) } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, int) self.assertTrue(the_id >= 10) self.assertTrue(the_id <= 11) fields = { "id": IntType() } 
female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, int) self.assertTrue(the_id >= 0) self.assertTrue(the_id <= sys.maxsize) def test_float_field(self): fields = { "id": FloatType(min_value=2.0, max_value=10.0) } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, float) self.assertTrue(the_id >= 2.0) self.assertTrue(the_id <= 10.0) fields = { "id": FloatType() } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, float) self.assertTrue(the_id >= 0.0) self.assertTrue(the_id <= 1.0) for baby in female_of_the_species.make_some(100): the_id = baby.get("id") self.assertIsInstance(the_id, float) self.assertTrue(the_id >= 0.0) self.assertTrue(the_id <= 1.0) fields = { "id": FloatType(min_value=1.0) } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, float) self.assertTrue(the_id >= 1.0) self.assertTrue(the_id <= 2.0) def test_string_field(self): fields = { "id": StringType(min_size=10, max_size=22) } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, str) for char in the_id: self.assertTrue(char in string.printable) self.assertTrue(len(the_id) >= 10) self.assertTrue(len(the_id) <= 22) fields = { "id": StringType() } female_of_the_species = BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, str) for char in the_id: self.assertTrue(char in string.printable) self.assertTrue(len(the_id) >= 0) self.assertTrue(len(the_id) <= 64) def test_string_field_with_limited_chars(self): allowed_chars = "paul" fields = { "id": StringType(allowed_chars=allowed_chars, min_size=10, max_size=22) } female_of_the_species = 
BabyMaker(fields) one = female_of_the_species.make_one() the_id = one.get("id") self.assertIsInstance(the_id, str) for char in the_id: self.assertTrue(char in allowed_chars) self.assertTrue(len(the_id) >= 10) self.assertTrue(len(the_id) <= 22) def test_enum_type(self): choices = [1, 8, "paul", 12, None] fields = { "id": EnumType(choices=choices) } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(88)) self.assertEquals(len(some), 88) for one in some: the_id = one.get("id") self.assertTrue(the_id in choices) def test_base_field_type(self): fields = { "id": FieldType() } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(88)) self.assertEquals(len(some), 88) for one in some: the_id = one.get("id") self.assertIsNone(the_id) def test_datetime_type(self): start = datetime(1976, 7, 15) end = datetime(1977, 7, 15) fields = { "created": DatetimeType(start, end) } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(88)) self.assertEquals(len(some), 88) for one in some: created = one.get("created") self.assertIsInstance(created, datetime) self.assertTrue(created <= end) self.assertTrue(created >= start) def test_datetime_notime_type(self): start = datetime(1976, 7, 15) end = datetime(1977, 7, 15) fields = { "created": DatetimeType(start, end, include_time=False) } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(88)) self.assertEquals(len(some), 88) for one in some: created = one.get("created") self.assertIsInstance(created, datetime) self.assertEquals(created.hour, 0) self.assertEquals(created.minute, 0) self.assertEquals(created.second, 0) self.assertTrue(created <= end) self.assertTrue(created >= start) def test_datetime_incremental_type(self): start = datetime(1976, 7, 15) end = datetime(1977, 7, 15) delta = timedelta(weeks=1) fields = { "created": DatetimeType(start, end, increment=delta) } female_of_the_species = 
BabyMaker(fields) some = list(female_of_the_species.make_some(56)) self.assertEquals(len(some), 56) test_value = start for one in some: created = one.get("created") self.assertIsInstance(created, datetime) self.assertTrue(created <= end) self.assertTrue(created >= start) self.assertEquals(created, test_value) test_value += delta if test_value >= end: test_value = start def test_datetime_decremental_type(self): start = datetime(1976, 7, 15) end = datetime(1977, 7, 15) delta = timedelta(weeks=-1) fields = { "created": DatetimeType(start, end, increment=delta) } female_of_the_species = BabyMaker(fields) some = list(female_of_the_species.make_some(56)) self.assertEquals(len(some), 56) test_value = end for one in some: created = one.get("created") self.assertIsInstance(created, datetime) self.assertTrue(created <= end) self.assertTrue(created >= start) self.assertEquals(created, test_value) test_value += delta if test_value <= start: test_value = end def test_embedded_maker(self): fields = { "id": UUIDType() } female_of_the_species = BabyMaker(fields) fields2 = { "inbed": EmbedType(female_of_the_species), "id": UUIDType() } grandma = BabyMaker(fields2) one = grandma.make_one() self.assertTrue("id" in one) self.assertTrue("inbed" in one) self.assertTrue("id" in one.inbed)<|fim▁end|>
the_id = one.get("id") for char in the_id: self.assertTrue(char in string.digits)
<|file_name|>env.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2<|fim▁hole|>import json, web from lib.log import Log class Env(object): @staticmethod def get(key): if key and key in web.ctx.env: return web.ctx.env[key] else: return web.ctx.env @staticmethod def set(key, value): web.ctx.env[key] = value @staticmethod def setFromFile(file): fenv = open(file) jenv = json.load(fenv) for key,value in jenv.items(): web.ctx.env[key] = value<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>step6.py<|end_file_name|><|fim▁begin|>import json f = open('text-stripped-3.json') out = open('text-lines.json', 'w') start_obj = json.load(f) <|fim▁hole|>characters_on_stage = [] currently_speaking = None last_scene = '1.1' for i in range(len(start_obj['data'])): obj = start_obj['data'][i] if obj['type'] == 'entrance': if obj['characters'] in characters_on_stage: raise Exception('Character tried to enter stage when already on stage at object ' + str(i)) characters_on_stage = characters_on_stage + obj['characters'] elif obj['type'] == 'exeunt': characters_on_stage = [] elif obj['type'] == 'exit': characters_on_stage = [char for char in characters_on_stage if char not in obj['characters']] elif obj['type'] == 'speaker tag': if obj['speaker'] not in characters_on_stage: raise Exception('Character tried to speak when not on stage at object ' + str(i), start_obj['data'][i + 1]) currently_speaking = obj['speaker'] elif obj['type'] == 'line': if currently_speaking == None: raise Exception('A line did not have an associated speaker at object ' + str(i)) identifier_info = obj['identifier'].split('.') scene = identifier_info[0] + '.' + identifier_info[1] #if scene != last_scene: # if len(characters_on_stage) != 0: # print('Warning: scene ' + scene + ' just started with ' + str(characters_on_stage) + ' still on stage') last_scene = scene end_obj['data'].append({ 'type': 'line', 'identifier': obj['identifier'], 'text': obj['text'].strip(), 'speaker': currently_speaking, 'characters': characters_on_stage }) if len(characters_on_stage) == 0: currently_speaking = None json.dump(end_obj, out)<|fim▁end|>
end_obj = {'data': []}
<|file_name|>zz.internal.local.fs.js<|end_file_name|><|fim▁begin|>var zz = { Internal: { Local: { FS: { File: {}, Directory: {} } } } }; zz.Internal.Local.FS.File.read = function(path, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.File.read"); Ti.API.debug("ZZ.Internal.Local.FS.File.read [path : " + path + "]"); var file = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, path); var blob = file.read(); if (null == blob) { var errorMessage = "ZZ.Internal.Local.FS.File.read unable to read file [name : " + file.name + ", nativePath : " + file.nativePath + ", blob.mimeType : " + blob.mimeType + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return null; } Ti.API.debug("ZZ.Internal.Local.FS.File.read readed file [name : " + file.name + ", nativePath : " + file.nativePath + ", blob.mimeType : " + blob.mimeType + "]"); null != successCallback && successCallback(blob); return blob; }; zz.Internal.Local.FS.File.write = function(path, content, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.File.write"); Ti.API.debug("ZZ.Internal.Local.FS.File.write [path : " + path + "]"); var file = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, path); file.write(content); var blob = file.read(); if (null == blob) { var errorMessage = "ZZ.Internal.Local.FS.File.write unable to write file [name : " + file.name + ", nativePath : " + file.nativePath + ", blob.mimeType : " + blob.mimeType + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return null; } Ti.API.debug("ZZ.Internal.Local.FS.File.write written file [name : " + file.name + ", nativePath : " + file.nativePath + ", blob.mimeType : " + blob.mimeType + "]");<|fim▁hole|> return blob; }; zz.Internal.Local.FS.File.copy = function(from, to, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.File.copy"); Ti.API.debug("ZZ.Internal.Local.FS.File.copy [from : " + from + ", to : " + to + "]"); var fromFile = 
Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, from); var toFile = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, to); toFile.write(fromFile.read()); var blob = toFile.read(); if (null == blob) { var errorMessage = "ZZ.Internal.Local.FS.File.copy unable to copy file [name : " + toFile.name + ", nativePath : " + toFile.nativePath + ", blob.mimeType : " + blob.mimeType + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return null; } Ti.API.debug("ZZ.Internal.Local.FS.File.copy copied to file [name : " + toFile.name + ", nativePath : " + toFile.nativePath + ", blob.mimeType : " + blob.mimeType + "]"); null != successCallback && successCallback(blob); return blob; }; zz.Internal.Local.FS.File.delete = function(path, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.File.delete"); Ti.API.debug("ZZ.Internal.Local.FS.File.delete [path : " + path + "]"); var file = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, path); var done = file.deleteFile(); if (!done) { var errorMessage = "ZZ.Internal.Local.FS.File.delete unable to delete file [name : " + file.name + ", nativePath : " + file.nativePath + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return false; } Ti.API.debug("ZZ.Internal.Local.FS.File.delete deleted file [name : " + file.name + ", nativePath : " + file.nativePath + "]"); null != successCallback && successCallback(); return true; }; zz.Internal.Local.FS.Directory.make = function(path, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.Directory.make"); Ti.API.debug("ZZ.Internal.Local.FS.Directory.make [path : " + path + "]"); var dir = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, path); var done = dir.createDirectory(); if (!done) { var errorMessage = "ZZ.Internal.Local.FS.Directory.make unable to create directory [name : " + dir.name + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return false; } 
Ti.API.debug("ZZ.Internal.Local.FS.Directory.make created directory [name : " + dir.name + ", nativePath : " + dir.nativePath + "]"); null != successCallback && successCallback(); return true; }; zz.Internal.Local.FS.Directory.remove = function(path, successCallback, errorCallback) { Ti.API.debug("ZZ.Internal.Local.FS.Directory.remove"); var dir = Ti.Filesystem.getFile(Ti.Filesystem.applicationDataDirectory, path); var done = dir.deleteDirectory(); if (!done) { var errorMessage = "ZZ.Internal.Local.FS.Directory.remove unable to remove directory [name : " + dir.name + ", nativePath : " + dir.nativePath + "]"; _manageError({ errorMessage: errorMessage }, errorCallback); return false; } Ti.API.debug("ZZ.Internal.Local.FS.Directory.remove removed directory [name : " + dir.name + ", nativePath : " + dir.nativePath + "]"); null != successCallback && successCallback(); return true; }; exports.ZZ = zz; exports.version = .2; var _manageError = function(error, errorCallback) { Ti.API.trace("ZZ.Internal.Local.FS._manageError"); Ti.API.error(error.errorMessage); null != errorCallback && errorCallback(error); };<|fim▁end|>
null != successCallback && successCallback(blob);
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from zerver.lib.test_classes import WebhookTestCase class LidarrHookTests(WebhookTestCase): STREAM_NAME = "lidarr" URL_TEMPLATE = "/api/v1/external/lidarr?api_key={api_key}&stream={stream}" WEBHOOK_DIR_NAME = "lidarr" def test_lidarr_test(self) -> None: """ Tests if lidarr test payload is handled correctly """ expected_topic = "Lidarr - Test" expected_message = "Lidarr webhook has been successfully configured." self.check_webhook("lidarr_test", expected_topic, expected_message) def test_lidarr_tracks_renamed(self) -> None: """ Tests if lidarr tracks renamed payload is handled correctly """ expected_topic = "Little Mix" expected_message = "The artist Little Mix has had its tracks renamed." self.check_webhook("lidarr_tracks_renamed", expected_topic, expected_message) def test_lidarr_tracks_retagged(self) -> None: """ Tests if lidarr tracks retagged payload is handled correctly """ expected_topic = "Little Mix" expected_message = "The artist Little Mix has had its tracks retagged." 
self.check_webhook("lidarr_tracks_retagged", expected_topic, expected_message) def test_lidarr_tracks_imported(self) -> None: """ Tests if lidarr tracks imported payload is handled correctly """ expected_topic = "UB40" expected_message = """ The following tracks by UB40 have been imported: * Cherry Oh Baby * Keep On Moving * Please Don't Make Me Cry * Sweet Sensation * Johnny Too Bad * Red Red Wine * Guilty * She Caught the Train * Version Girl * Many Rivers to Cross """.strip() self.check_webhook("lidarr_tracks_imported", expected_topic, expected_message) def test_lidarr_tracks_imported_upgrade(self) -> None: """ Tests if lidarr tracks imported upgrade payload is handled correctly """ expected_topic = "Little Mix" expected_message = """ The following tracks by Little Mix have been imported due to upgrade: * The National Manthem * Woman Like Me * Think About Us * Strip * Monster in Me * Joan of Arc * Love a Girl Right * American Boy * Told You So * Wasabi * More Than Words * Motivate * Notice * The Cure * Forget You Not * Woman’s World * The Cure (stripped) * Only You """.strip() self.check_webhook("lidarr_tracks_imported_upgrade", expected_topic, expected_message) def test_lidarr_album_grabbed(self) -> None: """ Tests if lidarr album grabbed payload is handled correctly """ expected_topic = "UB40" expected_message = "The album Labour of Love by UB40 has been grabbed." self.check_webhook("lidarr_album_grabbed", expected_topic, expected_message) def test_lidarr_tracks_imported_over_limit(self) -> None: """ Tests if lidarr tracks imported over limit payload is handled correctly """ expected_topic = "Michael Jackson" expected_message = """ The following tracks by Michael Jackson have been imported:<|fim▁hole|>* They Don’t Care About Us * Stranger in Moscow * Black or White * This Time Around * Rock With You * Earth Song * She’s Out of My Life * D.S. 
* Bad * Money * I Just Can’t Stop Loving You * Man in the Mirror * Come Together * Thriller * You Are Not Alone * Beat It * Childhood (theme from “Free Willy 2”) [and 10 more tracks(s)] """.strip() self.check_webhook("lidarr_tracks_imported_over_limit", expected_topic, expected_message) def test_lidarr_tracks_imported_upgrade_over_limit(self) -> None: """ Tests if lidarr tracks imported upgrade over limit payload is handled correctly """ expected_topic = "Michael Jackson" expected_message = """ The following tracks by Michael Jackson have been imported due to upgrade: * Scream * Billie Jean * The Way You Make Me Feel * They Don’t Care About Us * Stranger in Moscow * Black or White * This Time Around * Rock With You * Earth Song * She’s Out of My Life * D.S. * Bad * Money * I Just Can’t Stop Loving You * Man in the Mirror * Come Together * Thriller * You Are Not Alone * Beat It * Childhood (theme from “Free Willy 2”) [and 10 more tracks(s)] """.strip() self.check_webhook( "lidarr_tracks_imported_upgrade_over_limit", expected_topic, expected_message )<|fim▁end|>
* Scream * Billie Jean * The Way You Make Me Feel
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from PySide import QtCore, QtGui from pysql_browser import MainWindow import sys def main(): app = QtGui.QApplication(sys.argv) window = MainWindow() window.show() sys.exit(app.exec_()) if __name__ == '__main__':<|fim▁hole|><|fim▁end|>
main()
<|file_name|>api-notes.js<|end_file_name|><|fim▁begin|>var joi = require('joi'); var _ = require('lodash'); module.exports = function(router, db) { // This is our "class" that accesses/modifies notes in the db var NoteService = require('../lib/db-notes') var noteService = new NoteService(db); // Joi is a wonderful library that takes strings and converts them into the proper // type while validating them. Anyone who works with query strings knows this is // usually a painfully dull process, and this makes that 10x easier. var querySchema = joi.object().keys({ limit: joi.number().integer().min(1).max(1000), skip: joi.number().integer().min(0), field: joi.string().valid(["createdBy", "note", "lastModified"]), value: joi.string().trim(), sort: joi.valid(["createdBy", "note"]), order: joi.number().valid([1, -1]) }).and(['sort', 'order']).and('field','value'); // Read list router.get('/api/notes', function(req, res, next) { joi.validate(req.query, querySchema, function(err, queryString) { var query = {}, options = {}; if(err) return next(err); if(_.isEmpty(queryString)) { options.sort = { 'lastModified': -1 }; query = {}; } else { query = setQuery(queryString.field, queryString.value); if(queryString.sort) options.sort[queryString.sort] = queryString.order; options.limit = queryString.limit; options.skip = queryString.skip; } noteService.list(query, options, function(err, users) {<|fim▁hole|> res.send(users); }); }); }); /** * Adds a note. The lastModified date is handled automatically. 
*/ router.post('/api/notes', function(req, res, next) { var note; note = { createdBy: req.body.name, note: req.body.note }; noteService.add(note, function(err, result) { if(err) { if(err.message ==='Validation failed') { return res.send(400, err); } return next(err); } if(result === null) { return res.send(400, { error: { message: 'Note does not exist' }}); } // Doesn't belong in the api really, but for quickly getting this working: if(req.accepts(['html', 'json']) === 'html') return res.redirect('/notes'); res.send(200, result); }); }); /** * Updates a note. * TODO: Not yet used and tested */ router.put('/api/notes/:id', function(req, res, next) { var id; try{ id = ObjectID(req.params.id); }catch(err) { return res.send(400, {error: err.message}); } noteService.updateNote(id, req.body, function(err, result) { if(err) return next(err); if(result === 0) { return res.send(400, { error: 'Note `' + req.params.id + '` does not exist' }); } res.json(200, null); }); }); /** * Deletes a note from the db */ router.delete('/api/notes/:id', function(req, res, next) { noteService.remove(req.params.id, function(err, result) { if(err) return next(err); if(result === 0) { return res.send(400, { error: 'Note `' + req.params.id + '` does not exist' }); } res.json(200, null); }); }); };<|fim▁end|>
if(err) return next(err);
<|file_name|>pizza.py<|end_file_name|><|fim▁begin|>import pygame from pygame import gfxdraw from .rangable import Rangable import random class Pizza(Rangable): """docstring for Pizza""" def __init__(self, context): Rangable.__init__(self) self.context = context self.pizza = self.context.plain_pizza self.trashed = False self.perfected = False self.trashing = False self.trash_can = None self.trash_pos = None self.slices = None self.offset = random.randint(0,4) self.color=(0,0,0) self.x = 100 self.y = 400 # 5=> margin between top and pizza self.location = (self.x,self.y) self.width = 150 self.height = 150 self.toppings = [0, 0, 0, 0] self.requirements = [] self.potentalClues = [] self.drawing = None self.draw() """ update the button drawing surface. """ def draw(self): surf = pygame.Surface((self.width, self.height), pygame.SRCALPHA) pizza_img = pygame.transform.scale(self.context.plain_pizza, (self.width, self.height)) surf.blit(pizza_img, (0,0)) for i in range(0, len(self.toppings)): if self.toppings[i] > 0: self.drawTopping(surf, i, 0) #gfxdraw.filled_ellipse(surf, self.width//2,self.height//2, self.width/2, self.height/2, (219,162,74))#pizza #pygame.draw.arc(surf, (225,216,0), [0, 0, self.width, self.height], 0, 360, 2)#crust #draw slices on here afterwards self.drawing = surf self.dirty = False """ draw on a surface """ def drawOn(self, screen=None): S = 8 #speed towards trash can A = 9.8 #acceleration towards trash can if self.trashing: if self.touches(self.trash_can): self.trashed = True self.trashing = False else: self.setLocation(self.trash_pos[0] + 50, self.y + ((S)*A) ) if screen: if self.dirty: self.draw() screen.blit(self.drawing, self.location) else: print("Error: drawOn was called on Button object but no screen argument was passed") """ return topping drawing """ def drawTopping(self, surf, i, pad=0): #needs serious refactoring topping_img = pygame.transform.scale(self.context.game_toppings[i], (self.width/4, self.height/4)) if self.context.difficulty 
== "Advanced": amount = self.context.fractions[self.toppings[i]] else: amount = self.toppings[i] #center portion surf.blit(topping_img, ( (surf.get_width()/2) - (topping_img.get_width()/2), (surf.get_height()/2) - (topping_img.get_height()/2))) #top portion w,h = (surf.get_width()/6) + pad, surf.get_height()/6 if amount > 0: surf.blit( pygame.transform.rotate(topping_img, 45), ( w, h )) if amount > 0.25: surf.blit( pygame.transform.rotate(topping_img, 45), ( 3*w , h )) #bottom portion if amount > 0.5: surf.blit( pygame.transform.rotate(topping_img, 45), ( w, 3*h )) if amount > 0.75: surf.blit( pygame.transform.rotate(topping_img, 45), ( 3*w , 3*h )) return surf """ draw on a surface """ def moveToTrash(self, trash_pos=None, trash_can=None): if not(self.trashing or self.trashed): if trash_pos and trash_can: self.trash_pos = trash_pos self.trash_can = pygame.Rect((trash_pos[0], trash_pos[1]+self.height), (trash_can.get_width(), trash_can.get_height())) self.trashing = True self.setLocation(trash_pos[0] + 50, 200)<|fim▁hole|> print("Error: expected a trash_pos, trash_can got {}, {}".format(trash_pos, trash_can)) """ Add topping """ def addTopping(self, index): if self.toppings[index] == 0: self.toppings[index] = 1 else: self.toppings[index] = 0 self.dirty = True """ Change Topping """ def changeTopping(self, index, amount): self.toppings[index] = amount self.dirty = True """ set Costumer hidden Pizza requirements """ def setRequirements(self, requirements): self.requirements = requirements """ Checks if Pizza meets customer requirements. Currently only support topping requirements returns a tuple, boolean indicating whether it met the requirement or not. 
(Boolean, Message) """ def checkRequirements(self): if self.context.difficulty == "Easy": message = [] metRequirement = False notwanted = 0 missing = 0 for i in range(0, len(self.toppings)): if self.toppings[i] > 0 and self.requirements[i] == 0: notwanted += 1 elif self.toppings[i] == 0 and self.requirements[i] > 0: missing += 1 if missing > 0: message += ["There aren't enough toppings on the pizza. :(".format(notwanted)] elif missing < 0: message += ["There are more toppings on the pizza than I wanted. :(".format(notwanted)] if notwanted > 0: message += ["There {} {} {} on the pizza I don't like. :(".format( 'is' if notwanted == 1 else 'are', notwanted, 'topping' if notwanted == 1 else 'toppings' )] if not(notwanted) and missing == 0: metRequirement = True message += ["Thank you, that was the perfect pizza I was looking for! :)\n"] return (metRequirement, message) elif self.context.difficulty == "Advanced": metRequirement = True messages = [] names = ["Cheese", "Pepperoni", "Mushroom", "Pineapple"] # calculate full pizza requirements totalRequirements = [0 for i in range(0, len(self.toppings))] for arr in self.requirements: for i in range(0, len(arr)): totalRequirements[i] += arr[i] # check if pizza matches requirements for i in range(0, len(self.toppings)): topping = self.context.fractions[self.toppings[i]] if topping > totalRequirements[i] or topping < totalRequirements[i]: metRequirement = False # set up person-specific messages for personPreference in self.requirements: message = [] notwanted = 0 missing = 0 for i in range(0, len(self.toppings)): toppingAmount = self.context.fractions[self.toppings[i]] if personPreference[i] == 0 and toppingAmount > totalRequirements[i]: notwanted += 1 elif personPreference[i] > 0 and toppingAmount < totalRequirements[i]: missing += 1 if notwanted == 1: message += ["I want less of one topping"] elif notwanted > 1: message += ["I want less of {} toppings".format(notwanted)] if missing == 1: message += ["I want more of one 
topping"] elif missing > 1: message += ["I want more of {} toppings".format(missing)] messages.append(message) # Unique person messages personSpecificMessages = [] # Wrong / correct pizza if metRequirement: personSpecificMessages.append(["The is the correct pizza!"]) else: personSpecificMessages.append(["This is not the pizza I want."]) # Gather some potental 'simple' clues potentialCluesMuch = [] potentialCluesLittle = [] for i in range(0, len(self.toppings)): guessAmount = self.context.fractions[self.toppings[i]] correctAmount = totalRequirements[i] if guessAmount > correctAmount: potentialCluesMuch.append(["Too much {} ".format(names[i])]) elif guessAmount < correctAmount: potentialCluesLittle.append(["Too little {} ".format(names[i])]) # Back up for the 'simple clues' if len(potentialCluesMuch) == 0: for i in range(0, len(self.toppings)): guessAmount = self.context.fractions[self.toppings[i]] correctAmount = totalRequirements[i] if guessAmount == correctAmount: potentialCluesMuch.append(["The {} is just right".format(names[i])]) if len(potentialCluesLittle) == 0: for i in range(0, len(self.toppings)): guessAmount = self.context.fractions[self.toppings[i]] correctAmount = totalRequirements[i] if guessAmount == correctAmount: potentialCluesLittle.append(["The {} is just right".format(names[i])]) # To much of a topping if len(potentialCluesMuch) == 0: personSpecificMessages.append(["Looks fine to me"]) else: msg = potentialCluesMuch[random.randint(1,len(potentialCluesMuch)) - 1] personSpecificMessages.append(msg) # To little of a topping if len(potentialCluesLittle) == 0: personSpecificMessages.append(["Looks fine to me"]) else: msg = potentialCluesLittle[random.randint(1,len(potentialCluesLittle)) - 1] personSpecificMessages.append(msg) self.generateClues(names) # Random clue as the final person if len(self.potentalClues) == 0: personSpecificMessages.append(["Looks fine to me"]) else: 
personSpecificMessages.append(self.potentalClues[random.randint(1,len(self.potentalClues)) - 1]) formattedMessages = [[] for i in range(0, 4)] for i in range(0, len(personSpecificMessages)): for j in range(0, len(personSpecificMessages[i])): strArray = self.formatString(personSpecificMessages[i][j], 22) formattedMessages[i] += strArray # return (metRequirement, messages[0], messages[1], messages[2], messages[3]) return (metRequirement, formattedMessages[0], formattedMessages[1], formattedMessages[2], formattedMessages[3]) def formatString(self, msg, lineLength): strArray = []; #keep adding snippets as long as there is more to add while len(msg) > lineLength: #get space closest to end of line index = lineLength while index > 0 and msg[index] != " ": index = index - 1 if index == 0: index = lineLength strArray += [msg[:index]] msg = msg[index+1:] #add remainder of message strArray += [msg] return strArray """ draw on a surface """ def setPerfect(self): self.perfected = True """ x,y are the center points of the text. 
""" def setLocation(self, x, y): self.x = x self.y = y self.location = (x, y) """ Logic for generating clues """ def generateClues(self, names): self.potentalClues = [] # calculate full pizza requirements totalRequirements = [0 for i in range(0, len(self.toppings))] for arr in self.requirements: for i in range(0, len(arr)): totalRequirements[i] += arr[i] for i in range(0, len(totalRequirements)): print(totalRequirements[i]) # Same as for i in range(0, len(self.toppings) - 1): for j in range(i+1, len(self.toppings)): if totalRequirements[i] == totalRequirements[j]: self.potentalClues.append(["I want the same {} as {}".format(names[i], names[j])]) # Double for i in range(0, len(self.toppings) - 1): for j in range(i+1, len(self.toppings)): if totalRequirements[i] == 2 * totalRequirements[j] and totalRequirements[j] != 0: self.potentalClues.append(["I want twice the {} as {}".format(names[i], names[j])]) if totalRequirements[j] == 2 * totalRequirements[i] and totalRequirements[i] != 0: self.potentalClues.append(["I want twice the {} as {}".format(names[j], names[i])]) # Tripple for i in range(0, len(self.toppings) - 1): for j in range(i+1, len(self.toppings)): if totalRequirements[i] == 3 * totalRequirements[j] and totalRequirements[j] != 0: self.potentalClues.append(["I want triple the {} as {}".format(names[i], names[j])]) if totalRequirements[j] == 3 * totalRequirements[i] and totalRequirements[i] != 0: self.potentalClues.append(["I want triple the {} as {}".format(names[j], names[i])]) # As much as others for i in range(0, len(self.toppings)): total = 0.0 for j in range(0, len(self.toppings)): if i != j: total += self.toppings[j] if self.toppings[i] == total: self.potentalClues.append(["I want as much {} as everything else combined".format(names[i])])<|fim▁end|>
else:
<|file_name|>future.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0. use crate::callback::must_call; use futures::channel::mpsc; use futures::channel::oneshot as futures_oneshot; use futures::future::{self, BoxFuture, Future, FutureExt, TryFutureExt}; use futures::stream::{Stream, StreamExt}; use futures::task::{self, ArcWake, Context, Poll}; use std::sync::{Arc, Mutex}; /// Generates a paired future and callback so that when callback is being called, its result /// is automatically passed as a future result. pub fn paired_future_callback<T>() -> (Box<dyn FnOnce(T) + Send>, futures_oneshot::Receiver<T>) where T: Send + 'static, { let (tx, future) = futures_oneshot::channel::<T>(); let callback = Box::new(move |result| { let r = tx.send(result); if r.is_err() { warn!("paired_future_callback: Failed to send result to the future rx, discarded."); } }); (callback, future) } pub fn paired_must_called_future_callback<T>( arg_on_drop: impl FnOnce() -> T + Send + 'static, ) -> (Box<dyn FnOnce(T) + Send>, futures_oneshot::Receiver<T>) where T: Send + 'static, { let (tx, future) = futures_oneshot::channel::<T>(); let callback = must_call( move |result| { let r = tx.send(result); if r.is_err() { warn!("paired_future_callback: Failed to send result to the future rx, discarded."); } }, arg_on_drop, ); (callback, future) } /// Create a stream proxy with buffer representing the remote stream. The returned task /// will receive messages from the remote stream as much as possible. 
pub fn create_stream_with_buffer<T, S>( s: S, size: usize, ) -> ( impl Stream<Item = T> + Send + 'static, impl Future<Output = ()> + Send + 'static, ) where S: Stream<Item = T> + Send + 'static, T: Send + 'static, { let (tx, rx) = mpsc::channel::<T>(size); let driver = s .then(future::ok::<T, mpsc::SendError>) .forward(tx) .map_err(|e| warn!("stream with buffer send error"; "error" => %e)) .map(|_| ()); (rx, driver) } /// Polls the provided future immediately. If the future is not ready, /// it will register the waker. When the event is ready, the waker will /// be notified, then the internal future is immediately polled in the /// thread calling `wake()`. pub fn poll_future_notify<F: Future<Output = ()> + Send + 'static>(f: F) { let f: BoxFuture<'static, ()> = Box::pin(f); let waker = Arc::new(BatchCommandsWaker(Mutex::new(Some(f)))); waker.wake(); } // BatchCommandsWaker is used to make business pool notifiy completion queues directly. struct BatchCommandsWaker(Mutex<Option<BoxFuture<'static, ()>>>); impl ArcWake for BatchCommandsWaker { fn wake_by_ref(arc_self: &Arc<Self>) { let mut future_slot = arc_self.0.lock().unwrap(); if let Some(mut future) = future_slot.take() { let waker = task::waker_ref(&arc_self); let cx = &mut Context::from_waker(&*waker); match future.as_mut().poll(cx) { Poll::Pending => {<|fim▁hole|> } } } }<|fim▁end|>
*future_slot = Some(future); } Poll::Ready(()) => {}
<|file_name|>SqlTypeToXmlType.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013-2015 Josef Hardi <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.obidea.semantika.database.datatype; import java.sql.Types; import java.util.HashMap; import com.obidea.semantika.datatype.DataType; /** * A mapping specification from SQL datatypes to XML Schema datatypes. * Source: {@link http://www.w3.org/2001/sw/rdb2rdf/wiki/Mapping_SQL_datatypes_to_XML_Schema_datatypes} */ public final class SqlTypeToXmlType { private static HashMap<Integer, String> mTypeMapping; static { mTypeMapping = new HashMap<Integer, String>(); // mCoreMapping.put(Types.BINARY, DataType.HEX_BINARY); // mCoreMapping.put(Types.JAVA_OBJECT, DataType.HEX_BINARY); mTypeMapping.put(Types.NUMERIC, DataType.DECIMAL); mTypeMapping.put(Types.DECIMAL, DataType.DECIMAL); mTypeMapping.put(Types.BIGINT, DataType.LONG); mTypeMapping.put(Types.INTEGER, DataType.INTEGER); mTypeMapping.put(Types.SMALLINT, DataType.SHORT);<|fim▁hole|> mTypeMapping.put(Types.CHAR, DataType.STRING); mTypeMapping.put(Types.VARCHAR, DataType.STRING); mTypeMapping.put(Types.NCHAR, DataType.STRING); mTypeMapping.put(Types.NVARCHAR, DataType.STRING); mTypeMapping.put(Types.LONGVARCHAR, DataType.STRING); mTypeMapping.put(Types.LONGNVARCHAR, DataType.STRING); mTypeMapping.put(Types.DATE, DataType.DATE); mTypeMapping.put(Types.TIME, DataType.TIME); mTypeMapping.put(Types.TIMESTAMP, DataType.DATE_TIME); 
mTypeMapping.put(Types.BOOLEAN, DataType.BOOLEAN); mTypeMapping.put(Types.BIT, DataType.BOOLEAN); mTypeMapping.put(Types.OTHER, DataType.STRING); } /** * Return the corresponding XML type given the SQL type. * * @param sqlType * The JDBC SQL type (see {@link java.sql.Types}). * @return a URI string representing the XML type. * @throws UnsupportedSqlDataTypeException * if the data type has no corresponding XML type. */ public static String get(int sqlType) { String toReturn = mTypeMapping.get(sqlType); if (toReturn == null) { throw new UnsupportedSqlDataTypeException(sqlType); } return toReturn; } }<|fim▁end|>
mTypeMapping.put(Types.TINYINT, DataType.BYTE); mTypeMapping.put(Types.REAL, DataType.FLOAT); mTypeMapping.put(Types.FLOAT, DataType.FLOAT); mTypeMapping.put(Types.DOUBLE, DataType.DOUBLE);
<|file_name|>latexdocvisitor.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************** * * $Id: $ * * * Copyright (C) 1997-2012 by Dimitri van Heesch. * * Permission to use, copy, modify, and distribute this software and its * documentation under the terms of the GNU General Public License is hereby * granted. No representations are made about the suitability of this software * for any purpose. It is provided "as is" without express or implied warranty. * See the GNU General Public License for more details. * * Documents produced by Doxygen are derivative works derived from the * input used in their production; they are not affected by this license. * */ #include <qfileinfo.h> #include "latexdocvisitor.h" #include "docparser.h" #include "language.h" #include "doxygen.h" #include "outputgen.h" #include "dot.h" #include "util.h" #include "message.h" #include "parserintf.h" #include "msc.h" #include "htmlattrib.h" #include "cite.h" static QCString escapeLabelName(const char *s) { QCString result; const char *p=s; char c; while ((c=*p++)) { switch (c) { case '%': result+="\\%"; break; case '|': result+="\\texttt{\"|}"; break; case '!': result+="\"!"; break; default: result+=c; } } return result; } const int maxLevels=5; static const char *secLabels[maxLevels] = { "section","subsection","subsubsection","paragraph","subparagraph" }; static const char *getSectionName(int level) { static bool compactLatex = Config_getBool("COMPACT_LATEX"); int l = level; if (compactLatex) l++; if (Doxygen::insideMainPage) l--; return secLabels[QMIN(maxLevels-1,l)]; } QCString LatexDocVisitor::escapeMakeIndexChars(const char *s) { QCString result; const char *p=s; char str[2]; str[1]=0; char c; while ((c=*p++)) { switch (c) { case '!': m_t << "\"!"; break; case '"': m_t << "\"\""; break; case '@': m_t << "\"@"; break; case '|': m_t << "\\texttt{\"|}"; break; case '[': m_t << "["; break; case ']': m_t << "]"; break; default: str[0]=c; 
filter(str); break; } } return result; } LatexDocVisitor::LatexDocVisitor(FTextStream &t,CodeOutputInterface &ci, const char *langExt,bool insideTabbing) : DocVisitor(DocVisitor_Latex), m_t(t), m_ci(ci), m_insidePre(FALSE), m_insideItem(FALSE), m_hide(FALSE), m_insideTabbing(insideTabbing), m_insideTable(FALSE), m_langExt(langExt), m_currentColumn(0), m_inRowspan(FALSE), m_inColspan(FALSE) { m_rowSpans.setAutoDelete(TRUE); } //-------------------------------------- // visitor functions for leaf nodes //-------------------------------------- void LatexDocVisitor::visit(DocWord *w) { if (m_hide) return; filter(w->word()); } void LatexDocVisitor::visit(DocLinkedWord *w) { if (m_hide) return; startLink(w->ref(),w->file(),w->anchor()); filter(w->word()); endLink(w->ref(),w->file(),w->anchor()); } void LatexDocVisitor::visit(DocWhiteSpace *w) { if (m_hide) return; if (m_insidePre) { m_t << w->chars(); } else { m_t << " "; } } void LatexDocVisitor::visit(DocSymbol *s) { if (m_hide) return; switch(s->symbol()) { case DocSymbol::BSlash: m_t << "\\textbackslash{}"; break; case DocSymbol::At: m_t << "@"; break; case DocSymbol::Less: if (m_insidePre) m_t << "<"; else m_t << "$<$"; break; case DocSymbol::Greater: if (m_insidePre) m_t << ">"; else m_t << "$>$"; break; case DocSymbol::Amp: m_t << "\\&"; break; case DocSymbol::Dollar: m_t << "\\$"; break; case DocSymbol::Hash: m_t << "\\#"; break; case DocSymbol::DoubleColon: m_t << "::"; break; case DocSymbol::Percent: m_t << "\\%"; break; case DocSymbol::Pipe: m_t << "$|$"; break; case DocSymbol::Copy: m_t << "\\copyright"; break; case DocSymbol::Tm: m_t << "\\texttrademark"; break; case DocSymbol::Reg: m_t << "\\textregistered"; break; case DocSymbol::Apos: m_t << "'"; break; case DocSymbol::Quot: m_t << "\""; break; case DocSymbol::Lsquo: m_t << "`"; break; case DocSymbol::Rsquo: m_t << "'"; break; case DocSymbol::Ldquo: m_t << "``"; break; case DocSymbol::Rdquo: m_t << "''"; break; case DocSymbol::Ndash: m_t << "--"; break; 
case DocSymbol::Mdash: m_t << "---"; break; case DocSymbol::Uml: if (s->letter()=='i') m_t << "\\\"{\\i}"; else m_t << "\\\"{" << s->letter() << "}"; break; case DocSymbol::Acute: if (s->letter()=='i') m_t << "\\'{\\i}"; else m_t << "\\'{" << s->letter() << "}"; break; case DocSymbol::Grave: if (s->letter()=='i') m_t << "\\`{\\i}"; else m_t << "\\`{" << s->letter() << "}"; break; case DocSymbol::Circ: if (s->letter()=='i') m_t << "\\^{\\i}"; else m_t << "\\^{" << s->letter() << "}"; break; case DocSymbol::Slash: if (tolower(s->letter())=='o') m_t << "{\\" << s->letter() << "}"; else m_t << s->letter(); break; case DocSymbol::Tilde: m_t << "\\~{" << s->letter() << "}"; break; case DocSymbol::Szlig: m_t << "{\\ss}"; break; case DocSymbol::Cedil: m_t << "\\c{" << s->letter() << "}"; break; case DocSymbol::Ring: m_t << "\\" << s->letter() << s->letter(); break; case DocSymbol::Nbsp: m_t << "~"; break; case DocSymbol::AElig: m_t << "{\\AE}"; break; case DocSymbol::Aelig: m_t << "{\\ae}"; break; case DocSymbol::GrkGamma: m_t << "{$\\Gamma$}"; break; case DocSymbol::GrkDelta: m_t << "{$\\Delta$}"; break; case DocSymbol::GrkTheta: m_t << "{$\\Theta$}"; break; case DocSymbol::GrkLambda: m_t << "{$\\Lambda$}"; break; case DocSymbol::GrkXi: m_t << "{$\\Xi$}"; break; case DocSymbol::GrkPi: m_t << "{$\\Pi$}"; break; case DocSymbol::GrkSigma: m_t << "{$\\Sigma$}"; break; case DocSymbol::GrkUpsilon: m_t << "{$\\Upsilon$}"; break; case DocSymbol::GrkPhi: m_t << "{$\\Phi$}"; break; case DocSymbol::GrkPsi: m_t << "{$\\Psi$}"; break; case DocSymbol::GrkOmega: m_t << "{$\\Omega$}"; break; case DocSymbol::Grkalpha: m_t << "{$\\alpha$}"; break; case DocSymbol::Grkbeta: m_t << "{$\\beta$}"; break; case DocSymbol::Grkgamma: m_t << "{$\\gamma$}"; break; case DocSymbol::Grkdelta: m_t << "{$\\delta$}"; break; case DocSymbol::Grkepsilon: m_t << "{$\\varepsilon$}"; break; case DocSymbol::Grkzeta: m_t << "{$\\zeta$}"; break; case DocSymbol::Grketa: m_t << "{$\\eta$}"; break; case 
DocSymbol::Grktheta: m_t << "{$\\theta$}"; break; case DocSymbol::Grkiota: m_t << "{$\\iota$}"; break; case DocSymbol::Grkkappa: m_t << "{$\\kappa$}"; break; case DocSymbol::Grklambda: m_t << "{$\\lambda$}"; break; case DocSymbol::Grkmu: m_t << "{$\\mu$}"; break; case DocSymbol::Grknu: m_t << "{$\\nu$}"; break; case DocSymbol::Grkxi: m_t << "{$\\xi$}"; break; case DocSymbol::Grkpi: m_t << "{$\\pi$}"; break; case DocSymbol::Grkrho: m_t << "{$\\rho$}"; break; case DocSymbol::Grksigma: m_t << "{$\\sigma$}"; break; case DocSymbol::Grktau: m_t << "{$\\tau$}"; break; case DocSymbol::Grkupsilon: m_t << "{$\\upsilon$}"; break; case DocSymbol::Grkphi: m_t << "{$\\varphi$}"; break; case DocSymbol::Grkchi: m_t << "{$\\chi$}"; break; case DocSymbol::Grkpsi: m_t << "{$\\psi$}"; break; case DocSymbol::Grkomega: m_t << "{$\\omega$}"; break; case DocSymbol::Grkvarsigma: m_t << "{$\\varsigma$}"; break; case DocSymbol::Section: m_t << "{$\\S$}"; break; case DocSymbol::Degree: m_t << "\\textdegree"; break; case DocSymbol::Prime: m_t << "'"; break; case DocSymbol::DoublePrime: m_t << "''"; break; case DocSymbol::Infinity: m_t << "{$\\infty$}"; break; case DocSymbol::EmptySet: m_t << "{$\\emptyset$}"; break; case DocSymbol::PlusMinus: m_t << "{$\\pm$}"; break; case DocSymbol::Times: m_t << "{$\\times$}"; break; case DocSymbol::Minus: m_t << "-"; break; case DocSymbol::CenterDot: m_t << "{$\\cdot$}"; break; case DocSymbol::Partial: m_t << "{$\\partial$}"; break; case DocSymbol::Nabla: m_t << "{$\\nabla$}"; break; case DocSymbol::SquareRoot: m_t << "{$\\surd$}"; break; case DocSymbol::Perpendicular: m_t << "{$\\perp$}"; break; case DocSymbol::Sum: m_t << "{$\\sum$}"; break; case DocSymbol::Integral: m_t << "{$\\int$}"; break; case DocSymbol::Product: m_t << "{$\\prod$}"; break; case DocSymbol::Similar: m_t << "{$\\sim$}"; break; case DocSymbol::Approx: m_t << "{$\\approx$}"; break; case DocSymbol::NotEqual: m_t << "{$\\ne$}"; break; case DocSymbol::Equivalent: m_t << "{$\\equiv$}"; 
break; case DocSymbol::Proportional: m_t << "{$\\propto$}"; break; case DocSymbol::LessEqual: m_t << "{$\\le$}"; break; case DocSymbol::GreaterEqual: m_t << "{$\\ge$}"; break; case DocSymbol::LeftArrow: m_t << "{$\\leftarrow$}"; break; case DocSymbol::RightArrow: m_t << "{$\\rightarrow$}"; break; case DocSymbol::SetIn: m_t << "{$\\in$}"; break; case DocSymbol::SetNotIn: m_t << "{$\\notin$}"; break; case DocSymbol::LeftCeil: m_t << "{$\\lceil$}"; break; case DocSymbol::RightCeil: m_t << "{$\\rceil$}"; break; case DocSymbol::LeftFloor: m_t << "{$\\lfloor$}"; break; case DocSymbol::RightFloor: m_t << "{$\\rfloor$}"; break; default: err("error: unknown symbol found\n"); } } void LatexDocVisitor::visit(DocURL *u) { if (m_hide) return; if (Config_getBool("PDF_HYPERLINKS")) { m_t << "\\href{"; if (u->isEmail()) m_t << "mailto:"; m_t << u->url() << "}"; } m_t << "{\\tt "; filter(u->url()); m_t << "}"; } void LatexDocVisitor::visit(DocLineBreak *) { if (m_hide) return; if (m_insideTable) m_t << "\\newline\n"; else m_t << "\\par\n"; } void LatexDocVisitor::visit(DocHorRuler *) { if (m_hide) return; m_t << "\n\n"; } void LatexDocVisitor::visit(DocStyleChange *s) { if (m_hide) return; switch (s->style()) { case DocStyleChange::Bold: if (s->enable()) m_t << "{\\bfseries "; else m_t << "}"; break; case DocStyleChange::Italic: if (s->enable()) m_t << "{\\itshape "; else m_t << "}"; break; case DocStyleChange::Code: if (s->enable()) m_t << "{\\ttfamily "; else m_t << "}"; break; case DocStyleChange::Subscript: if (s->enable()) m_t << "$_{\\mbox{"; else m_t << "}}$ "; break; case DocStyleChange::Superscript: if (s->enable()) m_t << "$^{\\mbox{"; else m_t << "}}$ "; break; case DocStyleChange::Center: if (s->enable()) m_t << "\\begin{center}"; else m_t << "\\end{center} "; break; case DocStyleChange::Small: if (s->enable()) m_t << "\n\\footnotesize "; else m_t << "\n\\normalsize "; break; case DocStyleChange::Preformatted: if (s->enable()) { m_t << "\n\\begin{DoxyPre}"; 
m_insidePre=TRUE; } else { m_insidePre=FALSE; m_t << "\\end{DoxyPre}\n"; } break; case DocStyleChange::Div: /* HTML only */ break; case DocStyleChange::Span: /* HTML only */ break; } } void LatexDocVisitor::visit(DocVerbatim *s) { if (m_hide) return; QCString lang = m_langExt; if (!s->language().isEmpty()) // explicit language setting { lang = s->language(); } switch(s->type()) { case DocVerbatim::Code: { m_t << "\n\\begin{DoxyCode}\n"; Doxygen::parserManager->getParser(lang) ->parseCode(m_ci,s->context(),s->text(), s->isExample(),s->exampleFile()); m_t << "\\end{DoxyCode}\n"; } break; case DocVerbatim::Verbatim: m_t << "\\begin{DoxyVerb}"; m_t << s->text(); m_t << "\\end{DoxyVerb}\n"; break; case DocVerbatim::HtmlOnly: case DocVerbatim::XmlOnly: case DocVerbatim::ManOnly: case DocVerbatim::RtfOnly: /* nothing */ break; case DocVerbatim::LatexOnly: m_t << s->text(); break; case DocVerbatim::Dot: { static int dotindex = 1; QCString fileName(4096); fileName.sprintf("%s%d%s", (Config_getString("LATEX_OUTPUT")+"/inline_dotgraph_").data(), dotindex++, ".dot" ); QFile file(fileName); if (!file.open(IO_WriteOnly)) { err("Could not open file %s for writing\n",fileName.data()); } file.writeBlock( s->text(), s->text().length() ); file.close(); m_t << "\\begin{center}\n"; startDotFile(fileName,"","",FALSE); endDotFile(FALSE); m_t << "\\end{center}\n"; if (Config_getBool("DOT_CLEANUP")) file.remove(); } break; case DocVerbatim::Msc: { static int mscindex = 1; QCString baseName(4096); baseName.sprintf("%s%d", (Config_getString("LATEX_OUTPUT")+"/inline_mscgraph_").data(), mscindex++ ); QFile file(baseName+".msc"); if (!file.open(IO_WriteOnly)) { err("Could not open file %s.msc for writing\n",baseName.data()); } QCString text = "msc {"; text+=s->text(); text+="}"; file.writeBlock( text, text.length() ); file.close(); m_t << "\\begin{center}\n"; writeMscFile(baseName); m_t << "\\end{center}\n"; if (Config_getBool("DOT_CLEANUP")) file.remove(); } break; } } void 
LatexDocVisitor::visit(DocAnchor *anc) { if (m_hide) return; m_t << "\\label{" << anc->file() << "_" << anc->anchor() << "}%" << endl; if (!anc->file().isEmpty() && Config_getBool("PDF_HYPERLINKS")) { m_t << "\\hypertarget{" << anc->file() << "_" << anc->anchor() << "}{}%" << endl; } } void LatexDocVisitor::visit(DocInclude *inc) { if (m_hide) return; switch(inc->type()) { case DocInclude::IncWithLines: { m_t << "\n\\begin{DoxyCodeInclude}\n"; QFileInfo cfi( inc->file() ); FileDef fd( cfi.dirPath().utf8(), cfi.fileName().utf8() ); Doxygen::parserManager->getParser(inc->extension()) ->parseCode(m_ci,inc->context(), inc->text(), inc->isExample(), inc->exampleFile(), &fd); m_t << "\\end{DoxyCodeInclude}" << endl; } break; case DocInclude::Include: m_t << "\n\\begin{DoxyCodeInclude}\n"; Doxygen::parserManager->getParser(inc->extension()) ->parseCode(m_ci,inc->context(), inc->text(),inc->isExample(), inc->exampleFile()); m_t << "\\end{DoxyCodeInclude}\n"; break; case DocInclude::DontInclude: break; case DocInclude::HtmlInclude: break; case DocInclude::VerbInclude: m_t << "\n\\begin{DoxyVerbInclude}\n"; m_t << inc->text(); m_t << "\\end{DoxyVerbInclude}\n"; break; case DocInclude::Snippet: { m_t << "\n\\begin{DoxyCodeInclude}\n"; Doxygen::parserManager->getParser(inc->extension()) ->parseCode(m_ci, inc->context(), extractBlock(inc->text(),inc->blockId()), inc->isExample(), inc->exampleFile() ); m_t << "\\end{DoxyCodeInclude}" << endl; } break; } } void LatexDocVisitor::visit(DocIncOperator *op) { //printf("DocIncOperator: type=%d first=%d, last=%d text=`%s'\n", // op->type(),op->isFirst(),op->isLast(),op->text().data()); if (op->isFirst()) { if (!m_hide) m_t << "\n\\begin{DoxyCodeInclude}\n"; pushEnabled(); m_hide = TRUE; } if (op->type()!=DocIncOperator::Skip) { popEnabled(); if (!m_hide) { Doxygen::parserManager->getParser(m_langExt) ->parseCode(m_ci,op->context(),op->text(), op->isExample(),op->exampleFile()); } pushEnabled(); m_hide=TRUE; } if (op->isLast()) { 
popEnabled(); if (!m_hide) m_t << "\n\\end{DoxyCodeInclude}\n"; } else { if (!m_hide) m_t << endl; } } void LatexDocVisitor::visit(DocFormula *f) { if (m_hide) return; m_t << f->text(); } void LatexDocVisitor::visit(DocIndexEntry *i) { if (m_hide) return; m_t << "\\index{" << escapeLabelName(i->entry()) << "@{"; escapeMakeIndexChars(i->entry()); m_t << "}}"; } void LatexDocVisitor::visit(DocSimpleSectSep *) { } void LatexDocVisitor::visit(DocCite *cite) { if (m_hide) return; if (!cite->file().isEmpty()) { //startLink(cite->ref(),cite->file(),cite->anchor()); QCString anchor = cite->anchor(); anchor = anchor.mid(CiteConsts::anchorPrefix.length()); // strip prefix m_t << "\\cite{" << anchor << "}"; } else { m_t << "{\\bfseries ["; filter(cite->text()); m_t << "]}"; } } //-------------------------------------- // visitor functions for compound nodes //-------------------------------------- void LatexDocVisitor::visitPre(DocAutoList *l) { if (m_hide) return; if (l->isEnumList()) { m_t << "\n\\begin{DoxyEnumerate}"; } else { m_t << "\n\\begin{DoxyItemize}"; } } void LatexDocVisitor::visitPost(DocAutoList *l) { if (m_hide) return; if (l->isEnumList()) { m_t << "\n\\end{DoxyEnumerate}"; } else { m_t << "\n\\end{DoxyItemize}"; } } void LatexDocVisitor::visitPre(DocAutoListItem *) { if (m_hide) return; m_t << "\n\\item "; } void LatexDocVisitor::visitPost(DocAutoListItem *) { } void LatexDocVisitor::visitPre(DocPara *) { } void LatexDocVisitor::visitPost(DocPara *p) { if (m_hide) return; if (!p->isLast() && // omit <p> for last paragraph !(p->parent() && // and for parameter sections p->parent()->kind()==DocNode::Kind_ParamSect ) ) m_t << endl << endl; } void LatexDocVisitor::visitPre(DocRoot *) { } void LatexDocVisitor::visitPost(DocRoot *) { } void LatexDocVisitor::visitPre(DocSimpleSect *s) { if (m_hide) return; switch(s->type()) { case DocSimpleSect::See: m_t << "\\begin{DoxySeeAlso}{"; filter(theTranslator->trSeeAlso()); break; case DocSimpleSect::Return: m_t << 
"\\begin{DoxyReturn}{"; filter(theTranslator->trReturns()); break; case DocSimpleSect::Author: m_t << "\\begin{DoxyAuthor}{"; filter(theTranslator->trAuthor(TRUE,TRUE)); break; case DocSimpleSect::Authors: m_t << "\\begin{DoxyAuthor}{"; filter(theTranslator->trAuthor(TRUE,FALSE)); break; case DocSimpleSect::Version: m_t << "\\begin{DoxyVersion}{"; filter(theTranslator->trVersion()); break; case DocSimpleSect::Since: m_t << "\\begin{DoxySince}{"; filter(theTranslator->trSince()); break; case DocSimpleSect::Date: m_t << "\\begin{DoxyDate}{"; filter(theTranslator->trDate()); break; case DocSimpleSect::Note: m_t << "\\begin{DoxyNote}{"; filter(theTranslator->trNote()); break; case DocSimpleSect::Warning: m_t << "\\begin{DoxyWarning}{"; filter(theTranslator->trWarning()); break; case DocSimpleSect::Pre: m_t << "\\begin{DoxyPrecond}{"; filter(theTranslator->trPrecondition()); break; case DocSimpleSect::Post: m_t << "\\begin{DoxyPostcond}{"; filter(theTranslator->trPostcondition()); break; case DocSimpleSect::Copyright: m_t << "\\begin{DoxyCopyright}{"; filter(theTranslator->trCopyright()); break; case DocSimpleSect::Invar: m_t << "\\begin{DoxyInvariant}{"; filter(theTranslator->trInvariant()); break; case DocSimpleSect::Remark: m_t << "\\begin{DoxyRemark}{"; filter(theTranslator->trRemarks()); break; case DocSimpleSect::Attention: m_t << "\\begin{DoxyAttention}{"; filter(theTranslator->trAttention()); break; case DocSimpleSect::User: m_t << "\\begin{DoxyParagraph}{"; break; case DocSimpleSect::Rcs: m_t << "\\begin{DoxyParagraph}{"; break; case DocSimpleSect::Unknown: break; } // special case 1: user defined title if (s->type()!=DocSimpleSect::User && s->type()!=DocSimpleSect::Rcs) { m_t << "}\n"; } else { m_insideItem=TRUE; } } void LatexDocVisitor::visitPost(DocSimpleSect *s) { if (m_hide) return; switch(s->type()) { case DocSimpleSect::See: m_t << "\n\\end{DoxySeeAlso}\n"; break; case DocSimpleSect::Return: m_t << "\n\\end{DoxyReturn}\n"; break; case 
DocSimpleSect::Author: m_t << "\n\\end{DoxyAuthor}\n"; break; case DocSimpleSect::Authors: m_t << "\n\\end{DoxyAuthor}\n"; break; case DocSimpleSect::Version: m_t << "\n\\end{DoxyVersion}\n"; break; case DocSimpleSect::Since: m_t << "\n\\end{DoxySince}\n"; break; case DocSimpleSect::Date: m_t << "\n\\end{DoxyDate}\n"; break; case DocSimpleSect::Note: m_t << "\n\\end{DoxyNote}\n"; break; case DocSimpleSect::Warning: m_t << "\n\\end{DoxyWarning}\n"; break; case DocSimpleSect::Pre: m_t << "\n\\end{DoxyPrecond}\n"; break; case DocSimpleSect::Post: m_t << "\n\\end{DoxyPostcond}\n"; break; case DocSimpleSect::Copyright: m_t << "\n\\end{DoxyCopyright}\n"; break; case DocSimpleSect::Invar: m_t << "\n\\end{DoxyInvariant}\n"; break; case DocSimpleSect::Remark: m_t << "\n\\end{DoxyRemark}\n"; break; case DocSimpleSect::Attention: m_t << "\n\\end{DoxyAttention}\n"; break; case DocSimpleSect::User: m_t << "\n\\end{DoxyParagraph}\n"; break; case DocSimpleSect::Rcs: m_t << "\n\\end{DoxyParagraph}\n"; break; default: break; } } void LatexDocVisitor::visitPre(DocTitle *) { } void LatexDocVisitor::visitPost(DocTitle *) { if (m_hide) return; m_insideItem=FALSE; m_t << "}\n"; } void LatexDocVisitor::visitPre(DocSimpleList *) { if (m_hide) return; m_t << "\\begin{DoxyItemize}" << endl; } void LatexDocVisitor::visitPost(DocSimpleList *) { if (m_hide) return; m_t << "\\end{DoxyItemize}" << endl; } void LatexDocVisitor::visitPre(DocSimpleListItem *) { if (m_hide) return; m_t << "\\item "; } void LatexDocVisitor::visitPost(DocSimpleListItem *) { } void LatexDocVisitor::visitPre(DocSection *s) { if (m_hide) return; if (Config_getBool("PDF_HYPERLINKS")) { m_t << "\\hypertarget{" << s->file() << "_" << s->anchor() << "}{}"; } m_t << "\\" << getSectionName(s->level()) << "{"; filter(convertCharEntitiesToUTF8(s->title().data())); m_t << "}\\label{" << s->file() << "_" << s->anchor() << "}" << endl; } void LatexDocVisitor::visitPost(DocSection *) { } void LatexDocVisitor::visitPre(DocHtmlList 
*s) { if (m_hide) return; if (s->type()==DocHtmlList::Ordered) m_t << "\n\\begin{DoxyEnumerate}"; else m_t << "\n\\begin{DoxyItemize}"; } void LatexDocVisitor::visitPost(DocHtmlList *s) { if (m_hide) return; if (s->type()==DocHtmlList::Ordered) m_t << "\n\\end{DoxyEnumerate}"; else m_t << "\n\\end{DoxyItemize}"; } void LatexDocVisitor::visitPre(DocHtmlListItem *) { if (m_hide) return; m_t << "\n\\item "; } void LatexDocVisitor::visitPost(DocHtmlListItem *) { } //void LatexDocVisitor::visitPre(DocHtmlPre *) //{ // m_t << "\\small\\begin{alltt}"; // m_insidePre=TRUE; //} //void LatexDocVisitor::visitPost(DocHtmlPre *) //{ // m_insidePre=FALSE; // m_t << "\\end{alltt}\\normalsize " << endl; //} void LatexDocVisitor::visitPre(DocHtmlDescList *dl) { if (m_hide) return; QCString val = dl->attribs().find("class"); if (val=="reflist") { m_t << "\n\\begin{DoxyRefList}"; } else { m_t << "\n\\begin{DoxyDescription}"; } } void LatexDocVisitor::visitPost(DocHtmlDescList *dl) { if (m_hide) return; QCString val = dl->attribs().find("class"); if (val=="reflist") { m_t << "\n\\end{DoxyRefList}"; } else { m_t << "\n\\end{DoxyDescription}"; } } void LatexDocVisitor::visitPre(DocHtmlDescTitle *) { if (m_hide) return; m_t << "\n\\item["; m_insideItem=TRUE; } void LatexDocVisitor::visitPost(DocHtmlDescTitle *) { if (m_hide) return; m_insideItem=FALSE; m_t << "]"; } void LatexDocVisitor::visitPre(DocHtmlDescData *) { } void LatexDocVisitor::visitPost(DocHtmlDescData *) { } void LatexDocVisitor::visitPre(DocHtmlTable *t) { m_rowSpans.clear(); m_insideTable=TRUE; if (m_hide) return; if (t->hasCaption()) { m_t << "\\begin{table}[h]"; } m_t << "\\begin{TabularC}{" << t->numColumns() << "}\n"; m_numCols = t->numColumns(); m_t << "\\hline\n"; } void LatexDocVisitor::visitPost(DocHtmlTable *t) { m_insideTable=FALSE; if (m_hide) return; if (t->hasCaption()) { m_t << "\\end{table}\n"; } else { m_t << "\\end{TabularC}\n"; } } void LatexDocVisitor::visitPre(DocHtmlCaption *) { if (m_hide) return; 
m_t << "\\end{TabularC}\n\\centering\n\\caption{"; } void LatexDocVisitor::visitPost(DocHtmlCaption *) { if (m_hide) return; m_t << "}\n"; } void LatexDocVisitor::visitPre(DocHtmlRow *r) { m_currentColumn = 0; if (r->isHeading()) m_t << "\\rowcolor{lightgray}"; } void LatexDocVisitor::visitPost(DocHtmlRow *row) { if (m_hide) return; int c=m_currentColumn; while (c<=m_numCols) // end of row while inside a row span? { uint i; for (i=0;i<m_rowSpans.count();i++) { ActiveRowSpan *span = m_rowSpans.at(i); //printf(" founc row span: column=%d rs=%d cs=%d rowIdx=%d cell->rowIdx=%d\n", // span->column, span->rowSpan,span->colSpan,row->rowIndex(),span->cell->rowIndex()); if (span->rowSpan>0 && span->column==c && // we are at a cell in a row span row->rowIndex()>span->cell->rowIndex() // but not the row that started the span ) { m_t << "&"; if (span->colSpan>1) // row span is also part of a column span { m_t << "\\multicolumn{" << span->colSpan << "}{"; m_t << "p{(\\linewidth-\\tabcolsep*" << m_numCols << "-\\arrayrulewidth*" << row->visibleCells() << ")*" << span->colSpan <<"/"<< m_numCols << "}|}{}"; } else // solitary row span { m_t << "\\multicolumn{1}{c|}{}"; } } } c++; } m_t << "\\\\"; int col = 1; uint i; for (i=0;i<m_rowSpans.count();i++) { ActiveRowSpan *span = m_rowSpans.at(i); if (span->rowSpan>0) span->rowSpan--; if (span->rowSpan<=0) { // inactive span } else if (span->column>col) { m_t << "\\cline{" << col << "-" << (span->column-1) << "}"; col = span->column+span->colSpan; } else { col = span->column+span->colSpan; } } if (col <= m_numCols) { m_t << "\\cline{" << col << "-" << m_numCols << "}"; } m_t << "\n"; } void LatexDocVisitor::visitPre(DocHtmlCell *c) { if (m_hide) return; DocHtmlRow *row = 0; if (c->parent() && c->parent()->kind()==DocNode::Kind_HtmlRow) { row = (DocHtmlRow*)c->parent(); } m_currentColumn++; //Skip columns that span from above. 
uint i; for (i=0;i<m_rowSpans.count();i++) { ActiveRowSpan *span = m_rowSpans.at(i); if (span->rowSpan>0 && span->column==m_currentColumn) { if (row && span->colSpan>1) { m_t << "\\multicolumn{" << span->colSpan << "}{"; if (m_currentColumn /*c->columnIndex()*/==1) // add extra | for first column { m_t << "|"; } m_t << "p{(\\linewidth-\\tabcolsep*" << m_numCols << "-\\arrayrulewidth*" << row->visibleCells() << ")*" << span->colSpan <<"/"<< m_numCols << "}|}{}"; m_currentColumn+=span->colSpan; } else { m_currentColumn++; } m_t << "&"; } } #if 0 QMap<int, int>::Iterator it = m_rowspanIndices.find(m_currentColumn); if (it!=m_rowspanIndices.end() && it.data()>0) { m_t << "&"; m_currentColumn++; it++; } #endif int cs = c->colSpan(); if (cs>1 && row) { m_inColspan = TRUE; m_t << "\\multicolumn{" << cs << "}{"; if (c->columnIndex()==1) // add extra | for first column { m_t << "|"; } m_t << "p{(\\linewidth-\\tabcolsep*" << m_numCols << "-\\arrayrulewidth*" << row->visibleCells() << ")*" << cs <<"/"<< m_numCols << "}|}{"; if (c->isHeading()) m_t << "\\cellcolor{lightgray}"; } int rs = c->rowSpan(); if (rs>0) { m_inRowspan = TRUE; //m_rowspanIndices[m_currentColumn] = rs; m_rowSpans.append(new ActiveRowSpan(c,rs,cs,m_currentColumn)); m_t << "\\multirow{" << rs << "}{\\linewidth}{"; } int a = c->alignment(); if (a==DocHtmlCell::Center) { m_t << "\\PBS\\centering "; } else if (a==DocHtmlCell::Right) { m_t << "\\PBS\\raggedleft "; } if (c->isHeading()) { m_t << "{\\bf "; } if (cs>1) { m_currentColumn+=cs-1; } } void LatexDocVisitor::visitPost(DocHtmlCell *c) { if (m_hide) return; if (c->isHeading()) { m_t << "}"; } if (m_inRowspan) { m_inRowspan = FALSE; m_t << "}"; } if (m_inColspan) { m_inColspan = FALSE; m_t << "}"; } if (!c->isLast()) m_t << "&"; } void LatexDocVisitor::visitPre(DocInternal *) { if (m_hide) return; //m_t << "\\begin{DoxyInternal}{"; //filter(theTranslator->trForInternalUseOnly()); //m_t << "}\n"; } void LatexDocVisitor::visitPost(DocInternal *) { if 
(m_hide) return; //m_t << "\\end{DoxyInternal}" << endl; } void LatexDocVisitor::visitPre(DocHRef *href) { if (m_hide) return; if (Config_getBool("PDF_HYPERLINKS")) { m_t << "\\href{"; m_t << href->url(); m_t << "}"; } m_t << "{\\tt "; } void LatexDocVisitor::visitPost(DocHRef *) { if (m_hide) return; m_t << "}"; } void LatexDocVisitor::visitPre(DocHtmlHeader *header) { if (m_hide) return; m_t << "\\" << getSectionName(header->level()) << "*{"; } void LatexDocVisitor::visitPost(DocHtmlHeader *) { if (m_hide) return; m_t << "}"; } void LatexDocVisitor::visitPre(DocImage *img) { if (img->type()==DocImage::Latex) { if (m_hide) return; if (img->hasCaption()) { m_t << "\n\\begin{DoxyImage}\n"; } else { m_t << "\n\\begin{DoxyImageNoCaption}\n" " \\mbox{"; } QCString gfxName = img->name(); if (gfxName.right(4)==".eps" || gfxName.right(4)==".pdf") { gfxName=gfxName.left(gfxName.length()-4); } m_t << "\\includegraphics"; if (!img->width().isEmpty()) { m_t << "[width=" << img->width() << "]"; } else if (!img->height().isEmpty()) { m_t << "[height=" << img->height() << "]"; } m_t << "{" << gfxName << "}"; if (img->hasCaption()) { m_t << "\n\\caption{"; } } else // other format -> skip { pushEnabled(); m_hide=TRUE; } } void LatexDocVisitor::visitPost(DocImage *img) { if (img->type()==DocImage::Latex) { if (m_hide) return; m_t << "}\n"; // end mbox or caption if (img->hasCaption()) { m_t << "\\end{DoxyImage}\n"; } else{ m_t << "\\end{DoxyImageNoCaption}\n"; } } else // other format { popEnabled(); } } void LatexDocVisitor::visitPre(DocDotFile *df) { if (m_hide) return; startDotFile(df->file(),df->width(),df->height(),df->hasCaption()); } void LatexDocVisitor::visitPost(DocDotFile *df) { if (m_hide) return; endDotFile(df->hasCaption()); } void LatexDocVisitor::visitPre(DocMscFile *df) { if (m_hide) return; startMscFile(df->file(),df->width(),df->height(),df->hasCaption()); } void LatexDocVisitor::visitPost(DocMscFile *df) { if (m_hide) return; endMscFile(df->hasCaption()); } 
void LatexDocVisitor::visitPre(DocLink *lnk) { if (m_hide) return; startLink(lnk->ref(),lnk->file(),lnk->anchor()); } void LatexDocVisitor::visitPost(DocLink *lnk) { if (m_hide) return; endLink(lnk->ref(),lnk->file(),lnk->anchor()); } void LatexDocVisitor::visitPre(DocRef *ref) { if (m_hide) return; // when ref->isSubPage()==TRUE we use ref->file() for HTML and // ref->anchor() for LaTeX/RTF if (ref->isSubPage()) { startLink(ref->ref(),0,ref->anchor()); } else { if (!ref->file().isEmpty()) startLink(ref->ref(),ref->file(),ref->anchor()); } if (!ref->hasLinkText()) filter(ref->targetTitle()); } void LatexDocVisitor::visitPost(DocRef *ref) { if (m_hide) return; if (ref->isSubPage()) { endLink(ref->ref(),0,ref->anchor()); } else { if (!ref->file().isEmpty()) endLink(ref->ref(),ref->file(),ref->anchor()); } } void LatexDocVisitor::visitPre(DocSecRefItem *) { if (m_hide) return; m_t << "\\item \\contentsline{section}{"; } void LatexDocVisitor::visitPost(DocSecRefItem *ref) { if (m_hide) return; m_t << "}{\\ref{" << ref->file() << "_" << ref->anchor() << "}}{}" << endl; } void LatexDocVisitor::visitPre(DocSecRefList *) { if (m_hide) return; m_t << "\\footnotesize" << endl; m_t << "\\begin{multicols}{2}" << endl; m_t << "\\begin{DoxyCompactList}" << endl; } void LatexDocVisitor::visitPost(DocSecRefList *) { if (m_hide) return; m_t << "\\end{DoxyCompactList}" << endl; m_t << "\\end{multicols}" << endl; m_t << "\\normalsize" << endl; } void LatexDocVisitor::visitPre(DocParamSect *s) { if (m_hide) return; bool hasInOutSpecs = s->hasInOutSpecifier(); bool hasTypeSpecs = s->hasTypeSpecifier(); switch(s->type()) { case DocParamSect::Param: m_t << "\n\\begin{DoxyParams}"; if (hasInOutSpecs && hasTypeSpecs) m_t << "[2]"; // 2 extra cols else if (hasInOutSpecs || hasTypeSpecs) m_t << "[1]"; // 1 extra col m_t << "{"; filter(theTranslator->trParameters()); break; case DocParamSect::RetVal: m_t << "\n\\begin{DoxyRetVals}{"; filter(theTranslator->trReturnValues()); break; case 
DocParamSect::Exception: m_t << "\n\\begin{DoxyExceptions}{"; filter(theTranslator->trExceptions()); break; case DocParamSect::TemplateParam: /* TODO: add this filter(theTranslator->trTemplateParam()); break; */ m_t << "\n\\begin{DoxyTemplParams}{"; filter("Template Parameters"); break; default: ASSERT(0); } m_t << "}\n"; } void LatexDocVisitor::visitPost(DocParamSect *s) { if (m_hide) return; switch(s->type()) { case DocParamSect::Param: m_t << "\\end{DoxyParams}\n"; break; case DocParamSect::RetVal: m_t << "\\end{DoxyRetVals}\n"; break; case DocParamSect::Exception: m_t << "\\end{DoxyExceptions}\n"; break; case DocParamSect::TemplateParam: m_t << "\\end{DoxyTemplParams}\n"; break; default: ASSERT(0); } } void LatexDocVisitor::visitPre(DocParamList *pl) { if (m_hide) return; DocParamSect::Type parentType = DocParamSect::Unknown; DocParamSect *sect = 0; if (pl->parent() && pl->parent()->kind()==DocNode::Kind_ParamSect) { parentType = ((DocParamSect*)pl->parent())->type(); sect=(DocParamSect*)pl->parent(); } bool useTable = parentType==DocParamSect::Param || parentType==DocParamSect::RetVal || parentType==DocParamSect::Exception || parentType==DocParamSect::TemplateParam; if (!useTable) { m_t << "\\item["; } if (sect && sect->hasInOutSpecifier()) { if (pl->direction()!=DocParamSect::Unspecified) { m_t << "\\mbox{\\tt "; if (pl->direction()==DocParamSect::In) { m_t << "in"; } else if (pl->direction()==DocParamSect::Out) { m_t << "out"; } else if (pl->direction()==DocParamSect::InOut) { m_t << "in,out"; } m_t << "} "; } if (useTable) m_t << " & "; } if (sect && sect->hasTypeSpecifier()) { QListIterator<DocNode> li(pl->paramTypes()); DocNode *type; bool first=TRUE; for (li.toFirst();(type=li.current());++li) { if (!first) m_t << " | "; else first=FALSE; if (type->kind()==DocNode::Kind_Word) { visit((DocWord*)type); } else if (type->kind()==DocNode::Kind_LinkedWord) { visit((DocLinkedWord*)type); } } if (useTable) m_t << " & "; } m_t << "{\\em "; //QStrListIterator 
li(pl->parameters()); //const char *s; QListIterator<DocNode> li(pl->parameters()); DocNode *param; bool first=TRUE; for (li.toFirst();(param=li.current());++li) { if (!first) m_t << ","; else first=FALSE; m_insideItem=TRUE; if (param->kind()==DocNode::Kind_Word) { visit((DocWord*)param); } else if (param->kind()==DocNode::Kind_LinkedWord) { visit((DocLinkedWord*)param); } m_insideItem=FALSE; } m_t << "}"; if (useTable) { m_t << " & "; } else { m_t << "]"; } } void LatexDocVisitor::visitPost(DocParamList *pl) { if (m_hide) return; DocParamSect::Type parentType = DocParamSect::Unknown; if (pl->parent() && pl->parent()->kind()==DocNode::Kind_ParamSect) { parentType = ((DocParamSect*)pl->parent())->type(); } bool useTable = parentType==DocParamSect::Param || parentType==DocParamSect::RetVal || parentType==DocParamSect::Exception || parentType==DocParamSect::TemplateParam; if (useTable) { m_t << "\\\\" << endl << "\\hline" << endl; } } void LatexDocVisitor::visitPre(DocXRefItem *x) { if (m_hide) return; m_t << "\\begin{DoxyRefDesc}{"; filter(x->title()); m_t << "}" << endl; bool anonymousEnum = x->file()=="@"; m_t << "\\item["; if (Config_getBool("PDF_HYPERLINKS") && !anonymousEnum) { m_t << "\\hyperlink{" << stripPath(x->file()) << "_" << x->anchor() << "}{"; } else { m_t << "{\\bf "; } m_insideItem=TRUE; filter(x->title()); m_insideItem=FALSE; m_t << "}]"; } void LatexDocVisitor::visitPost(DocXRefItem *) { if (m_hide) return; m_t << "\\end{DoxyRefDesc}" << endl; } void LatexDocVisitor::visitPre(DocInternalRef *ref) { if (m_hide) return; startLink(0,ref->file(),ref->anchor()); } void LatexDocVisitor::visitPost(DocInternalRef *ref) { if (m_hide) return; endLink(0,ref->file(),ref->anchor()); } void LatexDocVisitor::visitPre(DocCopy *) { } void LatexDocVisitor::visitPost(DocCopy *) { } void LatexDocVisitor::visitPre(DocText *) { } void LatexDocVisitor::visitPost(DocText *) { } void LatexDocVisitor::visitPre(DocHtmlBlockQuote *) { if (m_hide) return; m_t << 
"\\begin{quotation}" << endl; } void LatexDocVisitor::visitPost(DocHtmlBlockQuote *) { if (m_hide) return; m_t << "\\end{quotation}" << endl; } void LatexDocVisitor::filter(const char *str) { filterLatexString(m_t,str,m_insideTabbing,m_insidePre,m_insideItem); } void LatexDocVisitor::startLink(const QCString &ref,const QCString &file,const QCString &anchor) { if (ref.isEmpty() && Config_getBool("PDF_HYPERLINKS")) // internal PDF link { if (ref.isEmpty()) { m_t << "\\hyperlink{"; if (!file.isEmpty()) m_t << stripPath(file); if (!file.isEmpty() && !anchor.isEmpty()) m_t << "_"; if (!anchor.isEmpty()) m_t << anchor; m_t << "}{"; } else { QCString *dest; m_t << "\\href{"; if ((dest=Doxygen::tagDestinationDict[ref])) m_t << *dest << "/"; if (!file.isEmpty()) m_t << file << Doxygen::htmlFileExtension; if (!anchor.isEmpty()) m_t << "#" << anchor; m_t << "}{"; } } else if (ref.isEmpty()) // internal non-PDF link { m_t << "\\doxyref{"; } else // external link { m_t << "{\\bf "; } } void LatexDocVisitor::endLink(const QCString &ref,const QCString &file,const QCString &anchor) { m_t << "}"; if (ref.isEmpty() && !Config_getBool("PDF_HYPERLINKS")) { m_t << "{"; filter(theTranslator->trPageAbbreviation()); m_t << "}{" << file; if (!anchor.isEmpty()) m_t << "_" << anchor; m_t << "}"; } } void LatexDocVisitor::pushEnabled() { m_enabled.push(new bool(m_hide)); } void LatexDocVisitor::popEnabled() { bool *v=m_enabled.pop(); ASSERT(v!=0); m_hide = *v; delete v; } void LatexDocVisitor::startDotFile(const QCString &fileName, const QCString &width, const QCString &height, bool hasCaption ) { QCString baseName=fileName; int i; if ((i=baseName.findRev('/'))!=-1) { baseName=baseName.right(baseName.length()-i-1); } if ((i=baseName.find('.'))!=-1) { baseName=baseName.left(i); } baseName.prepend("dot_"); QCString outDir = Config_getString("LATEX_OUTPUT"); QCString name = fileName; writeDotGraphFromFile(name,outDir,baseName,EPS); if (hasCaption) { m_t << "\n\\begin{DoxyImage}\n"; } else { m_t 
<< "\n\\begin{DoxyImageNoCaption}\n" " \\mbox{"; } m_t << "\\includegraphics"; if (!width.isEmpty()) { m_t << "[width=" << width << "]"; } else if (!height.isEmpty()) { m_t << "[height=" << height << "]"; } else { m_t << "[width=\\textwidth]"; } m_t << "{" << baseName << "}"; if (hasCaption) { m_t << "\n\\caption{";<|fim▁hole|>{ if (m_hide) return; m_t << "}\n"; // end caption or mbox if (hasCaption) { m_t << "\\end{DoxyImage}\n"; } else { m_t << "\\end{DoxyImageNoCaption}\n"; } } void LatexDocVisitor::startMscFile(const QCString &fileName, const QCString &width, const QCString &height, bool hasCaption ) { QCString baseName=fileName; int i; if ((i=baseName.findRev('/'))!=-1) { baseName=baseName.right(baseName.length()-i-1); } if ((i=baseName.find('.'))!=-1) { baseName=baseName.left(i); } baseName.prepend("msc_"); QCString outDir = Config_getString("LATEX_OUTPUT"); writeMscGraphFromFile(fileName,outDir,baseName,MSC_EPS); if (hasCaption) { m_t << "\n\\begin{DoxyImage}\n"; } else { m_t << "\n\\begin{DoxyImageNoCaption}\n" " \\mbox{"; } m_t << "\\includegraphics"; if (!width.isEmpty()) { m_t << "[width=" << width << "]"; } else if (!height.isEmpty()) { m_t << "[height=" << height << "]"; } else { m_t << "[width=\\textwidth]"; } m_t << "{" << baseName << "}"; if (hasCaption) { m_t << "\n\\caption{"; } } void LatexDocVisitor::endMscFile(bool hasCaption) { if (m_hide) return; m_t << "}\n"; // end caption or mbox if (hasCaption) { m_t << "\\end{DoxyImage}\n"; } else { m_t << "\\end{DoxyImageNoCaption}\n"; } } void LatexDocVisitor::writeMscFile(const QCString &baseName) { QCString shortName = baseName; int i; if ((i=shortName.findRev('/'))!=-1) { shortName=shortName.right(shortName.length()-i-1); } QCString outDir = Config_getString("LATEX_OUTPUT"); writeMscGraphFromFile(baseName+".msc",outDir,shortName,MSC_EPS); m_t << "\n\\begin{DoxyImageNoCaption}" " \\mbox{\\includegraphics"; m_t << "{" << shortName << "}"; m_t << "}\n"; // end mbox m_t << "\\end{DoxyImageNoCaption}\n"; 
}<|fim▁end|>
} } void LatexDocVisitor::endDotFile(bool hasCaption)
<|file_name|>default.js<|end_file_name|><|fim▁begin|>var index = require('express').Router(); var Member = require('../models/Member'); var sitemap = require('../middlewares/sitemap'); var utils = require('../middlewares/utils'); index.get('/', function (req, res) { res.redirect('/article'); }); index.get('/lang', function (req, res) { var setLang; if (req.cookies.lang == undefined || req.cookies.lang == 'zh-cn') { setLang = 'en-us'; } else { setLang = 'zh-cn'; }<|fim▁hole|> res.redirect('/'); }); index.get('/lib/contact-us', function (req, res) { res.render('contactUs', { description: '身为工大学子的你,如果对软件开发或是产品设计有兴趣,欢迎向我们投递简历。' }); }); index.get('/lib/sitemap.xml', function (req, res) { sitemap.createXml(function (xml) { res.contentType('text/xml'); res.send(xml); res.end(); }); }); module.exports = index;<|fim▁end|>
res.cookie('lang', setLang);
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url from . import views urlpatterns = [<|fim▁hole|> url(r'^$', views.index, name='index'), url(r'^(?P<quiz_slug>[-A-Za-z0-9_]+)/$', views.quiz, name='quiz'), url(r'^(?P<quiz_slug>[-A-Za-z0-9_]+)/(?P<question_slug>[-A-Za-z0-9_]+)/$', views.question, name='question') ]<|fim▁end|>
<|file_name|>list.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>/***************************************************************************** * * This file is part of Mapnik (c++ mapping toolkit) * * Copyright (C) 2015 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * *****************************************************************************/ #ifndef TEXT_PLACEMENTS_LIST_HPP #define TEXT_PLACEMENTS_LIST_HPP #include <mapnik/text/placements/base.hpp> namespace mapnik { class text_placement_info_list; class feature_impl; struct attribute; // Tries a list of placements. class text_placements_list: public text_placements { public: text_placements_list(); text_placement_info_ptr get_placement_info(double scale_factor, feature_impl const& feature, attributes const& vars, symbol_cache const& sc) const; virtual void add_expressions(expression_set & output) const; text_symbolizer_properties & add(); text_symbolizer_properties & get(unsigned i); std::size_t size() const; static text_placements_ptr from_xml(xml_node const& xml, fontset_map const& fontsets, bool is_shield); private: std::vector<text_symbolizer_properties> list_; friend class text_placement_info_list; }; // List placement strategy. // See parent class for documentation of each function. 
class text_placement_info_list : public text_placement_info { public: text_placement_info_list(text_placements_list const* parent, double scale_factor) : text_placement_info(parent, scale_factor), state(0), parent_(parent) {} bool next() const; virtual void reset_state() { state = 0; } private: mutable unsigned state; text_placements_list const* parent_; }; } //namespace #endif<|fim▁end|>
<|file_name|>print.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node<|fim▁hole|>// // Test displaying DNS records var fs = require('fs') var tap = require('tap') var test = tap.test var util = require('util') var Message = require('../message') test('Display a message', function(t) { var file = 'oreilly.com-response' fs.readFile(__dirname+'/../_test_data/'+file, function(er, data) { if(er) throw er var msg = new Message(data) , str = util.format('%s', msg) , json = JSON.stringify(msg) t.type(str, 'string', 'Message can stringify') var obj = JSON.parse(util.format('%j', msg)) t.equal(obj.id, 45753, 'JSON round-trip: id') t.equal(obj.type, 'response', 'JSON round-trip: type') t.equal(obj.opcode, 'query', 'JSON round-trip: opcode') t.equal(obj.authoritative, true, 'JSON round-trip: authoritative') t.equal(obj.truncated, false, 'JSON round-trip: truncated') t.equal(obj.recursion_desired, true, 'JSON round-trip: recursion_desired') t.equal(obj.recursion_available, true, 'JSON round-trip: recursion_available') t.equal(obj.responseCode, 0, 'JSON round-trip: responseCode') t.end() }) })<|fim▁end|>
// // Copyright 2012 Iris Couch, all rights reserved.
<|file_name|>SaphireData.py<|end_file_name|><|fim▁begin|># Copyright 2017 Battelle Energy Alliance, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import os import copy import re """ Created on July 12, 2018 @author: wangc """ def _deweird(s): """ Sometimes numpy loadtxt returns strings like "b'stuff'" This converts them to "stuff" @ In, s, str, possibly weird string @ Out, _deweird, str, possibly less weird string """ if type(s) == str and s.startswith("b'") and s.endswith("'"):<|fim▁hole|> return s[2:-1] else: return s class SaphireData: """ Class that parses output of SAPHIRE outputs and write a RAVEN compatible CSV """ def __init__(self, outFiles): """ Initialize the class @ In, outFiles, list, list of output files of SAPHIRE @ Out, None """ self.headerNames = [] # list of variable names in SAPHIRE output files self.outData = [] # list of variable values in SAPHIRE output files for outFile in outFiles: outFileName, outFileType = outFile[0], outFile[1] if outFileType == 'uncertainty': headers, data = self.getUncertainty(outFileName) self.headerNames.extend(headers) self.outData.extend(data) elif outFileType == 'importance': headers, data = self.getImportance(outFileName) self.headerNames.extend(headers) self.outData.extend(data) elif outFileType == 'quantiles': print("File:",outFileName, "with type", outFileType, "is not implemented yet! 
Skipping" ) pass else: raise IOError('The output file', outFileName, 'with type', outFileType, 'is not supported yet!') def getUncertainty(self, outName): """ Method to extract the uncertainty information of Event Tree or Fault Tree from SAPHIRE output files @ In, outName, string, the name of output file @ Out, (headerNames,outData), tuple, where headerNames is a list of output variable names and outData is a list of output variable values """ headerNames = [] outData = [] outFile = os.path.abspath(os.path.expanduser(outName)) data = np.loadtxt(outFile, dtype=object, delimiter=',', skiprows=2) headers = data[0] for i in range(1, len(data)): for j in range(1, len(headers)): name = _deweird(data[i,0]).strip().replace(" ", "~") header = _deweird(headers[j]).strip().replace(" ", "~") headerNames.append(name + '_' + header) outData.append(float(_deweird(data[i,j]))) return headerNames, outData def getImportance(self, outName): """ Method to extract the importance information of Fault Tree from SAPHIRE output files @ In, outName, string, the name of output file @ Out, headerNames, list, list of output variable names @ Out, outData, list, list of output variable values """ headerNames = [] outData = [] outFile = os.path.abspath(os.path.expanduser(outName)) data = np.loadtxt(outFile, dtype=object, delimiter=',', skiprows=2) headers = data[0] for i in range(1, len(data)): for j in range(1, len(headers)): name = _deweird(data[i,0]).strip().replace(" ", "~") header = _deweird(headers[j]).strip().replace(" ", "~") headerNames.append(name + '_' + header) outData.append(float(_deweird(data[i,j]))) return headerNames, outData def writeCSV(self, output): """ Print data into CSV format @ In, output, str, the name of output file @ Out, None """ outObj = open(output.strip()+".csv", mode='w+b') if not output.endswith('csv') else open(output.strip(), mode='w+b') # create string for header names headerString = ",".join(self.headerNames) # write & save array as csv file # FIXME: There is 
a problem with the numpy.savetxt, if provided data is 1D array_like, the demiliter will be # ignored, and out file format is not correct np.savetxt(outObj, [self.outData], delimiter=',', header=headerString, comments='') outObj.close()<|fim▁end|>
<|file_name|>editor.rs<|end_file_name|><|fim▁begin|>use std::env; use std::os; use std::process::{Command,Stdio}; use rand::Rng; use rand; use std::io::{Read,Write,BufReader,BufRead}; use std::fs; use std::path::{Path,PathBuf}; use std::fs::File; use super::Note;<|fim▁hole|>use std::io::Result; pub fn edit_note(note: &Note) -> Result<Note> { let editor = get_editor(); // is "vi" println!("Have the editor as {}", editor); let path = tmp_file_path(); println!("Have the path as {:?}", path); { let mut fp = try!(File::create(&path)); try!(fp.write_all(note.as_markdown().as_bytes())); } let child = Command::new(&editor) .arg(path.to_str().unwrap()) .stdin(Stdio::inherit()) .stdout(Stdio::inherit()) .status() .unwrap(); let mut f = File::open(&path).unwrap(); let mut new_note = parse_note(&mut f).unwrap(); new_note.id = note.id; fs::remove_file(&path); Ok(new_note) } fn get_editor() -> String { let terminal_is_dumb = match env::var("TERM") { Ok(val) => val == "dumb", Err(_) => true, }; if !terminal_is_dumb { match env::var("VISUAL") { Ok(name) => { return name; }, Err(_) => { }, } } match env::var("EDITOR") { Ok(name) => { return name; }, Err(_) => { }, } let mut default_editor = String::new(); default_editor.push_str("vi"); default_editor } fn tmp_file_path() -> PathBuf { let mut rng = rand::thread_rng(); let filename: String = rng.gen_ascii_chars().take(10).collect(); let mut path: PathBuf = util::nt_dir().unwrap(); path.push(filename); path } fn parse_note(fp: &mut File) -> Result<Note> { let mut reader = BufReader::new(fp); let mut lines = reader.lines(); let name = lines.nth(0_usize).unwrap().unwrap(); let mut content = String::new(); for line in lines.skip(1_usize) { content.push_str(&line.unwrap()); content.push_str("\n"); } Ok(Note::new(None, &name, &content)) }<|fim▁end|>
use super::util;