text
stringlengths 2
1.04M
| meta
dict |
---|---|
module Kaminari
  # Active Record specific page scope methods implementations
  module ActiveRecordRelationMethods
    # Used for page_entry_info
    #
    # Returns the humanized model name, pluralized unless options[:count] == 1.
    # All options are forwarded to I18n via model_name.human.
    def entry_name(options = {})
      default = options[:count] == 1 ? model_name.human : model_name.human.pluralize
      model_name.human(options.reverse_merge(default: default))
    end
    # Drop the memoized total count so a reset relation recomputes it.
    def reset #:nodoc:
      @total_count = nil
      super
    end
    # Total number of records for the paginated scope, memoized in
    # @total_count.  Avoids issuing a COUNT query when the answer can be
    # deduced from already-loaded records.
    def total_count(column_name = :all, _options = nil) #:nodoc:
      return @total_count if defined?(@total_count) && @total_count
      # There are some cases that total count can be deduced from loaded records
      if loaded?
        # Total count has to be 0 if loaded records are 0
        return @total_count = 0 if (current_page == 1) && @records.empty?
        # Total count is calculable at the last page
        return @total_count = (current_page - 1) * limit_value + @records.length if @records.any? && (@records.length < limit_value)
      end
      # #count overrides the #select which could include generated columns referenced in #order, so skip #order here, where it's irrelevant to the result anyway
      c = except(:offset, :limit, :order)
      # Remove includes only if they are irrelevant
      c = c.except(:includes) unless references_eager_loaded_tables?
      # Cap the counted rows when max_pages is configured so we never report
      # more pages than the application allows.
      c = c.limit(max_pages * limit_value) if max_pages && max_pages.respond_to?(:*)
      # Handle grouping with a subquery
      @total_count = if c.group_values.any?
        c.model.from(c.except(:select).select("1")).count
      else
        c.count(column_name)
      end
    end
    # Turn this Relation to a "without count mode" Relation.
    # Note that the "without count mode" is supposed to be performant but has a feature limitation.
    #   Pro: paginates without casting an extra SELECT COUNT query
    #   Con: unable to know the total number of records/pages
    def without_count
      extend ::Kaminari::PaginatableWithoutCount
    end
  end
  # A module that makes AR::Relation paginatable without having to cast another SELECT COUNT query
  module PaginatableWithoutCount
    # Overwrite AR::Relation#load to actually load one more record to judge if the page has next page
    # then store the result in @_has_next ivar
    def load
      if loaded? || limit_value.nil?
        super
      else
        # Fetch limit + 1 rows; the presence of the extra row tells us whether
        # a next page exists.
        @values[:limit] = limit_value + 1
        # FIXME: this could be removed when we're dropping AR 4 support
        @arel.limit = @values[:limit] if @arel && (Integer === @arel.limit)
        super
        # Restore the original limit (limit_value is now the inflated value).
        @values[:limit] = limit_value - 1
        # FIXME: this could be removed when we're dropping AR 4 support
        @arel.limit = @values[:limit] if @arel && (Integer === @arel.limit)
        if @records.any?
          @records = @records.dup if (frozen = @records.frozen?)
          # delete_at returns nil when the extra row was not fetched, so
          # @_has_next is true only when one more record than the page exists.
          @_has_next = !!@records.delete_at(limit_value)
          @records.freeze if frozen
        end
        self
      end
    end
    # The page wouldn't be the last page if there's "limit + 1" record
    def last_page?
      !out_of_range? && !@_has_next
    end
    # Empty relation needs no pagination
    def out_of_range?
      load unless loaded?
      @records.empty?
    end
    # Force to raise an exception if #total_count is called explicitly.
    def total_count
      raise "This scope is marked as a non-count paginable scope and can't be used in combination " \
        "with `#paginate' or `#page_entries_info'. Use #link_to_next_page or #link_to_previous_page instead."
    end
  end
end
| {
"content_hash": "fbdd228d7d20d0ba5885cc0c5f338b00",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 160,
"avg_line_length": 37.795698924731184,
"alnum_prop": 0.6475106685633002,
"repo_name": "amatsuda/kaminari",
"id": "41337e91188ab475eafb559e5d463666ec0612ca",
"size": "3546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kaminari-activerecord/lib/kaminari/activerecord/active_record_relation_methods.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "10201"
},
{
"name": "Ruby",
"bytes": "121586"
}
],
"symlink_target": ""
} |
/* Linux needs some special initialization, but otherwise uses
the generic dynamic linker system interface code. */
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/utsname.h>
#include <ldsodefs.h>
#include <kernel-features.h>
#ifdef SHARED
/* Hook invoked early by the generic dynamic-linker startup code
   (elf/dl-sysdep.c, included below).  */
# define DL_SYSDEP_INIT frob_brk ()

static inline void
frob_brk (void)
{
  __brk (0);		/* Initialize the break.  */

#if ! __ASSUME_BRK_PAGE_ROUNDED
  /* If the dynamic linker was executed as a program, then the break may
     start immediately after our data segment.  However, dl-minimal.c has
     already stolen the remainder of the page for internal allocations.
     If we don't adjust the break location recorded by the kernel, the
     normal program startup will inquire, find the value at our &_end,
     and start allocating its own data there, clobbering dynamic linker
     data structures allocated there during startup.
     Later Linux kernels have changed this behavior so that the initial
     break value is rounded up to the page boundary before we start.  */
  extern char *__curbrk attribute_hidden;
  extern char _end[] attribute_hidden;
  /* Round the current break up to the next page boundary; offsets are
     taken relative to (char *) 0 because __curbrk is a pointer.  */
  char *const endpage = (void *) 0 + (((__curbrk - (char *) 0)
				       + GLRO(dl_pagesize) - 1)
				      & -GLRO(dl_pagesize));
  /* Only push the break forward when it sits between _end and the page
     boundary (the unexpected, un-rounded kernel case).  */
  if (__builtin_expect (__curbrk >= _end && __curbrk < endpage, 0))
    __brk (endpage);
#endif
}

# include <elf/dl-sysdep.c>
#endif
/* Determine the version of the running kernel, packed one byte per
   dot-separated component (e.g. "2.6.32" -> 0x020620), or -1 on
   failure.  The packed form lets callers compare versions as plain
   integers.  */
int
attribute_hidden
_dl_discover_osversion (void)
{
#if (defined NEED_DL_SYSINFO || defined NEED_DL_SYSINFO_DSO) && defined SHARED
  if (GLRO(dl_sysinfo_map) != NULL)
    {
      /* If the kernel-supplied DSO contains a note indicating the kernel's
	 version, we don't need to call uname or parse any strings.  */
      static const struct
      {
	ElfW(Nhdr) hdr;
	char vendor[8];
      } expected_note = { { sizeof "Linux", sizeof (ElfW(Word)), 0 }, "Linux" };
      const ElfW(Phdr) *const phdr = GLRO(dl_sysinfo_map)->l_phdr;
      const ElfW(Word) phnum = GLRO(dl_sysinfo_map)->l_phnum;
      for (uint_fast16_t i = 0; i < phnum; ++i)
	if (phdr[i].p_type == PT_NOTE)
	  {
	    const ElfW(Addr) start = (phdr[i].p_vaddr
				      + GLRO(dl_sysinfo_map)->l_addr);
	    const ElfW(Nhdr) *note = (const void *) start;
	    while ((ElfW(Addr)) (note + 1) - start < phdr[i].p_memsz)
	      {
		/* The version word directly follows a matching
		   header + "Linux" vendor prefix.  */
		if (!memcmp (note, &expected_note, sizeof expected_note))
		  return *(const ElfW(Word) *) ((const void *) note
						+ sizeof expected_note);
		/* Advance to the next note; name and descriptor lengths
		   are padded to word alignment.  */
#define ROUND(len) (((len) + sizeof note->n_type - 1) & -sizeof note->n_type)
		note = ((const void *) (note + 1)
			+ ROUND (note->n_namesz) + ROUND (note->n_descsz));
#undef ROUND
	      }
	  }
    }
#endif

  char bufmem[64];
  char *buf = bufmem;
  unsigned int version;
  int parts;
  char *cp;
  struct utsname uts;

  /* Try the uname system call.  */
  if (__uname (&uts))
    {
      /* This was not successful.  Now try reading the /proc filesystem.  */
      int fd = __open ("/proc/sys/kernel/osrelease", O_RDONLY);
      if (fd < 0)
	return -1;
      ssize_t reslen = __read (fd, bufmem, sizeof (bufmem));
      __close (fd);
      if (reslen <= 0)
	/* This also didn't work.  We give up since we cannot
	   make sure the library can actually work.  */
	return -1;
      /* NUL-terminate, clamping to the buffer in case /proc filled all
	 64 bytes.  */
      buf[MIN (reslen, (ssize_t) sizeof (bufmem) - 1)] = '\0';
    }
  else
    buf = uts.release;

  /* Now convert it into a number.  The string consists of at most
     three parts.  */
  version = 0;
  parts = 0;
  cp = buf;
  while ((*cp >= '0') && (*cp <= '9'))
    {
      unsigned int here = *cp++ - '0';
      while ((*cp >= '0') && (*cp <= '9'))
	{
	  here *= 10;
	  here += *cp++ - '0';
	}
      ++parts;
      version <<= 8;
      version |= here;
      if (*cp++ != '.' || parts == 3)
	/* Another part following?  */
	break;
    }
  /* Pad missing components with zero bytes so "2.6" and "2.6.0" compare
     equal.  */
  if (parts < 3)
    version <<= 8 * (3 - parts);
  return version;
}
| {
"content_hash": "a263eb1ecc4ec78a0f291d101d81baa8",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 80,
"avg_line_length": 28.036496350364963,
"alnum_prop": 0.6040093725592294,
"repo_name": "endplay/omniplay",
"id": "34e88089286edc2839158c28882268b410ce2fc8",
"size": "4770",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "eglibc-2.15/sysdeps/unix/sysv/linux/dl-sysdep.c",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ASP",
"bytes": "4528"
},
{
"name": "Assembly",
"bytes": "17491433"
},
{
"name": "Awk",
"bytes": "79791"
},
{
"name": "Batchfile",
"bytes": "903"
},
{
"name": "C",
"bytes": "444772157"
},
{
"name": "C++",
"bytes": "10631343"
},
{
"name": "GDB",
"bytes": "17950"
},
{
"name": "HTML",
"bytes": "47935"
},
{
"name": "Java",
"bytes": "2193"
},
{
"name": "Lex",
"bytes": "44513"
},
{
"name": "M4",
"bytes": "9029"
},
{
"name": "Makefile",
"bytes": "1758605"
},
{
"name": "Objective-C",
"bytes": "5278898"
},
{
"name": "Perl",
"bytes": "649746"
},
{
"name": "Perl 6",
"bytes": "1101"
},
{
"name": "Python",
"bytes": "585875"
},
{
"name": "RPC",
"bytes": "97869"
},
{
"name": "Roff",
"bytes": "2522798"
},
{
"name": "Scilab",
"bytes": "21433"
},
{
"name": "Shell",
"bytes": "426172"
},
{
"name": "TeX",
"bytes": "283872"
},
{
"name": "UnrealScript",
"bytes": "6143"
},
{
"name": "XS",
"bytes": "1240"
},
{
"name": "Yacc",
"bytes": "93190"
},
{
"name": "sed",
"bytes": "9202"
}
],
"symlink_target": ""
} |
# Adds lookup and value tables for per-organization student identifiers,
# and links identifier types to assignments and group assignments.
class CreateStudentIdentifiers < ActiveRecord::Migration
  def change
    # Catalog of identifier kinds an organization may define.
    create_table :student_identifier_types do |table|
      table.belongs_to :organization, index: true
      table.string :name, null: false
      table.string :description, null: false
      table.integer :content_type, null: false
      table.datetime :created_at, null: false
      table.datetime :updated_at, null: false
      table.datetime :deleted_at
    end
    # A user's concrete identifier value for a given type.
    create_table :student_identifiers do |table|
      table.belongs_to :organization, index: true
      table.belongs_to :user, index: true
      table.belongs_to :student_identifier_type
      table.string :value, null: false
      table.datetime :created_at, null: false
      table.datetime :updated_at, null: false
      table.datetime :deleted_at
    end
    # Assignments may require a particular identifier type.
    add_column :assignments, :student_identifier_type_id, :integer
    add_column :group_assignments, :student_identifier_type_id, :integer
    add_index :student_identifiers, :student_identifier_type_id
  end
end
| {
"content_hash": "724861acbb3f4af293a0b40f2391a861",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 72,
"avg_line_length": 34.964285714285715,
"alnum_prop": 0.6629213483146067,
"repo_name": "mkcode/classroom",
"id": "bf0e751499b4b4fecd5379e1dde3df4a4182dd01",
"size": "979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20160524234655_create_student_identifiers.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17304"
},
{
"name": "CoffeeScript",
"bytes": "6112"
},
{
"name": "HTML",
"bytes": "54040"
},
{
"name": "Ruby",
"bytes": "155765"
},
{
"name": "Shell",
"bytes": "3606"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
Bot. Beechey Voy. 362, t. 87. 1839
#### Original name
null
### Remarks
null | {
"content_hash": "5ecb776333b0b4f51a6ec8aedefdc9e6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 12.615384615384615,
"alnum_prop": 0.6829268292682927,
"repo_name": "mdoering/backbone",
"id": "bb3927d32ac129f21e2ba492793f0e813ad99d3b",
"size": "218",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Oleaceae/Fraxinus/Fraxinus dipetala/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
require 'spec_helper'
# Model spec for User: validity, API-key generation, and password digest
# population.  Updated from the deprecated monkey-patched `should` syntax
# (removed from RSpec 3 defaults) to the supported `expect` /
# `is_expected` syntax.
describe User do
  before do
    # Fresh random name per example so the uniqueness check below collides
    # with the record created in this example, not a stale one.
    @random_name = -> { SecureRandom.hex }
    @valid_attributes = { name: @random_name.call, password: "password", password_confirmation: "password" }
  end
  let(:user) { User.create(@valid_attributes) }
  subject { user }

  it { is_expected.to be_valid }
  it { is_expected.to respond_to(:name) }
  it { is_expected.to respond_to(:api_key) }

  describe "creating new" do
    context "with duplicated name" do
      it "should not be valid" do
        # Upcasing the existing name exercises case-insensitive uniqueness.
        expect(User.new(@valid_attributes.update({ name: user.name.upcase }))).not_to be_valid
      end
    end
    it "creates an API token" do
      expect(user.api_key.length).to eq(32)
    end
    it "populates password digest" do
      expect(user.password_digest).to be_present
    end
  end
end
| {
"content_hash": "150e3292c93de1499c14f409bec6cd94",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 108,
"avg_line_length": 24.806451612903224,
"alnum_prop": 0.6488946684005201,
"repo_name": "stelligent/jonny_test_repo",
"id": "45d2c959c0ee01fff56913f5cadc982a0c31c62b",
"size": "769",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/models/user_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "471"
},
{
"name": "CoffeeScript",
"bytes": "2438"
},
{
"name": "Cucumber",
"bytes": "830"
},
{
"name": "HTML",
"bytes": "10979"
},
{
"name": "Ruby",
"bytes": "57912"
},
{
"name": "Shell",
"bytes": "319"
}
],
"symlink_target": ""
} |
namespace content {
class WebContents;
}
namespace extensions {
// Implements the mojo interface of extensions::mojom::LocalFrameHost.
// ExtensionWebContentsObserver creates and owns this class and it's destroyed
// when WebContents is destroyed.
class ExtensionFrameHost : public mojom::LocalFrameHost {
 public:
  explicit ExtensionFrameHost(content::WebContents* web_contents);
  ExtensionFrameHost(const ExtensionFrameHost&) = delete;
  ExtensionFrameHost& operator=(const ExtensionFrameHost&) = delete;
  ~ExtensionFrameHost() override;

  // Binds |receiver| to this host for messages arriving from |rfh|.
  void BindLocalFrameHost(
      mojo::PendingAssociatedReceiver<mojom::LocalFrameHost> receiver,
      content::RenderFrameHost* rfh);

  // mojom::LocalFrameHost:
  void RequestScriptInjectionPermission(
      const std::string& extension_id,
      mojom::InjectionType script_type,
      mojom::RunLocation run_location,
      RequestScriptInjectionPermissionCallback callback) override;
  void GetAppInstallState(const GURL& requestor_url,
                          GetAppInstallStateCallback callback) override;
  void Request(mojom::RequestParamsPtr params,
               RequestCallback callback) override;
  void WatchedPageChange(
      const std::vector<std::string>& css_selectors) override;

 protected:
  content::WebContents* web_contents() { return web_contents_; }

 private:
  // This raw pointer is safe to use because ExtensionWebContentsObserver whose
  // lifetime is tied to the WebContents owns this instance.
  raw_ptr<content::WebContents> web_contents_;
  // Receivers for all render frames in |web_contents_|.
  content::RenderFrameHostReceiverSet<mojom::LocalFrameHost> receivers_;
};
} // namespace extensions
#endif // EXTENSIONS_BROWSER_EXTENSION_FRAME_HOST_H_
| {
"content_hash": "777c9ab30a285d9c97de1e54c79b92fa",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 79,
"avg_line_length": 36.65217391304348,
"alnum_prop": 0.7580071174377224,
"repo_name": "ric2b/Vivaldi-browser",
"id": "9cf2a9466b1b8aef12a96de712a592b606dc203e",
"size": "2233",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chromium/extensions/browser/extension_frame_host.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Defines the base component class from which Landlab components inherit.
Base component class methods
++++++++++++++++++++++++++++
.. autosummary::
~landlab.core.model_component.Component.name
~landlab.core.model_component.Component.from_path
~landlab.core.model_component.Component.unit_agnostic
~landlab.core.model_component.Component.units
~landlab.core.model_component.Component.definitions
~landlab.core.model_component.Component.input_var_names
~landlab.core.model_component.Component.output_var_names
~landlab.core.model_component.Component.optional_var_names
~landlab.core.model_component.Component.var_type
~landlab.core.model_component.Component.var_units
~landlab.core.model_component.Component.var_definition
~landlab.core.model_component.Component.var_mapping
~landlab.core.model_component.Component.var_loc
~landlab.core.model_component.Component.var_help
~landlab.core.model_component.Component.initialize_output_fields
~landlab.core.model_component.Component.initialize_optional_output_fields
~landlab.core.model_component.Component.shape
~landlab.core.model_component.Component.grid
~landlab.core.model_component.Component.coords
"""
import os
import textwrap
import numpy as np
from .. import registry
from ..field import FieldError
from .model_parameter_loader import load_params
_VAR_HELP_MESSAGE = """
name: {name}
description:
{desc}
units: {units}
unit agnostic: {unit_agnostic}
at: {loc}
intent: {intent}
"""
class classproperty(property):
    """Descriptor exposing a classmethod as a read-only class attribute.

    Used below so that, e.g., ``Component.name`` is readable directly on
    the class without instantiating it.
    """

    def __get__(self, cls, owner):
        # self.fget is a classmethod object: bind it to the owner class and
        # call it, so access works on both the class and its instances.
        return self.fget.__get__(None, owner)()
class Component:
    """Base component class from which Landlab components inherit."""

    # Per-field metadata; subclasses override with entries keyed by field
    # name, each a dict read throughout this class with keys "mapping",
    # "intent", "optional", "dtype", "units", and "doc".
    _info = {}
    # Human-readable component name (subclasses override).
    _name = None
    # BibTeX citation for the component, "" when none is required.
    _cite_as = ""
    # Whether the component accepts any self-consistent unit system.
    _unit_agnostic = None

    def __new__(cls, *args, **kwds):
        # Record the component class in the global registry on every
        # instantiation (used for citation tracking).
        registry.add(cls)
        return object.__new__(cls)

    def __init__(self, grid):
        """Attach *grid* and validate the fields declared in ``_info``.

        Raises FieldError when a required input field is missing from the
        grid, or when a present (required or optional) input field has a
        dtype different from the one declared in ``_info``.
        """
        self._grid = grid
        self._current_time = None

        # ensure that required input fields exist
        for name in self._info.keys():
            at = self._info[name]["mapping"]
            optional = self._info[name]["optional"]
            in_true = "in" in self._info[name]["intent"]
            if (in_true) and (not optional):
                # if required input, verify that it exists.
                if name not in self._grid[at]:
                    raise FieldError(
                        "{component} is missing required input field: {name} at {at}".format(
                            component=self._name, name=name, at=at
                        )
                    )
                # if required input exists, check dtype.
                field = self._grid[at][name]
                dtype = self._info[name]["dtype"]
                if field.dtype != dtype:
                    raise FieldError(
                        "{component} required input variable: {name} at {at} has incorrect dtype. dtype must be {dtype} and is {actual}".format(
                            component=self._name,
                            name=name,
                            at=at,
                            dtype=dtype,
                            actual=field.dtype,
                        )
                    )
            # if optional input exists, check dtype
            if in_true and optional:
                if name in self._grid[at]:
                    field = self._grid[at][name]
                    dtype = self._info[name]["dtype"]
                    if field.dtype != dtype:
                        raise FieldError(
                            "{component} optional input variable: {name} at {at} has incorrect dtype. dtype must be {dtype} and is {actual}".format(
                                component=self._name,
                                name=name,
                                at=at,
                                dtype=dtype,
                                actual=field.dtype,
                            )
                        )

    @classmethod
    def from_path(cls, grid, path):
        """Create a component from an input file.

        Parameters
        ----------
        grid : ModelGrid
            A landlab grid.
        path : str or file_like
            Path to a parameter file, contents of a parameter file, or
            a file-like object.

        Returns
        -------
        Component
            A newly-created component.
        """
        # *path* may be a filesystem path or literal parameter text; only
        # open it when it names an existing file.
        if os.path.isfile(path):
            with open(path, "r") as fp:
                params = load_params(fp)
        else:
            params = load_params(path)
        return cls(grid, **params)

    @classproperty
    @classmethod
    def cite_as(cls):
        """Citation information for component.

        Return required software citation, if any. An empty string indicates
        that no citations other than the standard Landlab package citations are
        needed for the component.

        Citations are provided in BibTeX format.

        Returns
        -------
        cite_as
        """
        return cls._cite_as

    @property
    def current_time(self):
        """Current time.

        Some components may keep track of the current time. In this case, the
        ``current_time`` attribute is incremented. Otherwise it is set to None.

        Returns
        -------
        current_time
        """
        return self._current_time

    @current_time.setter
    def current_time(self, new_time):
        # Once set, time may only move forward.
        if self._current_time is not None:
            assert new_time > self._current_time
        self._current_time = new_time

    @classproperty
    @classmethod
    def input_var_names(cls):
        """Names of fields that are used by the component.

        Returns
        -------
        tuple of str
            Tuple of field names.
        """
        # Required inputs only: optional fields are reported separately by
        # ``optional_var_names``.
        input_var_names = [
            name
            for name in cls._info.keys()
            if (not cls._info[name]["optional"]) and ("in" in cls._info[name]["intent"])
        ]
        return tuple(sorted(input_var_names))

    @classproperty
    @classmethod
    def output_var_names(cls):
        """Names of fields that are provided by the component.

        Returns
        -------
        tuple of str
            Tuple of field names.
        """
        output_var_names = [
            name
            for name in cls._info.keys()
            if (not cls._info[name]["optional"])
            and ("out" in cls._info[name]["intent"])
        ]
        return tuple(sorted(output_var_names))

    @classproperty
    @classmethod
    def optional_var_names(cls):
        """Names of fields that are optionally provided by the component, if
        any.

        Returns
        -------
        tuple of str
            Tuple of field names.
        """
        optional_var_names = [
            name for name in cls._info.keys() if cls._info[name]["optional"]
        ]
        return tuple(sorted(optional_var_names))

    @classmethod
    def var_type(cls, name):
        """Returns the dtype of a field (float, int, bool, str...).

        Parameters
        ----------
        name : str
            A field name.

        Returns
        -------
        dtype
            The dtype of the field.
        """
        return cls._info[name]["dtype"]

    @classproperty
    @classmethod
    def name(cls):
        """Name of the component.

        Returns
        -------
        str
            Component name.
        """
        return cls._name

    @classproperty
    @classmethod
    def unit_agnostic(cls):
        """Whether the component is unit agnostic.

        If True, then the component is unit agnostic. Under this condition a
        user must still provide consistent units across all input arguments,
        keyword arguments, and fields. However, when ``unit_agnostic`` is True
        the units specified can be interpreted as dimensions.

        When False, then the component requires inputs in the specified units.

        Returns
        -------
        bool
        """
        return cls._unit_agnostic

    @classproperty
    @classmethod
    def units(cls):
        """Get the units for all field values.

        Returns
        -------
        tuple or str
            Units for each field.
        """
        return tuple(
            sorted([(name, cls._info[name]["units"]) for name in cls._info.keys()])
        )

    @classmethod
    def var_units(cls, name):
        """Get the units of a particular field.

        Parameters
        ----------
        name : str
            A field name.

        Returns
        -------
        str
            Units for the given field.
        """
        return cls._info[name]["units"]

    @classproperty
    @classmethod
    def definitions(cls):
        """Get a description of each field.

        Returns
        -------
        tuple of (*name*, *description*)
            A description of each field.
        """
        return tuple(
            sorted([(name, cls._info[name]["doc"]) for name in cls._info.keys()])
        )

    @classmethod
    def var_definition(cls, name):
        """Get a description of a particular field.

        Parameters
        ----------
        name : str
            A field name.

        Returns
        -------
        tuple of (*name*, *description*)
            A description of each field.
        """
        return cls._info[name]["doc"]

    @classmethod
    def var_help(cls, name):
        """Print a help message for a particular field.

        Parameters
        ----------
        name : str
            A field name.
        """
        # Wrap the doc text so the printed description is indented under
        # the "description:" label of _VAR_HELP_MESSAGE.
        desc = os.linesep.join(
            textwrap.wrap(
                cls._info[name]["doc"], initial_indent="  ", subsequent_indent="  "
            )
        )
        units = cls._info[name]["units"]
        loc = cls._info[name]["mapping"]
        intent = cls._info[name]["intent"]
        help = _VAR_HELP_MESSAGE.format(
            name=name,
            desc=desc,
            units=units,
            loc=loc,
            intent=intent,
            unit_agnostic=cls._unit_agnostic,
        )
        print(help.strip())

    @classproperty
    @classmethod
    def var_mapping(cls):
        """Location where variables are defined.

        Returns
        -------
        tuple of (name, location)
            Tuple of variable name and location ('node', 'link', etc.) pairs.
        """
        return tuple(
            sorted([(name, cls._info[name]["mapping"]) for name in cls._info.keys()])
        )

    @classmethod
    def var_loc(cls, name):
        """Location where a particular variable is defined.

        Parameters
        ----------
        name : str
            A field name.

        Returns
        -------
        str
            The location ('node', 'link', etc.) where a variable is defined.
        """
        return cls._info[name]["mapping"]

    def initialize_output_fields(self, values_per_element=None):
        """Create fields for a component based on its input and output var
        names.

        This method will create new fields (without overwrite) for any fields
        output by, but not supplied to, the component. New fields are
        initialized to zero. Ignores optional fields. New fields are created as
        arrays of floats, unless the component specifies the variable type.

        Parameters
        ----------
        values_per_element: int (optional)
            On occasion, it is necessary to create a field that is of size
            (n_grid_elements, values_per_element) instead of the default size
            (n_grid_elements,). Use this keyword argument to acomplish this
            task.
        """
        for name in self._info.keys():
            at = self._info[name]["mapping"]
            optional = self._info[name]["optional"]
            out_true = "out" in self._info[name]["intent"]
            # Only create required outputs that the caller did not supply.
            if (out_true) and (not optional) and (name not in self._grid[at]):
                type_in = self.var_type(name)
                num_elements = self._grid.size(at)
                if values_per_element is None:
                    size = num_elements
                else:
                    # 2-D field: one row per grid element.
                    size = (num_elements, values_per_element)
                init_vals = np.zeros(size, dtype=type_in)
                units_in = self.var_units(name)
                # copy=False: the zeros array is freshly allocated, no need
                # to duplicate it.
                self.grid.add_field(name, init_vals, at=at, units=units_in, copy=False)

    def initialize_optional_output_fields(self):
        """Create fields for a component based on its optional field outputs,
        if declared in _optional_var_names.

        This method will create new fields (without overwrite) for any
        fields output by the component as optional. New fields are
        initialized to zero. New fields are created as arrays of floats,
        unless the component also contains the specifying property
        _var_type.
        """
        for name in self._info.keys():
            at = self._info[name]["mapping"]
            optional = self._info[name]["optional"]
            out_true = "out" in self._info[name]["intent"]
            if (out_true) and (optional) and (name not in self._grid[at]):
                type_in = self.var_type(name)
                init_vals = self.grid.zeros(at, dtype=type_in)
                units_in = self.var_units(name)
                self.grid.add_field(name, init_vals, at=at, units=units_in, copy=False)

    @property
    def shape(self):
        """Return the grid shape attached to the component, if defined."""
        # NOTE(review): relies on the grid's private ``_shape`` attribute.
        return self.grid._shape

    @property
    def grid(self):
        """Return the grid attached to the component."""
        return self._grid

    @property
    def coords(self):
        """Return the coordinates of nodes on grid attached to the
        component."""
        return (self.grid.node_x, self.grid.node_y)
| {
"content_hash": "f59191ea71ba623fb81cf964de6e8842",
"timestamp": "",
"source": "github",
"line_count": 466,
"max_line_length": 148,
"avg_line_length": 29.86266094420601,
"alnum_prop": 0.541966082207531,
"repo_name": "cmshobe/landlab",
"id": "d1801b4d8b5fa75b621b43d02a763f92369111f9",
"size": "13939",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "landlab/core/model_component.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1359"
},
{
"name": "HTML",
"bytes": "99948"
},
{
"name": "Jupyter Notebook",
"bytes": "701992"
},
{
"name": "Makefile",
"bytes": "1924"
},
{
"name": "PowerShell",
"bytes": "7128"
},
{
"name": "Python",
"bytes": "4132304"
},
{
"name": "Shell",
"bytes": "2691"
},
{
"name": "TeX",
"bytes": "19453"
}
],
"symlink_target": ""
} |
package com.socrata.soda.external
import org.scalatest.{Assertions, FunSuite, MustMatchers}
import com.socrata.soql.types._
import com.rojoma.json.v3.ast._
import com.rojoma.json.v3.io.JsonReader
import com.vividsolutions.jts.geom._
// Exercises the JsonColumnRep codecs: coverage over all SoQL types, round
// tripping of primitive, temporal, and geometry values, and rejection of
// malformed input for both the client-facing and data-coordinator reps.
class JsonColumnRepTest extends FunSuite with MustMatchers with Assertions {
  // --- Rep coverage: every SoQL type must have a codec in both maps. ---
  test("Client reps know about all types") {
    JsonColumnRep.forClientType.keySet must equal (SoQLType.typesByName.values.toSet)
  }
  test("Data coordinator reps know about all types") {
    JsonColumnRep.forDataCoordinatorType.keySet must equal (SoQLType.typesByName.values.toSet)
  }
  // --- Primitive types: null, text, numbers, booleans. ---
  test("JSON type checker handles nulls"){
    JsonColumnRep.forClientType(SoQLText).fromJValue(JNull) must equal (Some(SoQLNull))
  }
  test("JSON type checker with text"){
    val input = "this is input text"
    JsonColumnRep.forClientType(SoQLText).fromJValue(JString(input)) must equal (Some(SoQLText(input)))
  }
  test("JSON type checker with unicode text"){
    val input = "this is unicode input text صص صꕥꔚꔄꔞഝആ"
    JsonColumnRep.forClientType(SoQLText).fromJValue(JString(input)) must equal (Some(SoQLText(input)))
  }
  test("JSON type checker for text: invalid input - object") {
    JsonColumnRep.forClientType(SoQLText).fromJValue(JObject.canonicalEmpty) must equal (None)
  }
  test("JSON type checker with number (as string)"){
    val input = "12345"
    JsonColumnRep.forClientType(SoQLNumber).fromJValue(JString(input)) must equal (Some(SoQLNumber(java.math.BigDecimal.valueOf(input.toLong))))
  }
  test("Number is written as plain text (without scientific notation)"){
    val input = SoQLNumber(BigDecimal(0.0000005302).bigDecimal)
    JsonColumnRep.forClientType(SoQLNumber).toJValue(input) must equal (JString("0.0000005302"))
  }
  test("Really long number is written in scientific notation"){
    val input = SoQLNumber(BigDecimal(0.000000000000000000005302).bigDecimal)
    JsonColumnRep.forClientType(SoQLNumber).toJValue(input) must equal (JString("5.302E-21"))
  }
  test("JSON type checker with number (as number)"){
    val input = BigDecimal(12345).bigDecimal
    JsonColumnRep.forClientType(SoQLNumber).fromJValue(JNumber(input)) must equal (Some(SoQLNumber(input)))
  }
  test("JSON type checker with double"){
    val input = 123.456789
    JsonColumnRep.forClientType(SoQLDouble).fromJValue(JNumber(input)) must equal (Some(SoQLDouble(input)))
  }
  // Non-finite doubles arrive as JSON strings, not JSON numbers.
  test("JSON type checker with double - positive infinity"){
    JsonColumnRep.forClientType(SoQLDouble).fromJValue(JString("Infinity")) must equal (Some(SoQLDouble(Double.PositiveInfinity)))
  }
  test("JSON type checker with double - negative infinity"){
    JsonColumnRep.forClientType(SoQLDouble).fromJValue(JString("-Infinity")) must equal (Some(SoQLDouble(Double.NegativeInfinity)))
  }
  test("JSON type checker with double - NaN"){
    // NaN != NaN, so inspect the parsed value instead of using equality.
    val result = JsonColumnRep.forClientType(SoQLDouble).fromJValue(JString("NaN"))
    assert(result.get.asInstanceOf[SoQLDouble].value.isNaN)
  }
  test("JSON type checker with money"){
    val input = BigDecimal(123.45).bigDecimal
    JsonColumnRep.forClientType(SoQLMoney).fromJValue(JNumber(input)) must equal (Some(SoQLMoney(input)))
  }
  test("JSON type checker with boolean"){
    val input = false
    JsonColumnRep.forClientType(SoQLBoolean).fromJValue(JBoolean(input)) must equal (Some(SoQLBoolean(input)))
  }
  // --- Temporal types. ---
  test("JSON type checker with fixed timestamp"){
    val input = "2013-06-03T02:26:05.123Z"
    val asDateTime = SoQLFixedTimestamp.StringRep.unapply(input).get
    JsonColumnRep.forClientType(SoQLFixedTimestamp).fromJValue(JString(input)) must equal (Some(SoQLFixedTimestamp(asDateTime)))
  }
  test("JSON type checker with floating timestamp"){
    val input = "2013-06-03T02:26:05.123"
    val asDateTime = SoQLFloatingTimestamp.StringRep.unapply(input).get
    JsonColumnRep.forClientType(SoQLFloatingTimestamp).fromJValue(JString(input)) must equal (Some(SoQLFloatingTimestamp(asDateTime)))
  }
  test("JSON type checker with date"){
    val input = "2013-06-03"
    val asDate = SoQLDate.StringRep.unapply(input).get
    JsonColumnRep.forClientType(SoQLDate).fromJValue(JString(input)) must equal (Some(SoQLDate(asDate)))
  }
  test("JSON type checker with time"){
    val input = "02:26:05.123"
    val asTime = SoQLTime.StringRep.unapply(input).get
    JsonColumnRep.forClientType(SoQLTime).fromJValue(JString(input)) must equal (Some(SoQLTime(asTime)))
  }
  test("JSON type checker with invalid time"){
    val input = "@0z2:2!6:0$5.123"
    JsonColumnRep.forClientType(SoQLTime).fromJValue(JString(input)) must be (None)
  }
  // --- JSON container types. ---
  test("JSON type checker with array"){
    val input = JArray(Seq(JString("this is text"), JNumber(222), JNull, JBoolean(true)))
    JsonColumnRep.forClientType(SoQLArray).fromJValue(input) must equal (Some(SoQLArray(input)))
  }
  test("JSON type checker with object"){
    val input = JObject(Map("key" -> JString("value")))
    JsonColumnRep.forClientType(SoQLObject).fromJValue(input) must equal (Some(SoQLObject(input)))
  }
  // --- Geometry types: GeoJSON and WKT parsing, type mismatches. ---
  test("JSON type checker with Point"){
    val input = "{\"type\":\"Point\",\"coordinates\":[47.6303,-122.3148]}"
    val asGeom = SoQLPoint.JsonRep.unapply(input)
    JsonColumnRep.forClientType(SoQLPoint).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLPoint(asGeom.get)))
  }
  test("JSON type checker handles GeoJSON of different types") {
    // A MultiLineString must not parse as a Point.
    val input = """{"type":"MultiLineString","coordinates":[[[100,0.123456789012],[101,1]],[[102,2],[103,3]]]}"""
    JsonColumnRep.forClientType(SoQLPoint).fromJValue(JsonReader.fromString(input)) must equal (None)
  }
  test("JSON type checker can read WKT geom fields") {
    val wkt = "POINT (-30.04045 48.606567)"
    val soqlPointFromWkt = JsonColumnRep.forClientType(SoQLPoint)
      .fromJValue(JString(wkt)).get.asInstanceOf[SoQLPoint]
    soqlPointFromWkt.value.getX must be { -30.04045 +- 0.000001 }
    soqlPointFromWkt.value.getY must be { 48.606567 +- 0.000001 }
  }
  test("JSON type checker handles non-WKT geom fields") {
    val notWkt = "blah (lbah lbah)"
    JsonColumnRep.forClientType(SoQLPoint).fromJValue(JString(notWkt)) must equal (None)
  }
  test("JSON type checker with MultiLine"){
    val input = """{"type":"MultiLineString","coordinates":[[[100,0.123456789012],[101,1]],[[102,2],[103,3]]]}"""
    val SoQLMultiLine.JsonRep(asGeom) = input
    JsonColumnRep.forClientType(SoQLMultiLine).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLMultiLine(asGeom)))
  }
  test("JSON type checker with MultiPolygon"){
    val input = """{"type":"MultiPolygon","coordinates":[[[[40,40],[20,45.123456789012],[45,30],[40,40]]],[[[20,35],[10,30],[10,10],[30,5],[45,20],[20,35]],[[30,20],[20,15],[20,25],[30,20]]]]}"""
    val SoQLMultiPolygon.JsonRep(asGeom) = input
    JsonColumnRep.forClientType(SoQLMultiPolygon).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLMultiPolygon(asGeom)))
  }
  test("JSON type checker with Line"){
    val input = """{"type":"LineString","coordinates":[[40,40],[20,45.123456789012],[45,30],[40,40]]}"""
    val SoQLLine.JsonRep(asGeom) = input
    JsonColumnRep.forClientType(SoQLLine).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLLine(asGeom)))
  }
  test("JSON type checker with Polygon"){
    val input = """{"type":"Polygon","coordinates":[[ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ]]}"""
    val SoQLPolygon.JsonRep(asGeom) = input
    JsonColumnRep.forClientType(SoQLPolygon).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLPolygon(asGeom)))
  }
  test("JSON type checker with Multipoint"){
    val input = """{"type":"MultiPoint","coordinates":[[40,40],[20,45.123456789012],[45,30]]}"""
    val SoQLMultiPoint.JsonRep(asGeom) = input
    JsonColumnRep.forClientType(SoQLMultiPoint).fromJValue(JsonReader.fromString(input)) must equal (Some(SoQLMultiPoint(asGeom)))
  }
  val factory = new GeometryFactory
  // --- Round trips and data-coordinator (CJSON) wire formats. ---
  test("can export geometry types") {
    val pt = SoQLPoint(factory.createPoint(new Coordinate(1.0, 2.0)))
    val outJvalue = JsonColumnRep.forClientType(SoQLPoint).toJValue(pt)
    JsonColumnRep.forClientType(SoQLPoint).fromJValue(outJvalue) must equal (Some(pt))
  }
  test("can read from CJSON WKT and WKB64") {
    val wkb64 = "AAAAAAHAPgpa8K4hcEBITaQDgJ5U"
    val wkt = "POINT (-30.04045 48.606567)"
    val soqlPointFromWkb = JsonColumnRep.forDataCoordinatorType(SoQLPoint)
      .fromJValue(JString(wkb64)).get.asInstanceOf[SoQLPoint]
    soqlPointFromWkb.value.getX must be { -30.04045 +- 0.000001 }
    soqlPointFromWkb.value.getY must be { 48.606567 +- 0.000001 }
    val soqlPointFromWkt = JsonColumnRep.forDataCoordinatorType(SoQLPoint)
      .fromJValue(JString(wkt)).get.asInstanceOf[SoQLPoint]
    soqlPointFromWkt.value.getX must be { -30.04045 +- 0.000001 }
    soqlPointFromWkt.value.getY must be { 48.606567 +- 0.000001 }
  }
}
| {
"content_hash": "b284bf97c647ba2a386c5877d758ee29",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 203,
"avg_line_length": 46.19387755102041,
"alnum_prop": 0.7168102496134305,
"repo_name": "socrata-platform/soda-fountain",
"id": "ad39c6b32db64a594c9213572c07c1343de4af9c",
"size": "9069",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "soda-fountain-external/src/test/scala/com/socrata/soda/external/JsonColumnRepTest.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "724"
},
{
"name": "Java",
"bytes": "336"
},
{
"name": "Jinja",
"bytes": "1454"
},
{
"name": "Scala",
"bytes": "637050"
},
{
"name": "Shell",
"bytes": "3190"
}
],
"symlink_target": ""
} |
/* $NetBSD: __aeabi_fcmplt.c,v 1.1 2013/04/16 10:37:39 matt Exp $ */
/** @file
*
* Copyright (c) 2013 - 2014, ARM Limited. All rights reserved.
*
* This program and the accompanying materials
* are licensed and made available under the terms and conditions of the BSD License
* which accompanies this distribution. The full text of the license may be found at
* http://opensource.org/licenses/bsd-license.php
*
* THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
* WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
*
**/
/*
* Written by Ben Harris, 2000. This file is in the Public Domain.
*/
#include "softfloat-for-gcc.h"
#include "milieu.h"
#include "softfloat.h"
#if defined(LIBC_SCCS) && !defined(lint)
__RCSID("$NetBSD: __aeabi_fcmplt.c,v 1.1 2013/04/16 10:37:39 matt Exp $");
#endif /* LIBC_SCCS and not lint */
int __aeabi_fcmplt(float32, float32);
int
__aeabi_fcmplt(float32 a, float32 b)
{
return float32_lt(a, b);
}
| {
"content_hash": "6ee39f0cc4dcb4972fd7911740401950",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 85,
"avg_line_length": 28.694444444444443,
"alnum_prop": 0.6747337850919651,
"repo_name": "MattDevo/edk2",
"id": "e5b12cdef2f0195b5fbbe14ff1ef461d0a2103b8",
"size": "1033",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ArmPkg/Library/ArmSoftFloatLib/Arm/__aeabi_fcmplt.c",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "4545237"
},
{
"name": "Batchfile",
"bytes": "93042"
},
{
"name": "C",
"bytes": "94289702"
},
{
"name": "C++",
"bytes": "20170310"
},
{
"name": "CSS",
"bytes": "1905"
},
{
"name": "DIGITAL Command Language",
"bytes": "13695"
},
{
"name": "GAP",
"bytes": "698245"
},
{
"name": "GDB",
"bytes": "96"
},
{
"name": "HTML",
"bytes": "472114"
},
{
"name": "Lua",
"bytes": "249"
},
{
"name": "Makefile",
"bytes": "231845"
},
{
"name": "NSIS",
"bytes": "2229"
},
{
"name": "Objective-C",
"bytes": "4147834"
},
{
"name": "PHP",
"bytes": "674"
},
{
"name": "PLSQL",
"bytes": "24782"
},
{
"name": "Perl",
"bytes": "6218"
},
{
"name": "Python",
"bytes": "27130096"
},
{
"name": "R",
"bytes": "21094"
},
{
"name": "Roff",
"bytes": "28192"
},
{
"name": "Shell",
"bytes": "104362"
},
{
"name": "SourcePawn",
"bytes": "29427"
},
{
"name": "Visual Basic",
"bytes": "494"
}
],
"symlink_target": ""
} |
package kube_inventory
import (
"context"
"time"
"github.com/ericchiang/k8s"
"github.com/ericchiang/k8s/apis/apps/v1beta1"
"github.com/ericchiang/k8s/apis/apps/v1beta2"
"github.com/ericchiang/k8s/apis/core/v1"
"github.com/influxdata/telegraf/internal/tls"
)
type client struct {
namespace string
timeout time.Duration
*k8s.Client
}
func newClient(baseURL, namespace, bearerToken string, timeout time.Duration, tlsConfig tls.ClientConfig) (*client, error) {
c, err := k8s.NewClient(&k8s.Config{
Clusters: []k8s.NamedCluster{{Name: "cluster", Cluster: k8s.Cluster{
Server: baseURL,
InsecureSkipTLSVerify: tlsConfig.InsecureSkipVerify,
CertificateAuthority: tlsConfig.TLSCA,
}}},
Contexts: []k8s.NamedContext{{Name: "context", Context: k8s.Context{
Cluster: "cluster",
AuthInfo: "auth",
Namespace: namespace,
}}},
AuthInfos: []k8s.NamedAuthInfo{{Name: "auth", AuthInfo: k8s.AuthInfo{
Token: bearerToken,
ClientCertificate: tlsConfig.TLSCert,
ClientKey: tlsConfig.TLSKey,
}}},
})
if err != nil {
return nil, err
}
return &client{
Client: c,
timeout: timeout,
namespace: namespace,
}, nil
}
func (c *client) getDaemonSets(ctx context.Context) (*v1beta2.DaemonSetList, error) {
list := new(v1beta2.DaemonSetList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, c.namespace, list)
}
func (c *client) getDeployments(ctx context.Context) (*v1beta1.DeploymentList, error) {
list := &v1beta1.DeploymentList{}
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, c.namespace, list)
}
func (c *client) getNodes(ctx context.Context) (*v1.NodeList, error) {
list := new(v1.NodeList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, "", list)
}
func (c *client) getPersistentVolumes(ctx context.Context) (*v1.PersistentVolumeList, error) {
list := new(v1.PersistentVolumeList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, "", list)
}
func (c *client) getPersistentVolumeClaims(ctx context.Context) (*v1.PersistentVolumeClaimList, error) {
list := new(v1.PersistentVolumeClaimList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, c.namespace, list)
}
func (c *client) getPods(ctx context.Context) (*v1.PodList, error) {
list := new(v1.PodList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, c.namespace, list)
}
func (c *client) getStatefulSets(ctx context.Context) (*v1beta1.StatefulSetList, error) {
list := new(v1beta1.StatefulSetList)
ctx, cancel := context.WithTimeout(ctx, c.timeout)
defer cancel()
return list, c.List(ctx, c.namespace, list)
}
| {
"content_hash": "951c2117640291553f1c435c5d7f62fb",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 124,
"avg_line_length": 29.278350515463917,
"alnum_prop": 0.7095070422535211,
"repo_name": "Heathland/telegraf",
"id": "bf207b0ad46d6ee74be39fee29c4bd6036022be3",
"size": "2840",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "plugins/inputs/kube_inventory/client.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "5270763"
},
{
"name": "Makefile",
"bytes": "3727"
},
{
"name": "Python",
"bytes": "37517"
},
{
"name": "Ragel",
"bytes": "7577"
},
{
"name": "Shell",
"bytes": "13945"
}
],
"symlink_target": ""
} |
bernoulli_django
================
| {
"content_hash": "bcb9db33e0cdc3361121ffdf8de14bf0",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 16,
"avg_line_length": 17,
"alnum_prop": 0.4411764705882353,
"repo_name": "bernoulli-metrics/bernoulli_django",
"id": "636a3d8f371e1f554346694d2c9f34a3a542c714",
"size": "34",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4296"
}
],
"symlink_target": ""
} |
{% extends "wiki/search.html" %}
{% block wiki_search_loop %}
{% with article.object as article %}
{% include "wiki/includes/searchresult.html" %}
{% endwith %}
{% endblock %}
| {
"content_hash": "c738aee858bbd2ff47292b06493e3d33",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 47,
"avg_line_length": 22.25,
"alnum_prop": 0.651685393258427,
"repo_name": "skbly7/serc",
"id": "d0f23a8e8c21e8a31679e0b0012356023e905d6c",
"size": "178",
"binary": false,
"copies": "25",
"ref": "refs/heads/master",
"path": "website/wiki/plugins/haystack/templates/wiki/plugins/haystack/search.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "167250"
},
{
"name": "GCC Machine Description",
"bytes": "107"
},
{
"name": "HTML",
"bytes": "127197"
},
{
"name": "JavaScript",
"bytes": "276437"
},
{
"name": "Python",
"bytes": "659443"
},
{
"name": "Shell",
"bytes": "194"
}
],
"symlink_target": ""
} |
#ifndef _EFI_PCI_IO_H
#define _EFI_PCI_IO_H
//
// Global ID for the PCI I/O Protocol
//
#define EFI_PCI_IO_PROTOCOL_GUID \
{ \
0x4cf5b200, 0x68b8, 0x4ca5, 0x9e, 0xec, 0xb2, 0x3e, 0x3f, 0x50, 0x2, 0x9a \
}
EFI_FORWARD_DECLARATION (EFI_PCI_IO_PROTOCOL);
//
// Prototypes for the PCI I/O Protocol
//
typedef enum {
EfiPciIoWidthUint8 = 0,
EfiPciIoWidthUint16,
EfiPciIoWidthUint32,
EfiPciIoWidthUint64,
EfiPciIoWidthFifoUint8,
EfiPciIoWidthFifoUint16,
EfiPciIoWidthFifoUint32,
EfiPciIoWidthFifoUint64,
EfiPciIoWidthFillUint8,
EfiPciIoWidthFillUint16,
EfiPciIoWidthFillUint32,
EfiPciIoWidthFillUint64,
EfiPciIoWidthMaximum
} EFI_PCI_IO_PROTOCOL_WIDTH;
//
// Complete PCI address generater
//
#define EFI_PCI_IO_PASS_THROUGH_BAR 0xff // Special BAR that passes a memory or I/O cycle through unchanged
#define EFI_PCI_IO_ATTRIBUTE_MASK 0x077f // All the following I/O and Memory cycles
#define EFI_PCI_IO_ATTRIBUTE_ISA_MOTHERBOARD_IO 0x0001 // I/O cycles 0x0000-0x00FF (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_ISA_IO 0x0002 // I/O cycles 0x0000-0x03FF (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_VGA_PALETTE_IO 0x0004 // I/O cycles 0x3C6, 0x3C8, 0x3C9 (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_VGA_MEMORY 0x0008 // MEM cycles 0xA0000-0xBFFFF (24 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_VGA_IO 0x0010 // I/O cycles 0x3B0-0x3BB and 0x3C0-0x3DF (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_IDE_PRIMARY_IO 0x0020 // I/O cycles 0x1F0-0x1F7, 0x3F6, 0x3F7 (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_IDE_SECONDARY_IO 0x0040 // I/O cycles 0x170-0x177, 0x376, 0x377 (10 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_MEMORY_WRITE_COMBINE 0x0080 // Map a memory range so write are combined
#define EFI_PCI_IO_ATTRIBUTE_IO 0x0100 // Enable the I/O decode bit in the PCI Config Header
#define EFI_PCI_IO_ATTRIBUTE_MEMORY 0x0200 // Enable the Memory decode bit in the PCI Config Header
#define EFI_PCI_IO_ATTRIBUTE_BUS_MASTER 0x0400 // Enable the DMA bit in the PCI Config Header
#define EFI_PCI_IO_ATTRIBUTE_MEMORY_CACHED 0x0800 // Map a memory range so all r/w accesses are cached
#define EFI_PCI_IO_ATTRIBUTE_MEMORY_DISABLE 0x1000 // Disable a memory range
#define EFI_PCI_IO_ATTRIBUTE_EMBEDDED_DEVICE 0x2000 // Clear for an add-in PCI Device
#define EFI_PCI_IO_ATTRIBUTE_EMBEDDED_ROM 0x4000 // Clear for a physical PCI Option ROM accessed through ROM BAR
#define EFI_PCI_IO_ATTRIBUTE_DUAL_ADDRESS_CYCLE 0x8000 // Clear for PCI controllers that can not genrate a DAC
//
// The following definition is added in EFI1.1 spec update and UEFI2.0 spec.
//
#define EFI_PCI_IO_ATTRIBUTE_ISA_IO_16 0x10000 // I/O cycles 0x0100-0x03FF (16 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_VGA_PALETTE_IO_16 0x20000 // I/O cycles 0x3C6, 0x3C8, 0x3C9 (16 bit decode)
#define EFI_PCI_IO_ATTRIBUTE_VGA_IO_16 0x40000 // I/O cycles 0x3B0-0x3BB and 0x3C0-0x3DF (16 bit decode)
#define EFI_PCI_DEVICE_ENABLE (EFI_PCI_IO_ATTRIBUTE_IO | EFI_PCI_IO_ATTRIBUTE_MEMORY | EFI_PCI_IO_ATTRIBUTE_BUS_MASTER)
#define EFI_VGA_DEVICE_ENABLE (EFI_PCI_IO_ATTRIBUTE_VGA_PALETTE_IO | EFI_PCI_IO_ATTRIBUTE_VGA_MEMORY | EFI_PCI_IO_ATTRIBUTE_VGA_IO | EFI_PCI_IO_ATTRIBUTE_IO)
//
// *******************************************************
// EFI_PCI_IO_PROTOCOL_OPERATION
// *******************************************************
//
typedef enum {
EfiPciIoOperationBusMasterRead,
EfiPciIoOperationBusMasterWrite,
EfiPciIoOperationBusMasterCommonBuffer,
EfiPciIoOperationMaximum
} EFI_PCI_IO_PROTOCOL_OPERATION;
//
// *******************************************************
// EFI_PCI_IO_PROTOCOL_ATTRIBUTE_OPERATION
// *******************************************************
//
typedef enum {
EfiPciIoAttributeOperationGet,
EfiPciIoAttributeOperationSet,
EfiPciIoAttributeOperationEnable,
EfiPciIoAttributeOperationDisable,
EfiPciIoAttributeOperationSupported,
EfiPciIoAttributeOperationMaximum
} EFI_PCI_IO_PROTOCOL_ATTRIBUTE_OPERATION;
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_POLL_IO_MEM) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_WIDTH Width,
IN UINT8 BarIndex,
IN UINT64 Offset,
IN UINT64 Mask,
IN UINT64 Value,
IN UINT64 Delay,
OUT UINT64 *Result
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_IO_MEM) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_WIDTH Width,
IN UINT8 BarIndex,
IN UINT64 Offset,
IN UINTN Count,
IN OUT VOID *Buffer
);
typedef struct {
EFI_PCI_IO_PROTOCOL_IO_MEM Read;
EFI_PCI_IO_PROTOCOL_IO_MEM Write;
} EFI_PCI_IO_PROTOCOL_ACCESS;
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_CONFIG) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_WIDTH Width,
IN UINT32 Offset,
IN UINTN Count,
IN OUT VOID *Buffer
);
typedef struct {
EFI_PCI_IO_PROTOCOL_CONFIG Read;
EFI_PCI_IO_PROTOCOL_CONFIG Write;
} EFI_PCI_IO_PROTOCOL_CONFIG_ACCESS;
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_COPY_MEM) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_WIDTH Width,
IN UINT8 DestBarIndex,
IN UINT64 DestOffset,
IN UINT8 SrcBarIndex,
IN UINT64 SrcOffset,
IN UINTN Count
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_MAP) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_OPERATION Operation,
IN VOID *HostAddress,
IN OUT UINTN *NumberOfBytes,
OUT EFI_PHYSICAL_ADDRESS * DeviceAddress,
OUT VOID **Mapping
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_UNMAP) (
IN EFI_PCI_IO_PROTOCOL * This,
IN VOID *Mapping
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_ALLOCATE_BUFFER) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_ALLOCATE_TYPE Type,
IN EFI_MEMORY_TYPE MemoryType,
IN UINTN Pages,
OUT VOID **HostAddress,
IN UINT64 Attributes
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_FREE_BUFFER) (
IN EFI_PCI_IO_PROTOCOL * This,
IN UINTN Pages,
IN VOID *HostAddress
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_FLUSH) (
IN EFI_PCI_IO_PROTOCOL * This
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_GET_LOCATION) (
IN EFI_PCI_IO_PROTOCOL * This,
OUT UINTN *SegmentNumber,
OUT UINTN *BusNumber,
OUT UINTN *DeviceNumber,
OUT UINTN *FunctionNumber
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_ATTRIBUTES) (
IN EFI_PCI_IO_PROTOCOL * This,
IN EFI_PCI_IO_PROTOCOL_ATTRIBUTE_OPERATION Operation,
IN UINT64 Attributes,
OUT UINT64 *Result OPTIONAL
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_GET_BAR_ATTRIBUTES) (
IN EFI_PCI_IO_PROTOCOL * This,
IN UINT8 BarIndex,
OUT UINT64 *Supports, OPTIONAL
OUT VOID **Resources OPTIONAL
);
typedef
EFI_STATUS
(EFIAPI *EFI_PCI_IO_PROTOCOL_SET_BAR_ATTRIBUTES) (
IN EFI_PCI_IO_PROTOCOL * This,
IN UINT64 Attributes,
IN UINT8 BarIndex,
IN OUT UINT64 *Offset,
IN OUT UINT64 *Length
);
//
// Interface structure for the PCI I/O Protocol
//
typedef struct _EFI_PCI_IO_PROTOCOL {
EFI_PCI_IO_PROTOCOL_POLL_IO_MEM PollMem;
EFI_PCI_IO_PROTOCOL_POLL_IO_MEM PollIo;
EFI_PCI_IO_PROTOCOL_ACCESS Mem;
EFI_PCI_IO_PROTOCOL_ACCESS Io;
EFI_PCI_IO_PROTOCOL_CONFIG_ACCESS Pci;
EFI_PCI_IO_PROTOCOL_COPY_MEM CopyMem;
EFI_PCI_IO_PROTOCOL_MAP Map;
EFI_PCI_IO_PROTOCOL_UNMAP Unmap;
EFI_PCI_IO_PROTOCOL_ALLOCATE_BUFFER AllocateBuffer;
EFI_PCI_IO_PROTOCOL_FREE_BUFFER FreeBuffer;
EFI_PCI_IO_PROTOCOL_FLUSH Flush;
EFI_PCI_IO_PROTOCOL_GET_LOCATION GetLocation;
EFI_PCI_IO_PROTOCOL_ATTRIBUTES Attributes;
EFI_PCI_IO_PROTOCOL_GET_BAR_ATTRIBUTES GetBarAttributes;
EFI_PCI_IO_PROTOCOL_SET_BAR_ATTRIBUTES SetBarAttributes;
UINT64 RomSize;
VOID *RomImage;
} EFI_PCI_IO_PROTOCOL;
extern EFI_GUID gEfiPciIoProtocolGuid;
#endif
| {
"content_hash": "ddd6ca5c57c2b70e19cdee9fc3651263",
"timestamp": "",
"source": "github",
"line_count": 254,
"max_line_length": 177,
"avg_line_length": 36.618110236220474,
"alnum_prop": 0.5918718417374476,
"repo_name": "bitcrystal/edk",
"id": "c43f013b12ba4af751dcbdb965d04dddc033a2c6",
"size": "10137",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Foundation/Efi/Protocol/PciIo/PciIo.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "959602"
},
{
"name": "Batchfile",
"bytes": "60476"
},
{
"name": "C",
"bytes": "18423335"
},
{
"name": "C++",
"bytes": "938859"
},
{
"name": "GAP",
"bytes": "256118"
},
{
"name": "Makefile",
"bytes": "172033"
},
{
"name": "Objective-C",
"bytes": "2896732"
}
],
"symlink_target": ""
} |
import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { AboutComponent } from './index';
@NgModule({
imports: [
RouterModule.forChild([
{
path: 'about',
component: AboutComponent,
},
]),
],
exports: [
RouterModule,
],
})
export class AboutRoutingModule { }
| {
"content_hash": "acc7792aaa48de6efc46f7068ae5f3ff",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 47,
"avg_line_length": 17.55,
"alnum_prop": 0.5925925925925926,
"repo_name": "tarlepp/angular2-firebase-material-demo",
"id": "6c29e2b1219409c43321cbee0fd7503893221dc9",
"size": "351",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/app/about/about-routing.module.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3529"
},
{
"name": "HTML",
"bytes": "8068"
},
{
"name": "JavaScript",
"bytes": "1955"
},
{
"name": "TypeScript",
"bytes": "50119"
}
],
"symlink_target": ""
} |
package com.jcwhatever.nucleus.providers.npc.events;
import com.jcwhatever.nucleus.providers.npc.INpc;
import org.bukkit.event.HandlerList;
/**
* Called when a new {@link INpc} instance is created.
*/
public class NpcCreateEvent extends NpcEvent {
private static final HandlerList handlers = new HandlerList();
/**
* Constructor.
*
* @param npc The NPC the event is for.
*/
public NpcCreateEvent(INpc npc) {
super(npc);
}
@Override
public HandlerList getHandlers() {
return handlers;
}
public static HandlerList getHandlerList() {
return handlers;
}
} | {
"content_hash": "b6e9268635a85074139a4ecf452af40f",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 66,
"avg_line_length": 19.484848484848484,
"alnum_prop": 0.656298600311042,
"repo_name": "JCThePants/NucleusFramework",
"id": "f9bbc0ad7d112b585f57529f9636a0fa85a56fca",
"size": "1867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/jcwhatever/nucleus/providers/npc/events/NpcCreateEvent.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "26414"
},
{
"name": "HTML",
"bytes": "5069"
},
{
"name": "Java",
"bytes": "6510926"
}
],
"symlink_target": ""
} |
namespace Autofac.Integration.Mvc.Test.Stubs;
public abstract class AbstractViewMasterPage : ViewMasterPage
{
public Dependency Dependency { get; set; }
}
| {
"content_hash": "096c1138ac069316ae7ebf00722c34b0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 61,
"avg_line_length": 26.666666666666668,
"alnum_prop": 0.7875,
"repo_name": "autofac/Autofac.Mvc",
"id": "cdfb72046ee3464c01008d5c7f1d07401f310954",
"size": "308",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "test/Autofac.Integration.Mvc.Test/Stubs/AbstractViewMasterPage.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "192856"
},
{
"name": "PowerShell",
"bytes": "11785"
}
],
"symlink_target": ""
} |
A presentation given at Nature Publishing Group on Nov 13, 2012. | {
"content_hash": "dfe68e9f98e4db3fb368dc4b465b264e",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 64,
"avg_line_length": 64,
"alnum_prop": 0.8125,
"repo_name": "benmiles/clojure-for-rubyists",
"id": "af72b774a2f18e0cac7b71613077788991859f82",
"size": "88",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "F#",
"bytes": "1654"
},
{
"name": "JavaScript",
"bytes": "54054"
}
],
"symlink_target": ""
} |
var SaveRelationCommand = function() {
this.Extends = SimpleCommand;
this.execute = function(note) {
//alert("SaveRelationCommand");
try {
var mediator = null; //parameter !!!
var grid = mediator.grid;
var gridView = grid.getGridView();
var originalEntityValues = null;
var commandName = Command.EDT;
if (mediator.childRelation.getId() === null) {
commandName = Command.ADD;
}
if (commandName == Command.EDT) {
if (mediator.childRelation.getCei()) {
var entity = Entity.getById(mediator.childRelation.getCei());
//originalEntityValues = Entity.clone(entity);
originalEntityValues = (entity)?entity.clone():null;
originalEntityValues.setTypeObject(entity.getTypeObject());
//originalEntityValues.setAttributeList(entity.getAttributeList());
originalEntityValues.setAttributeList(entity.getChildAttributeList());
}
}
var relation = mediator.childRelation.save(mediator);
if (relation) {
var command = new RelationCommand(commandName);
if (command) {
command.setRelation(relation);
if (gridView) {
command.setNivo(gridView.getCurrentNivo());
command.setPosition(gridView.getPosition());
}
this.setLastCommand(command,true); //TODO: this !!!!
if ((mediator.sourceName == Command.EDT) ||
(mediator.sourceName == Command.GRP)) {
mediator.sourceName += ("_"+command.getId()+"/0");
}
if (mediator.sourceName.length == 3) {
mediator.sourceName += ("_"+command.getId());
}
command.setSourceName(mediator.sourceName);
//ATT: After setting sourceName, get groupName to construct final sourceName !!!
var seq1Id = command.getSeq1Id();
if (commandName != Command.EDT) {
command.setSourceName(command.getGroupName()+"/"+seq1Id);
} else {
if (mediator.sourceName.substr(0,3) != Command.GRP) {
command.setSourceName(command.getGroupName()+"/"+seq1Id);
} else {
var seq2Id = command.getSeq2Id();
var seq2Nbr = new Number(seq2Id);
seq2Nbr = (seq2Nbr + 1);
seq2Id = seq2Nbr.toString();
command.setSourceName(command.getGroupName()+"/"+seq1Id+"."+seq2Id);
}
if (originalEntityValues) {
originalEntityValues.setKey(command.getSourceName());
//_oe.push(originalEntityValues); //TODO: _oe
}
}
}
}
//this.sendNotification(SjamayeeFacade.RELATION_SAVED,mediator); // + !!! relation !!!
} catch(error) {
Utils.alert("SaveRelationCommand Error: "+error.message,Utils.LOG_LEVEL_ERROR);
}
};
};
SaveRelationCommand = new Class(new SaveRelationCommand());
| {
"content_hash": "53e1a81b5f36303f08f3ccec1dd20347",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 93,
"avg_line_length": 40.04347826086956,
"alnum_prop": 0.6264929424538545,
"repo_name": "sjamayee/sjamayee",
"id": "cd9da5c4272f4107a296282a69262b539287c357",
"size": "2774",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/sjamayee/js_old/pmvc/controller/common/SaveRelationCommand.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "203799"
},
{
"name": "JavaScript",
"bytes": "5710906"
},
{
"name": "PHP",
"bytes": "102128"
},
{
"name": "Python",
"bytes": "2259"
}
],
"symlink_target": ""
} |
package com.thoughtworks.go.server.dao;
import com.thoughtworks.go.config.GoConfigDao;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.helper.JobInstanceMother;
import com.thoughtworks.go.helper.PipelineMother;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.materials.DependencyMaterialUpdateNotifier;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.service.InstanceFactory;
import com.thoughtworks.go.server.transaction.AfterCompletionCallback;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.TimeProvider;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionSynchronization;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import static com.thoughtworks.go.helper.ModificationsMother.modifyOneFile;
import static com.thoughtworks.go.server.dao.DatabaseAccessHelper.assertIsInserted;
import static com.thoughtworks.go.server.dao.DatabaseAccessHelper.assertNotInserted;
import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
"classpath:WEB-INF/applicationContext-global.xml",
"classpath:WEB-INF/applicationContext-dataLocalAccess.xml",
"classpath:testPropertyConfigurer.xml",
"classpath:WEB-INF/spring-all-servlet.xml",
})
public class PipelineStateDaoIntegrationTest {
@Autowired private PipelineStateDao pipelineStateDao;
@Autowired private PipelineSqlMapDao pipelineSqlMapDao;
@Autowired private DatabaseAccessHelper dbHelper;
@Autowired private MaterialRepository materialRepository;
@Autowired private GoCache goCache;
@Autowired private TransactionTemplate transactionTemplate;
@Autowired private GoConfigDao goConfigDao;
@Autowired private InstanceFactory instanceFactory;
@Autowired private DependencyMaterialUpdateNotifier notifier;
@Before
public void setup() throws Exception {
dbHelper.onSetUp();
goCache.clear();
GoConfigFileHelper configHelper = new GoConfigFileHelper();
configHelper.usingCruiseConfigDao(goConfigDao);
notifier.disableUpdates();
}
@After
public void teardown() throws Exception {
notifier.enableUpdates();
dbHelper.onTearDown();
}
@Test
public void shouldFindLockedPipelinesCaseInsensitively() throws Exception {
Pipeline minglePipeline = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials("mingle", "stage1", "stage2"));
pipelineStateDao.lockPipeline(minglePipeline);
PipelineState lockedPipelineState = pipelineStateDao.pipelineStateFor("mingle");
assertThat(lockedPipelineState.getLockedBy().pipelineIdentifier(), is(minglePipeline.getIdentifier()));
assertThat(lockedPipelineState.getLockedByPipelineId(), is(minglePipeline.getId()));
assertThat(lockedPipelineState.getLockedByPipelineId(), is(not(0L)));
lockedPipelineState = pipelineStateDao.pipelineStateFor("mInGlE");
assertThat(lockedPipelineState.getLockedBy().pipelineIdentifier(), is(minglePipeline.getIdentifier()));
assertThat(lockedPipelineState.getLockedByPipelineId(), is(minglePipeline.getId()));
assertThat(lockedPipelineState.getLockedByPipelineId(), is(not(0L)));
}
@Test
public void shouldBombWhenLockingPipelineThatHasAlreadyBeenLocked() throws Exception {
String pipelineName = UUID.randomUUID().toString();
Pipeline minglePipeline1 = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials(pipelineName, "defaultStage"));
Pipeline minglePipeline2 = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials(pipelineName, "defaultStage"));
pipelineStateDao.lockPipeline(minglePipeline1);
assertThat(pipelineStateDao.pipelineStateFor(pipelineName).getLockedBy(), is(minglePipeline1.getFirstStage().getIdentifier()));
try {
pipelineStateDao.lockPipeline(minglePipeline2);
fail("Should not be able to lock a different instance of an already locked pipeline");
} catch (Exception e) {
assertThat(e.getMessage(), is(String.format("Pipeline '%s' is already locked (counter = 1)", pipelineName)));
}
}
@Test
public void shouldNotBombWhenLockingTheSamePipelineInstanceThatHasAlreadyBeenLocked() throws Exception {
String pipelineName = "pipeline";
Pipeline minglePipeline1 = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials(pipelineName, "defaultStage"));
pipelineStateDao.lockPipeline(minglePipeline1);
assertThat(pipelineStateDao.pipelineStateFor(pipelineName).getLockedBy(), is(minglePipeline1.getFirstStage().getIdentifier()));
try {
pipelineStateDao.lockPipeline(minglePipeline1);
} catch (Exception e) {
fail("Should not bomb trying to lock a locked pipeline instance but got: " + e.getMessage());
}
}
@Test
public void shouldUnlockPipelineInstance() throws Exception {
String pipelineName = UUID.randomUUID().toString();
Pipeline minglePipeline = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials(pipelineName, "defaultStage"));
TestAfterCompletionCallback afterLockCallback = new TestAfterCompletionCallback();
pipelineStateDao.lockPipeline(minglePipeline, afterLockCallback);
PipelineState pipelineState = pipelineStateDao.pipelineStateFor(pipelineName);
assertThat(pipelineState.getLockedBy(), is(minglePipeline.getFirstStage().getIdentifier()));
assertThat(pipelineState.getLockedByPipelineId(), is(minglePipeline.getId()));
afterLockCallback.assertCalledWithStatus(TransactionSynchronization.STATUS_COMMITTED);
TestAfterCompletionCallback unlockCallback = new TestAfterCompletionCallback();
pipelineStateDao.unlockPipeline(pipelineName, unlockCallback);
PipelineState pipelineState1 = pipelineStateDao.pipelineStateFor(pipelineName);
assertThat(pipelineState1.getLockedBy(), is(nullValue()));
assertThat(pipelineState1.getLockedByPipelineId(), is(0L));
unlockCallback.assertCalledWithStatus(TransactionSynchronization.STATUS_COMMITTED);
}
@Test
public void shouldReturnListOfAllLockedPipelines() throws Exception {
Pipeline minglePipeline = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials("mingle", "defaultStage"));
Pipeline twistPipeline = schedulePipelineWithStages(PipelineMother.twoBuildPlansWithResourcesAndMaterials("twist", "defaultStage"));
pipelineStateDao.lockPipeline(minglePipeline);
pipelineStateDao.lockPipeline(twistPipeline);
List<String> lockedPipelines = pipelineStateDao.lockedPipelines();
assertThat(lockedPipelines.size(), is(2));
assertThat(lockedPipelines, hasItem("mingle"));
assertThat(lockedPipelines, hasItem("twist"));
pipelineStateDao.unlockPipeline("mingle");
lockedPipelines = pipelineStateDao.lockedPipelines();
assertThat(lockedPipelines.size(), is(1));
assertThat(lockedPipelines, hasItem("twist"));
}
@Test
public void lockPipeline_shouldEnsureOnlyOneThreadCanLockAPipelineSuccessfully() throws Exception {
List<Thread> threads = new ArrayList<>();
final int[] errors = new int[1];
for (int i = 0; i < 10; i++) {
JobInstances jobInstances = new JobInstances(JobInstanceMother.completed("job"));
Stage stage = new Stage("stage-1", jobInstances, "shilpa", null, "auto", new TimeProvider());
final Pipeline pipeline = PipelineMother.pipeline("mingle", stage);
pipeline.setCounter(i + 1);
Thread thread = new Thread(new Runnable() {
public void run() {
try {
pipelineStateDao.lockPipeline(pipeline);
} catch (Exception e) {
errors[0]++;
}
}
}, "thread-" + i);
threads.add(thread);
thread.start();
}
for (Thread thread : threads) {
thread.join();
}
assertThat(errors[0], is(9));
}
private Pipeline schedulePipelineWithStages(PipelineConfig pipelineConfig) throws Exception {
BuildCause buildCause = BuildCause.createWithModifications(modifyOneFile(pipelineConfig), "");
Pipeline pipeline = instanceFactory.createPipelineInstance(pipelineConfig, buildCause, new DefaultSchedulingContext(DEFAULT_APPROVED_BY), "md5-test", new TimeProvider());
assertNotInserted(pipeline.getId());
savePipeline(pipeline);
assertIsInserted(pipeline.getId());
return pipeline;
}
private void savePipeline(final Pipeline pipeline) {
transactionTemplate.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
materialRepository.save(pipeline.getBuildCause().getMaterialRevisions());
pipelineSqlMapDao.saveWithStages(pipeline);
}
});
}
private class TestAfterCompletionCallback implements AfterCompletionCallback {
boolean called = false;
Integer status = null;
@Override
public void execute(int status) {
called = true;
this.status = status;
}
void assertCalledWithStatus(int status) {
assertTrue(called);
assertThat(this.status, equalTo(status));
}
}
}
| {
"content_hash": "0366d5556ae54facdc731a98ec03f94e",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 178,
"avg_line_length": 47.473684210526315,
"alnum_prop": 0.7363266814486327,
"repo_name": "ind9/gocd",
"id": "0a3a0f6049dfc37fff3c75d04741027f8c125f63",
"size": "11425",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "server/src/test-integration/java/com/thoughtworks/go/server/dao/PipelineStateDaoIntegrationTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "8573"
},
{
"name": "CSS",
"bytes": "611591"
},
{
"name": "FreeMarker",
"bytes": "182"
},
{
"name": "Groovy",
"bytes": "31403"
},
{
"name": "HTML",
"bytes": "662629"
},
{
"name": "Java",
"bytes": "18066460"
},
{
"name": "JavaScript",
"bytes": "3114732"
},
{
"name": "NSIS",
"bytes": "17010"
},
{
"name": "PLSQL",
"bytes": "4414"
},
{
"name": "PowerShell",
"bytes": "743"
},
{
"name": "Ruby",
"bytes": "4280532"
},
{
"name": "SQLPL",
"bytes": "15479"
},
{
"name": "Shell",
"bytes": "191871"
},
{
"name": "XSLT",
"bytes": "185781"
}
],
"symlink_target": ""
} |
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context="com.aspose.asposecloudimagingandroid.MainActivity">
<item
android:id="@+id/action_settings"
android:orderInCategory="100"
android:title="@string/action_settings"
app:showAsAction="never" />
</menu>
| {
"content_hash": "4f52a1022bc528c7867c5591ed2ae431",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 70,
"avg_line_length": 42.5,
"alnum_prop": 0.6894117647058824,
"repo_name": "asposeimaging/Aspose_Imaging_Cloud",
"id": "f276e2efd3ddfb5176d504e96cb87f2490a71979",
"size": "425",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "SDKs/Aspose.Imaging-Cloud-SDK-for-Android/app/src/main/res/menu/menu_main.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "207"
},
{
"name": "C#",
"bytes": "211360"
},
{
"name": "Java",
"bytes": "264819"
},
{
"name": "JavaScript",
"bytes": "146745"
},
{
"name": "Objective-C",
"bytes": "224110"
},
{
"name": "PHP",
"bytes": "150631"
},
{
"name": "Python",
"bytes": "180881"
},
{
"name": "Ruby",
"bytes": "190426"
}
],
"symlink_target": ""
} |
package org.mifos.customers.group;
import java.util.Date;
import org.mifos.customers.CustomerTemplateImpl;
import org.mifos.customers.util.helpers.CustomerStatus;
/**
* use builders instead of templates.
*/
@Deprecated
public class GroupTemplateImpl extends CustomerTemplateImpl implements GroupTemplate {
private boolean isTrained;
private Date trainedDate;
private Integer parentCustomerId;
private GroupTemplateImpl(Integer parentCenterId) {
super("TestGroup", CustomerStatus.GROUP_ACTIVE);
this.parentCustomerId = parentCenterId;
}
@Override
public boolean isTrained() {
return this.isTrained;
}
@Override
public Date getTrainedDate() {
return this.trainedDate;
}
@Override
public Integer getParentCenterId() {
return this.parentCustomerId;
}
/**
* Use this in transactions that you don't plan on committing to the
* database. If you commit more than one of these to the database you'll run
* into uniqueness constraints. Plan on always rolling back the transaction.
*
* @param parentCenterId
* @return groupTemplateImpl
*/
public static GroupTemplateImpl createNonUniqueGroupTemplate(Integer parentCenterId) {
return new GroupTemplateImpl(parentCenterId);
}
}
| {
"content_hash": "faa456e1fe7722252e5d1d804bf00cfd",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 90,
"avg_line_length": 26.38,
"alnum_prop": 0.7179681576952237,
"repo_name": "maduhu/head",
"id": "cb9df113edf153c1a52204d3160a6a0d427351ad",
"size": "2080",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "application/src/test/java/org/mifos/customers/group/GroupTemplateImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4245"
},
{
"name": "CSS",
"bytes": "111180"
},
{
"name": "Groff",
"bytes": "671"
},
{
"name": "HTML",
"bytes": "83311"
},
{
"name": "Java",
"bytes": "22987418"
},
{
"name": "JavaScript",
"bytes": "835209"
},
{
"name": "Makefile",
"bytes": "44"
},
{
"name": "PLSQL",
"bytes": "71576"
},
{
"name": "Python",
"bytes": "37612"
},
{
"name": "Shell",
"bytes": "73897"
}
],
"symlink_target": ""
} |
<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
</head>
<body>
Comparable object wrappers
</body>
</html> | {
"content_hash": "c30be7f4bf6d6906a6b186054d40e957",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 75,
"avg_line_length": 39.48,
"alnum_prop": 0.7619047619047619,
"repo_name": "zhangdian/solr4.6.0",
"id": "5d6252dad937cd435e9abd7ccc28af6779bf96bf",
"size": "987",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "lucene/core/src/java/org/apache/lucene/util/mutable/package.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "26754"
},
{
"name": "CSS",
"bytes": "124919"
},
{
"name": "Java",
"bytes": "36911040"
},
{
"name": "JavaScript",
"bytes": "1208827"
},
{
"name": "Perl",
"bytes": "91873"
},
{
"name": "Python",
"bytes": "278886"
},
{
"name": "Shell",
"bytes": "77667"
},
{
"name": "XSLT",
"bytes": "88379"
}
],
"symlink_target": ""
} |
var input;
var emptyText;
function testSettingEmptyStringClearsSubFields(type) {
description('Check if input.value="" clears an input with partially-specified value.');
input = document.createElement('input');
input.type = type;
document.body.appendChild(input);
input.focus();
emptyText = getUserAgentShadowTextContent(input);
if (!window.eventSender)
debug('This test needs to be run on DRT/WTR.');
else {
debug('Empty text: ' + emptyText);
shouldNotBe('eventSender.keyDown("ArrowUp"); getUserAgentShadowTextContent(input)', 'emptyText');
shouldBe('input.value = ""; getUserAgentShadowTextContent(input)', 'emptyText');
input.remove();
}
}
| {
"content_hash": "aaad887170d6dd91eeecc2d250b06197",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 105,
"avg_line_length": 36,
"alnum_prop": 0.6833333333333333,
"repo_name": "Samsung/ChromiumGStreamerBackend",
"id": "7db440cd1a92bae553ad8f613ccdb4c1fffaf60c",
"size": "720",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "third_party/WebKit/LayoutTests/fast/forms/resources/multiple-fields-value-set-empty.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
#region License
//
// Copyright Tony Beveridge 2015. All rights reserved.
// MIT license applies.
//
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Management;
namespace Infra {
// Not my creation.
// See: http://www.codescratcher.com/windows-forms/get-computer-hardware-information-using-c/
public static class HardwareInfo {
/// <summary>
/// Retrieving Processor Id.
/// </summary>
/// <returns></returns>
///
public static String GetProcessorId() {
ManagementClass mc = new ManagementClass("win32_processor");
ManagementObjectCollection moc = mc.GetInstances();
String Id = String.Empty;
foreach (ManagementObject mo in moc) {
Id = mo.Properties["processorID"].Value.ToString();
break;
}
return Id;
}
/// <summary>
/// Retrieving HDD Serial No.
/// </summary>
/// <returns></returns>
public static String GetHDDSerialNo() {
ManagementClass mangnmt = new ManagementClass("Win32_LogicalDisk");
ManagementObjectCollection mcol = mangnmt.GetInstances();
string result = "";
foreach (ManagementObject strt in mcol) {
result += Convert.ToString(strt["VolumeSerialNumber"]);
}
return result;
}
/// <summary>
/// Retrieving System MAC Address.
/// </summary>
/// <returns></returns>
public static string GetMACAddress() {
ManagementClass mc = new ManagementClass("Win32_NetworkAdapterConfiguration");
ManagementObjectCollection moc = mc.GetInstances();
string MACAddress = String.Empty;
foreach (ManagementObject mo in moc) {
if (MACAddress == String.Empty) {
if ((bool)mo["IPEnabled"] == true) MACAddress = mo["MacAddress"].ToString();
}
mo.Dispose();
}
MACAddress = MACAddress.Replace(":", "");
return MACAddress;
}
/// <summary>
/// Retrieving Motherboard Manufacturer.
/// </summary>
/// <returns></returns>
public static string GetBoardMaker() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BaseBoard");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Manufacturer").ToString();
}
catch { }
}
return "Board Maker: Unknown";
}
/// <summary>
/// Retrieving Motherboard Product Id.
/// </summary>
/// <returns></returns>
public static string GetBoardProductId() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BaseBoard");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Product").ToString();
}
catch { }
}
return "Product: Unknown";
}
/// <summary>
/// Retrieving CD-DVD Drive Path.
/// </summary>
/// <returns></returns>
public static string GetCdRomDrive() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_CDROMDrive");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Drive").ToString();
}
catch { }
}
return "CD ROM Drive Letter: Unknown";
}
/// <summary>
/// Retrieving BIOS Maker.
/// </summary>
/// <returns></returns>
public static string GetBIOSmaker() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BIOS");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Manufacturer").ToString();
}
catch { }
}
return "BIOS Maker: Unknown";
}
/// <summary>
/// Retrieving BIOS Serial No.
/// </summary>
/// <returns></returns>
public static string GetBIOSserNo() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BIOS");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("SerialNumber").ToString();
}
catch { }
}
return "BIOS Serial Number: Unknown";
}
/// <summary>
/// Retrieving BIOS Caption.
/// </summary>
/// <returns></returns>
public static string GetBIOScaption() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BIOS");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Caption").ToString();
}
catch { }
}
return "BIOS Caption: Unknown";
}
/// <summary>
/// Retrieving System Account Name.
/// </summary>
/// <returns></returns>
public static string GetAccountName() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_UserAccount");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("Name").ToString();
}
catch { }
}
return "User Account Name: Unknown";
}
/// <summary>
/// Retrieving Physical Ram Memory.
/// </summary>
/// <returns></returns>
public static string GetPhysicalMemory() {
ManagementScope oMs = new ManagementScope();
ObjectQuery oQuery = new ObjectQuery("SELECT Capacity FROM Win32_PhysicalMemory");
ManagementObjectSearcher oSearcher = new ManagementObjectSearcher(oMs, oQuery);
ManagementObjectCollection oCollection = oSearcher.Get();
long MemSize = 0;
long mCap = 0;
// In case more than one Memory sticks are installed
foreach (ManagementObject obj in oCollection) {
mCap = Convert.ToInt64(obj["Capacity"]);
MemSize += mCap;
}
MemSize = (MemSize / 1024) / 1024;
return MemSize.ToString() + "MB";
}
/// <summary>
/// Retrieving No of Ram Slot on Motherboard.
/// </summary>
/// <returns></returns>
public static string GetNoRamSlots() {
int MemSlots = 0;
ManagementScope oMs = new ManagementScope();
ObjectQuery oQuery2 = new ObjectQuery("SELECT MemoryDevices FROM Win32_PhysicalMemoryArray");
ManagementObjectSearcher oSearcher2 = new ManagementObjectSearcher(oMs, oQuery2);
ManagementObjectCollection oCollection2 = oSearcher2.Get();
foreach (ManagementObject obj in oCollection2) {
MemSlots = Convert.ToInt32(obj["MemoryDevices"]);
}
return MemSlots.ToString();
}
//Get CPU Temprature.
/// <summary>
/// method for retrieving the CPU Manufacturer
/// using the WMI class
/// </summary>
/// <returns>CPU Manufacturer</returns>
public static string GetCPUManufacturer() {
string cpuMan = String.Empty;
//create an instance of the Managemnet class with the
//Win32_Processor class
ManagementClass mgmt = new ManagementClass("Win32_Processor");
//create a ManagementObjectCollection to loop through
ManagementObjectCollection objCol = mgmt.GetInstances();
//start our loop for all processors found
foreach (ManagementObject obj in objCol) {
if (cpuMan == String.Empty) {
// only return manufacturer from first CPU
cpuMan = obj.Properties["Manufacturer"].Value.ToString();
}
}
return cpuMan;
}
/// <summary>
/// method to retrieve the CPU's current
/// clock speed using the WMI class
/// </summary>
/// <returns>Clock speed</returns>
public static int GetCPUCurrentClockSpeed() {
int cpuClockSpeed = 0;
//create an instance of the Managemnet class with the
//Win32_Processor class
ManagementClass mgmt = new ManagementClass("Win32_Processor");
//create a ManagementObjectCollection to loop through
ManagementObjectCollection objCol = mgmt.GetInstances();
//start our loop for all processors found
foreach (ManagementObject obj in objCol) {
if (cpuClockSpeed == 0) {
// only return cpuStatus from first CPU
cpuClockSpeed = Convert.ToInt32(obj.Properties["CurrentClockSpeed"].Value.ToString());
}
}
//return the status
return cpuClockSpeed;
}
/// <summary>
/// method to retrieve the network adapters
/// default IP gateway using WMI
/// </summary>
/// <returns>adapters default IP gateway</returns>
public static string GetDefaultIPGateway() {
//create out management class object using the
//Win32_NetworkAdapterConfiguration class to get the attributes
//of the network adapter
ManagementClass mgmt = new ManagementClass("Win32_NetworkAdapterConfiguration");
//create our ManagementObjectCollection to get the attributes with
ManagementObjectCollection objCol = mgmt.GetInstances();
string gateway = String.Empty;
//loop through all the objects we find
foreach (ManagementObject obj in objCol) {
if (gateway == String.Empty) // only return MAC Address from first card
{
//grab the value from the first network adapter we find
//you can change the string to an array and get all
//network adapters found as well
//check to see if the adapter's IPEnabled
//equals true
if ((bool)obj["IPEnabled"] == true) {
gateway = obj["DefaultIPGateway"].ToString();
}
}
//dispose of our object
obj.Dispose();
}
//replace the ":" with an empty space, this could also
//be removed if you wish
gateway = gateway.Replace(":", "");
//return the mac address
return gateway;
}
/// <summary>
/// Retrieve CPU Speed.
/// </summary>
/// <returns></returns>
public static double? GetCpuSpeedInGHz() {
double? GHz = null;
using (ManagementClass mc = new ManagementClass("Win32_Processor")) {
foreach (ManagementObject mo in mc.GetInstances()) {
GHz = 0.001 * (UInt32)mo.Properties["CurrentClockSpeed"].Value;
break;
}
}
return GHz;
}
/// <summary>
/// Retrieving Current Language
/// </summary>
/// <returns></returns>
public static string GetCurrentLanguage() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("root\\CIMV2", "SELECT * FROM Win32_BIOS");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return wmi.GetPropertyValue("CurrentLanguage").ToString();
}
catch { }
}
return "BIOS Maker: Unknown";
}
/// <summary>
/// Retrieving Current Language.
/// </summary>
/// <returns></returns>
public static string GetOSInformation() {
ManagementObjectSearcher searcher = new ManagementObjectSearcher("SELECT * FROM Win32_OperatingSystem");
foreach (ManagementObject wmi in searcher.Get()) {
try {
return ((string)wmi["Caption"]).Trim() + ", " + (string)wmi["Version"] + ", " + (string)wmi["OSArchitecture"];
}
catch { }
}
return "BIOS Maker: Unknown";
}
/// <summary>
/// Retrieving Processor Information.
/// </summary>
/// <returns></returns>
public static String GetProcessorInformation() {
ManagementClass mc = new ManagementClass("win32_processor");
ManagementObjectCollection moc = mc.GetInstances();
String info = String.Empty;
foreach (ManagementObject mo in moc) {
string name = (string)mo["Name"];
name = name.Replace("(TM)", "™").Replace("(tm)", "™").Replace("(R)", "®").Replace("(r)", "®").Replace("(C)", "©").Replace("(c)", "©").Replace(" ", " ").Replace(" ", " ");
info = name + ", " + (string)mo["Caption"] + ", " + (string)mo["SocketDesignation"];
//mo.Properties["Name"].Value.ToString();
//break;
}
return info;
}
/// <summary>
/// Retrieving Computer Name.
/// </summary>
/// <returns></returns>
public static String GetComputerName() {
ManagementClass mc = new ManagementClass("Win32_ComputerSystem");
ManagementObjectCollection moc = mc.GetInstances();
String info = String.Empty;
foreach (ManagementObject mo in moc) {
info = (string)mo["Name"];
//mo.Properties["Name"].Value.ToString();
//break;
}
return info;
}
}
} | {
"content_hash": "d9a3e8a98e36b73061fd6d0d00709b6c",
"timestamp": "",
"source": "github",
"line_count": 407,
"max_line_length": 190,
"avg_line_length": 35.92137592137592,
"alnum_prop": 0.5345417236662107,
"repo_name": "afgbeveridge/Quorum",
"id": "738e9572874004853b9cb5e0a3a312f12d9167f2",
"size": "14630",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Infra/HardwareInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "104"
},
{
"name": "Batchfile",
"bytes": "356"
},
{
"name": "C#",
"bytes": "334520"
},
{
"name": "CSS",
"bytes": "21167"
},
{
"name": "HTML",
"bytes": "5127"
},
{
"name": "JavaScript",
"bytes": "564775"
}
],
"symlink_target": ""
} |
> For quick and/or simple scripts that need _dockerization_
1. Start with a base image like Debian or Ubuntu.
2. Start a container: `docker run -it -name <name> --hostname <name> debian bash`
3. `apt-get update`
4. `apt-get install <dependencies>`
5. Then run `docker commit` to create an image: e.g. `docker commit <name> test/<name_image>`
| {
"content_hash": "f339200319a395b73d8a25923344f86e",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 93,
"avg_line_length": 49,
"alnum_prop": 0.7201166180758017,
"repo_name": "MichaelMartinez/til",
"id": "6b638f889874b0759e3cfb9ba354e619f88d5bc0",
"size": "367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docker/simple-docker-images.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
function openCancelDialog(node, cancel_url, delete_url, event){
event.stopPropagation();
var btns = {"This":function(){window.location=cancel_url;}, "All":function(){window.location=delete_url}, "Do nothing":function(){$(this).dialog("destroy");}};
dia = $("#delete_dialog").dialog({'buttons':btns, 'modal':true});
dia.dialog('open');
return false;
}
function openEditDialog(node, occurrence_url, event_url, event){
event.stopPropagation();
var btns = {"This":function(){window.location=occurrence_url;}, "All":function(){window.location=event_url}, "Do nothing":function(){$(this).dialog("destroy");}};
dia = $("#edit_dialog").dialog({'buttons':btns, 'modal':true});
dia.dialog('open');
return false;
}
function openDetail(node){
var btns = { "Close":function(){$(this).dialog("destroy");}};
dia = $($(node).attr("href")).dialog({'buttons':btns, 'modal':true, 'title':'Details'});
dia.dialog('open');
return false;
}
function openURL(url, event){
event.stopPropagation();
window.location=url;
} | {
"content_hash": "7ffb3bb943bebe335237a94b7bbe2ff5",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 164,
"avg_line_length": 38.2962962962963,
"alnum_prop": 0.6673114119922631,
"repo_name": "michealcarrerweb/LHVent_app",
"id": "2f042481042eb7dbfe39f418a3f4077e84ba2bd4",
"size": "1034",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "LHV_app/site_media/static/schedule.7ffb3bb943be.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "678251"
},
{
"name": "HTML",
"bytes": "187707"
},
{
"name": "JavaScript",
"bytes": "425409"
},
{
"name": "Python",
"bytes": "563574"
},
{
"name": "Shell",
"bytes": "520"
}
],
"symlink_target": ""
} |
require "spec_helper"
describe Lita::Adapters::Slack::MessageHandler, lita: true do
subject { described_class.new(robot, robot_id, data) }
before do
allow(robot).to receive(:trigger)
Lita::Adapters::Slack::RoomCreator.create_room(channel, robot)
end
let(:robot) { instance_double('Lita::Robot', name: 'Lita', mention_name: 'lita') }
let(:robot_id) { 'U12345678' }
let(:channel) { Lita::Adapters::Slack::SlackChannel.new('C2147483705', 'general', 1360782804, 'U023BECGF', raw_data) }
let(:raw_data) { Hash.new }
describe "#handle" do
context "with a hello message" do
let(:data) { { "type" => "hello" } }
it "triggers a connected event" do
expect(robot).to receive(:trigger).with(:connected)
subject.handle
end
end
context "with a normal message" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"user" => "U023BECGF",
"text" => "Hello",
"ts" => "1234.5678"
}
end
let(:message) { instance_double('Lita::Message', command!: false, extensions: {}) }
let(:source) { instance_double('Lita::Source', private_message?: false) }
let(:user) { instance_double('Lita::User', id: 'U023BECGF') }
before do
allow(Lita::User).to receive(:find_by_id).and_return(user)
allow(Lita::Source).to receive(:new).with(
user: user,
room: "C2147483705"
).and_return(source)
allow(Lita::Message).to receive(:new).with(robot, "Hello", source).and_return(message)
allow(robot).to receive(:receive).with(message)
end
it "dispatches the message to Lita" do
expect(robot).to receive(:receive).with(message)
subject.handle
end
it "saves the timestamp in extensions" do
subject.handle
expect(message.extensions[:slack][:timestamp]).to eq("1234.5678")
end
context "when the message is a direct message" do
let(:data) do
{
"type" => "message",
"channel" => "D2147483705",
"user" => "U023BECGF",
"text" => "Hello"
}
end
before do
allow(Lita::Source).to receive(:new).with(
user: user,
room: "D2147483705"
).and_return(source)
allow(source).to receive(:private_message!).and_return(true)
allow(source).to receive(:private_message?).and_return(true)
end
it "marks the source as a private message" do
expect(source).to receive(:private_message!)
subject.handle
end
it "marks the message as a command" do
expect(message).to receive(:command!)
subject.handle
end
end
context "when the message starts with a Slack-style @-mention" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"user" => "U023BECGF",
"text" => "<@#{robot_id}>: Hello"
}
end
it "converts it to a Lita-style @-mention" do
expect(Lita::Message).to receive(:new).with(
robot,
"@lita: Hello",
source
).and_return(message)
subject.handle
end
end
context "when the message has attach" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"user" => "U023BECGF",
"text" => "Hello",
"attachments" => [{"text" => "attached hello"}]
}
end
it "recives attachment text" do
expect(Lita::Message).to receive(:new).with(
robot,
"Hello\nattached hello",
source
).and_return(message)
subject.handle
end
end
context "when the message is nil" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"user" => "U023BECGF",
}
end
it "dispatches an empty message to Lita" do
expect(Lita::Message).to receive(:new).with(
robot,
"",
source
).and_return(message)
subject.handle
end
end
describe "Removing message formatting" do
let(:user) { instance_double('Lita::User', id: 'U123',name: 'name', mention_name: 'label') }
context "does nothing if there are no user links" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo",
source
).and_return(message)
subject.handle
end
end
context "decodes entities" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo > & < >&<",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo > & < >&<",
source
).and_return(message)
subject.handle
end
end
context "changes <@123> links to @label" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <@123> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo @label bar",
source
).and_return(message)
subject.handle
end
end
context "changes <@U123|label> links to label" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <@123|label> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo label bar",
source
).and_return(message)
subject.handle
end
end
context "changes <#C2147483705> links to #general" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <#C2147483705> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo #general bar",
source
).and_return(message)
subject.handle
end
end
context "changes <#C2147483705|genral> links to #general" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <#C2147483705|general> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo general bar",
source
).and_return(message)
subject.handle
end
end
context "changes <!everyone> links to @everyone" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <!everyone> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo @everyone bar",
source
).and_return(message)
subject.handle
end
end
context "changes <!channel> links to @channel" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <!channel> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo @channel bar",
source
).and_return(message)
subject.handle
end
end
context "changes <!group> links to @group" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <!group> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo @group bar",
source
).and_return(message)
subject.handle
end
end
context "removes remove formatting around <http> links" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <http://www.example.com> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo http://www.example.com bar",
source
).and_return(message)
subject.handle
end
end
context "removes remove formatting around <http> links with a substring label" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <http://www.example.com|www.example.com> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo www.example.com bar",
source
).and_return(message)
subject.handle
end
end
context "remove formatting around <skype> links" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <skype:echo123?call> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo skype:echo123?call bar",
source
).and_return(message)
subject.handle
end
end
context "remove formatting around <http> links with a label containing entities" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <http://www.example.com|label > & <> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo label > & < (http://www.example.com) bar",
source
).and_return(message)
subject.handle
end
end
context "remove formatting around around <mailto> links" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <mailto:[email protected]> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo [email protected] bar",
source
).and_return(message)
subject.handle
end
end
context "remove formatting around <mailto> links with an email label" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <mailto:[email protected]|[email protected]> bar",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo [email protected] bar",
source
).and_return(message)
subject.handle
end
end
context "change multiple links at once" do
let(:data) do
{
"type" => "message",
"channel" => "C2147483705",
"text" => "foo <@U123|label> bar <#C2147483705> <!channel> <http://www.example.com|label>",
}
end
it "removes formatting" do
expect(Lita::Message).to receive(:new).with(
robot,
"foo label bar #general @channel label (http://www.example.com)",
source
).and_return(message)
subject.handle
end
end
end
end
context "with a message with an unsupported subtype" do
let(:data) do
{
"type" => "message",
"subtype" => "???"
}
end
it "does not dispatch the message to Lita" do
expect(robot).not_to receive(:receive)
subject.handle
end
end
context "with a message from the robot itself" do
let(:data) do
{
"type" => "message",
"subtype" => "bot_message"
}
end
let(:user) { instance_double('Lita::User', id: 12345) }
before do
# TODO: This probably shouldn't be tested with stubs.
allow(Lita::User).to receive(:find_by_id).and_return(user)
allow(Lita::User).to receive(:find_by_name).and_return(user)
end
it "does not dispatch the message to Lita" do
expect(robot).not_to receive(:receive)
subject.handle
end
end
context "with a team join message" do
# let(:bobby) { Lita::Adapters::Slack::SlackUser.new('U023BECGF', 'bobby', real_name) }
let(:data) do
{
"type" => "team_join",
"user" => {
"id" => "U023BECGF",
"name" => "bobby",
"real_name" => "Bobby Tables"
}
}
end
it "creates the new user" do
expect(
Lita::Adapters::Slack::UserCreator
).to receive(:create_user) do |slack_user, robot, robot_id|
expect(slack_user.name).to eq("bobby")
end
subject.handle
end
end
context "with a bot added message" do
let(:data) do
{
"type" => "bot_added",
"bot" => {
"id" => "U01234567",
"name" => "foobot"
}
}
end
it "creates a new user for the bot" do
expect(
Lita::Adapters::Slack::UserCreator
).to receive(:create_user) do |slack_user, robot, robot_id|
expect(slack_user.name).to eq("foobot")
end
subject.handle
end
end
%w(channel_created channel_rename group_rename).each do |type|
context "with a #{type} message" do
before { allow(robot).to receive(:trigger) }
let(:data) do
{
"type" => type,
"channel" => {
"id" => "C01234567890",
"name" => "mychannel",
}
}
end
it "creates a new room for the channel" do
subject.handle
expect(Lita::Room.find_by_name("mychannel").id).to eq('C01234567890')
end
end
end
context "with an error message" do
let(:data) do
{
"type" => "error",
"error" => {
"code" => 2,
"msg" => "message text is missing"
}
}
end
it "logs the error" do
expect(Lita.logger).to receive(:error).with(
"Error with code 2 received from Slack: message text is missing"
)
subject.handle
end
end
context "with an unknown message" do
let(:data) { { "type" => "???" } }
it "logs the type" do
expect(Lita.logger).to receive(:debug).with(
"??? event received from Slack and will be ignored."
)
subject.handle
end
end
end
end
| {
"content_hash": "974bd08db2af5f0e761296273e571ea2",
"timestamp": "",
"source": "github",
"line_count": 598,
"max_line_length": 120,
"avg_line_length": 30.47826086956522,
"alnum_prop": 0.4214857895314386,
"repo_name": "ajaleelp/lita-slack",
"id": "1be64d4e89767dd7b5aee08c90e1ff329c491d15",
"size": "18226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/lita/adapters/slack/message_handler_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "68932"
}
],
"symlink_target": ""
} |
#pragma checksum "C:\APPA\Projectos\Proprios\WindowsPhone\WorkOrganizer\WorkOrganizer\WorkOrganizer\WorkOrganizer.Windows\HousesManagementPage.xaml" "{406ea660-64cf-4c82-b6f0-42d48172a799}" "31902D799263541804A96283E3B24A05"
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace WorkOrganizer
{
partial class AuxiliarManagementPage : global::Windows.UI.Xaml.Controls.Page
{
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private global::Windows.UI.Xaml.Controls.Grid LayoutRoot;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private global::Windows.UI.Xaml.Controls.TextBlock WorkOrganizer;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private global::Windows.UI.Xaml.Controls.Button ButtonGoBack;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private global::Windows.UI.Xaml.Controls.TextBlock TitleHouses;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private global::Windows.UI.Xaml.Controls.Button ButtonAddHouse;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
private bool _contentLoaded;
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.Windows.UI.Xaml.Build.Tasks"," 4.0.0.0")]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public void InitializeComponent()
{
if (_contentLoaded)
return;
_contentLoaded = true;
global::Windows.UI.Xaml.Application.LoadComponent(this, new global::System.Uri("ms-appx:///HousesManagementPage.xaml"), global::Windows.UI.Xaml.Controls.Primitives.ComponentResourceLocation.Application);
LayoutRoot = (global::Windows.UI.Xaml.Controls.Grid)this.FindName("LayoutRoot");
WorkOrganizer = (global::Windows.UI.Xaml.Controls.TextBlock)this.FindName("WorkOrganizer");
ButtonGoBack = (global::Windows.UI.Xaml.Controls.Button)this.FindName("ButtonGoBack");
TitleHouses = (global::Windows.UI.Xaml.Controls.TextBlock)this.FindName("TitleHouses");
ButtonAddHouse = (global::Windows.UI.Xaml.Controls.Button)this.FindName("ButtonAddHouse");
}
}
}
| {
"content_hash": "196036b600795a3209de2f0a4923dfd8",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 224,
"avg_line_length": 57.3,
"alnum_prop": 0.6736474694589878,
"repo_name": "appa-programming/WorkOrganizer",
"id": "bcf2f1856dad05ea5206227be4dc6f4670a78412",
"size": "2867",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WorkOrganizer/WorkOrganizer/WorkOrganizer.Windows/obj/Debug/HousesManagementPage.g.i.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "471474"
},
{
"name": "HTML",
"bytes": "38642"
}
],
"symlink_target": ""
} |
package io.druid.query.extraction;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
/**
 * Unit tests for {@code MapLookupExtractor}: forward apply, reverse unapply,
 * cache-key stability, and equals/hashCode contracts.
 */
public class MapLookupExtractorTest
{
  // Use a parameterized Map instead of the raw type to avoid unchecked
  // warnings and document the expected key/value types.
  private final Map<String, String> lookupMap = ImmutableMap.of("foo", "bar", "null", "", "empty String", "", "", "empty_string");
  private final MapLookupExtractor fn = new MapLookupExtractor(lookupMap, false);

  /** unapply maps a value back to the set of keys that produce it. */
  @Test
  public void testUnApply()
  {
    Assert.assertEquals(Arrays.asList("foo"), fn.unapply("bar"));
    // Two keys ("null" and "empty String") both map to the empty string.
    Assert.assertEquals(Sets.newHashSet("null", "empty String"), Sets.newHashSet(fn.unapply("")));
    Assert.assertEquals("Null value should be equal to empty string",
                        Sets.newHashSet("null", "empty String"),
                        Sets.newHashSet(fn.unapply((String) null)));
    Assert.assertEquals(Sets.newHashSet(""), Sets.newHashSet(fn.unapply("empty_string")));
    // emptyList() is the type-safe replacement for the legacy EMPTY_LIST constant.
    Assert.assertEquals("not existing value returns empty list", Collections.emptyList(), fn.unapply("not There"));
  }

  /** getMap returns the backing map unchanged. */
  @Test
  public void testGetMap()
  {
    Assert.assertEquals(lookupMap, fn.getMap());
  }

  /** apply performs a plain forward lookup. */
  @Test
  public void testApply()
  {
    Assert.assertEquals("bar", fn.apply("foo"));
  }

  /** Cache keys must match for equal maps and differ when keys or values differ. */
  @Test
  public void testGetCacheKey()
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false);
    Assert.assertArrayEquals(fn.getCacheKey(), fn2.getCacheKey());
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"), false);
    Assert.assertFalse(Arrays.equals(fn.getCacheKey(), fn3.getCacheKey()));
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"), false);
    Assert.assertFalse(Arrays.equals(fn.getCacheKey(), fn4.getCacheKey()));
  }

  /** equals is based on map contents: equal copy matches, different key or value does not. */
  @Test
  public void testEquals()
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false);
    Assert.assertEquals(fn, fn2);
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"), false);
    Assert.assertNotEquals(fn, fn3);
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"), false);
    Assert.assertNotEquals(fn, fn4);
  }

  /** hashCode must be consistent with equals for the same three cases. */
  @Test
  public void testHashCode()
  {
    final MapLookupExtractor fn2 = new MapLookupExtractor(ImmutableMap.copyOf(lookupMap), false);
    Assert.assertEquals(fn.hashCode(), fn2.hashCode());
    final MapLookupExtractor fn3 = new MapLookupExtractor(ImmutableMap.of("foo2", "bar"), false);
    Assert.assertNotEquals(fn.hashCode(), fn3.hashCode());
    final MapLookupExtractor fn4 = new MapLookupExtractor(ImmutableMap.of("foo", "bar2"), false);
    Assert.assertNotEquals(fn.hashCode(), fn4.hashCode());
  }
}
| {
"content_hash": "bf2c259b2179406a192bbf37712cef08",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 114,
"avg_line_length": 36.58441558441559,
"alnum_prop": 0.7121050763223287,
"repo_name": "taochaoqiang/druid",
"id": "870812dcac4b3f22fdce77c61d894d381b12d4dd",
"size": "3622",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "processing/src/test/java/io/druid/query/extraction/MapLookupExtractorTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1406"
},
{
"name": "CSS",
"bytes": "11623"
},
{
"name": "HTML",
"bytes": "26739"
},
{
"name": "Java",
"bytes": "15694963"
},
{
"name": "JavaScript",
"bytes": "295150"
},
{
"name": "Makefile",
"bytes": "659"
},
{
"name": "PostScript",
"bytes": "5"
},
{
"name": "Protocol Buffer",
"bytes": "552"
},
{
"name": "R",
"bytes": "17002"
},
{
"name": "Roff",
"bytes": "3617"
},
{
"name": "Shell",
"bytes": "3997"
},
{
"name": "TeX",
"bytes": "399444"
},
{
"name": "Thrift",
"bytes": "199"
}
],
"symlink_target": ""
} |
package org.gobiiproject.gobiiclient.dtorequests.dbops.crud;
import org.apache.commons.lang.StringUtils;
import org.gobiiproject.gobiiapimodel.hateos.Link;
import org.gobiiproject.gobiiapimodel.hateos.LinkCollection;
import org.gobiiproject.gobiiapimodel.payload.PayloadEnvelope;
import org.gobiiproject.gobiiapimodel.restresources.RestUri;
import org.gobiiproject.gobiiapimodel.types.ServiceRequestId;
import org.gobiiproject.gobiiclient.core.common.Authenticator;
import org.gobiiproject.gobiiclient.core.common.ClientContext;
import org.gobiiproject.gobiiclient.core.gobii.GobiiEnvelopeRestResource;
import org.gobiiproject.gobiiclient.dtorequests.Helpers.*;
import org.gobiiproject.gobiimodel.headerlesscontainer.AnalysisDTO;
import org.gobiiproject.gobiimodel.headerlesscontainer.NameIdDTO;
import org.gobiiproject.gobiimodel.types.GobiiEntityNameType;
import org.gobiiproject.gobiimodel.types.GobiiFilterType;
import org.gobiiproject.gobiimodel.types.GobiiProcessType;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * CRUD integration tests for the Analysis REST resource.
 * Each test authenticates once per class (see setUpClass) and exercises the
 * /analysis endpoints through GobiiEnvelopeRestResource against a live server.
 */
public class DtoCrudRequestAnalysisTest implements DtoCrudRequestTest {
    // Authenticate once for the whole class; tests assume a valid session.
    @BeforeClass
    public static void setUpClass() throws Exception {
        Assert.assertTrue(Authenticator.authenticate());
    }
    // Tear down the session created in setUpClass.
    @AfterClass
    public static void tearDownUpClass() throws Exception {
        Assert.assertTrue(Authenticator.deAuthenticate());
    }
    /**
     * GET the analysis collection, then re-fetch one arbitrary analysis by id
     * and verify it has a positive id and a non-null name.
     */
    @Test
    @Override
    public void get() throws Exception {
        RestUri restUriAnalysis = ClientContext.getInstance(null,false)
                .getUriFactory()
                .resourceColl(ServiceRequestId.URL_ANALYSIS);
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(restUriAnalysis);
        PayloadEnvelope<AnalysisDTO> resultEnvelope = gobiiEnvelopeRestResource.get(AnalysisDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        List<AnalysisDTO> analysisDTOList = resultEnvelope.getPayload().getData();
        Assert.assertNotNull(analysisDTOList);
        Assert.assertTrue(analysisDTOList.size() > 0);
        Assert.assertNotNull(analysisDTOList.get(0).getAnalysisName());
        // use an arbitrary analysis id
        Integer analysisId = analysisDTOList.get(0).getAnalysisId();
        RestUri restUriAnalysisForGetById = ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_ANALYSIS);
        restUriAnalysisForGetById.setParamValue("id", analysisId.toString());
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResourceForGetById = new GobiiEnvelopeRestResource<>(restUriAnalysisForGetById);
        PayloadEnvelope<AnalysisDTO> resultEnvelopeForGetById = gobiiEnvelopeRestResourceForGetById
                .get(AnalysisDTO.class);
        // NOTE(review): this re-checks the collection envelope's header, not
        // resultEnvelopeForGetById's — presumably a copy/paste slip; confirm.
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        AnalysisDTO analysisDTO = resultEnvelopeForGetById.getPayload().getData().get(0);
        Assert.assertTrue(analysisDTO.getAnalysisId() > 0);
        Assert.assertNotNull(analysisDTO.getAnalysisName());
    }
    /**
     * Requesting an id beyond the current max primary key must return an empty
     * (but well-formed) payload rather than an error.
     * NOTE(review): unlike get/create/update, this method has no @Test
     * annotation, so JUnit will not run it — confirm whether that is intended.
     */
    @Override
    public void testEmptyResult() throws Exception {
        DtoRestRequestUtils<AnalysisDTO> dtoDtoRestRequestUtils = new DtoRestRequestUtils<>(AnalysisDTO.class,
                ServiceRequestId.URL_ANALYSIS);
        Integer maxId = dtoDtoRestRequestUtils.getMaxPkVal();
        Integer nonExistentID = maxId + 1;
        PayloadEnvelope<AnalysisDTO> resultEnvelope = dtoDtoRestRequestUtils.getResponseEnvelopeForEntityId(nonExistentID.toString());
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        Assert.assertNotNull(resultEnvelope.getPayload());
        Assert.assertNotNull(resultEnvelope.getPayload().getData());
        Assert.assertTrue(resultEnvelope.getPayload().getData().size() == 0);
    }
    /**
     * POST a new analysis built from the server's "analysis_type" CV terms,
     * then re-fetch it by the returned id.
     */
    @Test
    @Override
    public void create() throws Exception {
        // Look up the CV terms permitted for analysis_type to build valid properties.
        RestUri namesUri = ClientContext.getInstance(null, false).getUriFactory().nameIdListByQueryParams();
        namesUri.setParamValue("entity", GobiiEntityNameType.CVTERMS.toString().toLowerCase());
        namesUri.setParamValue("filterType", StringUtils.capitalize(GobiiFilterType.BYTYPENAME.toString()));
        namesUri.setParamValue("filterValue", "analysis_type");
        GobiiEnvelopeRestResource<NameIdDTO> gobiiEnvelopeRestResourceForAnalysisTerms = new GobiiEnvelopeRestResource<>(namesUri);
        PayloadEnvelope<NameIdDTO> resultEnvelope = gobiiEnvelopeRestResourceForAnalysisTerms
                .get(NameIdDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        List<NameIdDTO> analysisTypes = resultEnvelope.getPayload().getData();
        List<NameIdDTO> analysisProperTerms = new ArrayList<>(analysisTypes);
        EntityParamValues entityParamValues = TestDtoFactory
                .makeConstrainedEntityParams(analysisProperTerms, 1);
        AnalysisDTO newAnalysisDto = TestDtoFactory
                .makePopulatedAnalysisDTO(GobiiProcessType.CREATE, 1, entityParamValues);
        PayloadEnvelope<AnalysisDTO> payloadEnvelope = new PayloadEnvelope<>(newAnalysisDto, GobiiProcessType.CREATE);
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceColl(ServiceRequestId.URL_ANALYSIS));
        PayloadEnvelope<AnalysisDTO> analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
                payloadEnvelope);
        AnalysisDTO analysisDTOResponse = analysisDTOResponseEnvelope.getPayload().getData().get(0);
        Assert.assertNotEquals(null, analysisDTOResponse);
        Assert.assertTrue(analysisDTOResponse.getAnalysisId() > 0);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelope.getHeader()));
        GlobalPkValues.getInstance().addPkVal(GobiiEntityNameType.ANALYSES, analysisDTOResponse.getAnalysisId());
        RestUri restUriAnalysisForGetById = ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_ANALYSIS);
        restUriAnalysisForGetById.setParamValue("id", analysisDTOResponse.getAnalysisId().toString());
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResouceForGetById = new GobiiEnvelopeRestResource<>(restUriAnalysisForGetById);
        PayloadEnvelope<AnalysisDTO> resultEnvelopeForGetById = gobiiEnvelopeRestResouceForGetById
                .get(AnalysisDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetById.getHeader()));
        // NOTE(review): analysisDTOResponseForParams is never asserted against,
        // and the addPkVal below repeats the registration done above — both
        // look like leftovers; confirm before removing.
        AnalysisDTO analysisDTOResponseForParams = resultEnvelopeForGetById.getPayload().getData().get(0);
        GlobalPkValues.getInstance().addPkVal(GobiiEntityNameType.ANALYSES, analysisDTOResponse.getAnalysisId());
    } // testAnalysisCreate
    /**
     * Create an analysis, re-read it, rename it via PUT, then re-read again and
     * verify the new name stuck.
     */
    @Test
    @Override
    public void update() throws Exception {
        RestUri namesUri = ClientContext.getInstance(null, false).getUriFactory().nameIdListByQueryParams();
        namesUri.setParamValue("entity", GobiiEntityNameType.CVTERMS.toString().toLowerCase());
        namesUri.setParamValue("filterType", StringUtils.capitalize(GobiiFilterType.BYTYPENAME.toString()));
        namesUri.setParamValue("filterValue", "analysis_type");
        GobiiEnvelopeRestResource<NameIdDTO> gobiiEnvelopeRestResourceForAnalysisTerms = new GobiiEnvelopeRestResource<>(namesUri);
        PayloadEnvelope<NameIdDTO> resultEnvelope = gobiiEnvelopeRestResourceForAnalysisTerms
                .get(NameIdDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        List<NameIdDTO> analysisTypes = resultEnvelope.getPayload().getData();
        List<NameIdDTO> analysisProperTerms = new ArrayList<>(analysisTypes);
        EntityParamValues entityParamValues = TestDtoFactory
                .makeConstrainedEntityParams(analysisProperTerms, 1);
        // create a new analysis for our test
        AnalysisDTO newAnalysisDto = TestDtoFactory
                .makePopulatedAnalysisDTO(GobiiProcessType.CREATE, 1, entityParamValues);
        PayloadEnvelope<AnalysisDTO> payloadEnvelope = new PayloadEnvelope<>(newAnalysisDto, GobiiProcessType.CREATE);
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceColl(ServiceRequestId.URL_ANALYSIS));
        PayloadEnvelope<AnalysisDTO> analysisDTOResponseEnvelope = gobiiEnvelopeRestResource.post(AnalysisDTO.class,
                payloadEnvelope);
        AnalysisDTO newAnalysisDTOResponse = analysisDTOResponseEnvelope.getPayload().getData().get(0);
        // re-retrieve the analysis we just created so we start with a fresh READ mode dto
        RestUri restUriAnalysisForGetById = ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceByUriIdParam(ServiceRequestId.URL_ANALYSIS);
        restUriAnalysisForGetById.setParamValue("id", newAnalysisDTOResponse.getAnalysisId().toString());
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResourceForGetById = new GobiiEnvelopeRestResource<>(restUriAnalysisForGetById);
        PayloadEnvelope<AnalysisDTO> resultEnvelopeForGetByID = gobiiEnvelopeRestResourceForGetById
                .get(AnalysisDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetByID.getHeader()));
        AnalysisDTO analysisDTOReceived = resultEnvelopeForGetByID.getPayload().getData().get(0);
        // Rename with a UUID so the new value cannot collide with existing data.
        String newName = UUID.randomUUID().toString();
        analysisDTOReceived.setAnalysisName(newName);
        gobiiEnvelopeRestResourceForGetById.setParamValue("id", analysisDTOReceived.getAnalysisId().toString());
        PayloadEnvelope<AnalysisDTO> analysisDTOResponseEnvelopeUpdate = gobiiEnvelopeRestResourceForGetById.put(AnalysisDTO.class,
                new PayloadEnvelope<>(analysisDTOReceived, GobiiProcessType.UPDATE));
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(analysisDTOResponseEnvelopeUpdate.getHeader()));
        AnalysisDTO analysisDTORequest = analysisDTOResponseEnvelopeUpdate.getPayload().getData().get(0);
        restUriAnalysisForGetById.setParamValue("id", analysisDTORequest.getAnalysisId().toString());
        resultEnvelopeForGetByID = gobiiEnvelopeRestResourceForGetById
                .get(AnalysisDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetByID.getHeader()));
        AnalysisDTO dtoRequestAnalysisReRetrieved = resultEnvelopeForGetByID.getPayload().getData().get(0);
        Assert.assertTrue(dtoRequestAnalysisReRetrieved.getAnalysisName().equals(newName));
    }
    /**
     * GET the collection and verify each item's HATEOAS link dereferences to
     * the same analysis (sampling at most a subset when the list is large).
     * NOTE(review): like testEmptyResult, this lacks a @Test annotation and so
     * will not be executed by JUnit — confirm whether that is intended.
     */
    @Override
    public void getList() throws Exception {
        RestUri restUriAnalysis = ClientContext.getInstance(null, false)
                .getUriFactory()
                .resourceColl(ServiceRequestId.URL_ANALYSIS);
        GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResource = new GobiiEnvelopeRestResource<>(restUriAnalysis);
        PayloadEnvelope<AnalysisDTO> resultEnvelope = gobiiEnvelopeRestResource
                .get(AnalysisDTO.class);
        Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelope.getHeader()));
        List<AnalysisDTO> analysisDTOList = resultEnvelope.getPayload().getData();
        Assert.assertNotNull(analysisDTOList);
        Assert.assertTrue(analysisDTOList.size() > 0);
        Assert.assertNotNull(analysisDTOList.get(0).getAnalysisName());
        LinkCollection linkCollection = resultEnvelope.getPayload().getLinkCollection();
        // One link per data item is the HATEOAS contract being verified here.
        Assert.assertTrue(linkCollection.getLinksPerDataItem().size() == analysisDTOList.size());
        List<Integer> itemsToTest = new ArrayList<>();
        if (analysisDTOList.size() > 50) {
            itemsToTest = TestUtils.makeListOfIntegersInRange(10, analysisDTOList.size());
        } else {
            for (int idx = 0; idx < analysisDTOList.size(); idx++) {
                itemsToTest.add(idx);
            }
        }
        for (Integer currentIdx : itemsToTest) {
            AnalysisDTO currentAnalysisDto = analysisDTOList.get(currentIdx);
            Link currentLink = linkCollection.getLinksPerDataItem().get(currentIdx);
            RestUri restUriAnalysisForGetById = ClientContext.getInstance(null, false)
                    .getUriFactory()
                    .RestUriFromUri(currentLink.getHref());
            GobiiEnvelopeRestResource<AnalysisDTO> gobiiEnvelopeRestResourceForGetById = new GobiiEnvelopeRestResource<>(restUriAnalysisForGetById);
            PayloadEnvelope<AnalysisDTO> resultEnvelopeForGetById = gobiiEnvelopeRestResourceForGetById
                    .get(AnalysisDTO.class);
            Assert.assertNotNull(resultEnvelopeForGetById);
            Assert.assertFalse(TestUtils.checkAndPrintHeaderMessages(resultEnvelopeForGetById.getHeader()));
            AnalysisDTO analysisDTOFromLink = resultEnvelopeForGetById.getPayload().getData().get(0);
            Assert.assertTrue(currentAnalysisDto.getAnalysisName().equals(analysisDTOFromLink.getAnalysisName()));
            Assert.assertTrue(currentAnalysisDto.getAnalysisId().equals(analysisDTOFromLink.getAnalysisId()));
        }
    }
}
| {
"content_hash": "3b45fa913e07943e40a228d7d6f407f6",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 148,
"avg_line_length": 52.13636363636363,
"alnum_prop": 0.7467306015693113,
"repo_name": "gobiiproject/GOBii-System",
"id": "cd0b5f47f72b80268263f23d6b3bb0c1b2464729",
"size": "14003",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "gobiiproject/gobii-client/src/test/java/org/gobiiproject/gobiiclient/dtorequests/dbops/crud/DtoCrudRequestAnalysisTest.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "1734"
},
{
"name": "Batchfile",
"bytes": "17658"
},
{
"name": "C",
"bytes": "123419"
},
{
"name": "CSS",
"bytes": "1583"
},
{
"name": "HTML",
"bytes": "392838"
},
{
"name": "Java",
"bytes": "3692282"
},
{
"name": "JavaScript",
"bytes": "8090"
},
{
"name": "Makefile",
"bytes": "17937"
},
{
"name": "PLpgSQL",
"bytes": "615167"
},
{
"name": "Perl",
"bytes": "30299"
},
{
"name": "Python",
"bytes": "108776"
},
{
"name": "SQLPL",
"bytes": "5474"
},
{
"name": "Shell",
"bytes": "6710"
},
{
"name": "TypeScript",
"bytes": "293150"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<!-- Hugo-generated tag listing page for the "Development" tag (Nix theme).
     Regenerating the site will overwrite manual edits to this file. -->
<html lang="en-us">
<head>
  <title> Development · Lee's Blog </title>
  <meta http-equiv="content-type" content="text/html; charset=utf-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1">
  <meta name="generator" content="Hugo 0.40.1" />
  <!-- jQuery + Bootstrap 3 assets served from CDNs with SRI hashes. -->
  <script src="https://code.jquery.com/jquery-3.1.1.min.js" integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8=" crossorigin="anonymous"></script>
  <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u" crossorigin="anonymous">
  <script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js" integrity="sha384-Tc5IQib027qvyjSMfHjOMaLkfuWVxZxUPnCJA7l2mCWNIpG9mGCD8wGNIcPD7Txa" crossorigin="anonymous"></script>
  <link href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet" integrity="sha384-wvfXpqpZZVQGK6TAh5PVlGOfQNHSoD2xbE+QkPxCAFlNEevoEH3Sl0sibVcOQVnN" crossorigin="anonymous">
  <link rel="stylesheet" href="https://phuclb1.github.io/css/nix.css">
  <link href="https://fonts.googleapis.com/css?family=Inconsolata%7COpen+Sans%7CConcert+One" rel="stylesheet">
</head>
<body>
<!-- Fixed top navigation bar with collapsible menu for narrow viewports. -->
<header>
  <nav class="navbar navbar-default navbar-fixed-top navbar-inverse font-header">
    <div class="container-fluid">
      <div class="navbar-header">
        <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar-collapse-1" aria-expanded="false">
          <span class="sr-only">Toggle navigation</span>
          <span class="icon-bar"></span>
          <span class="icon-bar"></span>
          <span class="icon-bar"></span>
        </button>
        <a class="navbar-brand" id="green-terminal" href=https://phuclb1.github.io/>lee@blog ~ $</a>
      </div>
      <div class="collapse navbar-collapse" id="navbar-collapse-1">
        <ul class="nav navbar-nav navbar-right">
          <li>
            <a href="https://phuclb1.github.io/">/home/lee</a>
          </li>
          <li class="dropdown">
            <a href="/about">~/about</a>
          </li>
          <li class="dropdown">
            <a href="/post">~/post</a>
          </li>
        </ul>
      </div>
    </div>
  </nav>
</header>
<!-- Post list: one summary card per post carrying this tag. -->
<div class="container wrapper">
  <div class="row">
    <div class="col-xs-12 text-center">
      <h1 id=>Development</h1>
    </div>
  </div>
  <ul id="post-list">
    <li>
      <div class="post-list-item">
        <div class="post-header">
          <h4 class="post-link"><a href="https://phuclb1.github.io/post/goisforlovers/">(Hu)go Template Primer</a></h4>
          <h4 class="post-date">Apr 2, 2014</h4>
        </div>
        <div class="post-summary"><p>Hugo uses the excellent go html/template library for its template engine. It is an extremely lightweight engine that provides a very small amount of logic. In our experience that it is just the right amount of logic to be able to create a good static website. If you have used other template systems from different languages or frameworks you will find a lot of similarities in go templates.
This document is a brief primer on using go templates.</p></div>
        <div class="post-list-footer text-center">
          <a href="https://phuclb1.github.io/post/goisforlovers/">Read More</a>
        </div>
      </div>
    </li>
    <li>
      <div class="post-list-item">
        <div class="post-header">
          <h4 class="post-link"><a href="https://phuclb1.github.io/post/hugoisforlovers/">Getting Started with Hugo</a></h4>
          <h4 class="post-date">Apr 2, 2014</h4>
        </div>
        <div class="post-summary"><p>Step 1. Install Hugo Goto hugo releases and download the appropriate version for your os and architecture.
Save it somewhere specific as we will be using it in the next step.
More complete instructions are available at installing hugo
Step 2. Build the Docs Hugo has its own example site which happens to also be the documentation site you are reading right now.
Follow the following steps:
Clone the hugo repository Go into the repo Run hugo in server mode and build the docs Open your browser to http://localhost:1313 Corresponding pseudo commands:</p></div>
        <div class="post-list-footer text-center">
          <a href="https://phuclb1.github.io/post/hugoisforlovers/">Read More</a>
        </div>
      </div>
    </li>
  </ul>
  <div class="push"></div>
</div>
<!-- Site footer with theme/generator credits. -->
<footer class="footer text-center">
  <p>Copyright © 2018 Mr Lee -
    <span class="credit">
      Powered by
      <a target="_blank" href="https://gohugo.io">Hugo</a>
      and
      <a target="_blank" href="https://github.com/LordMathis/hugo-theme-nix/">Nix</a> theme.
    </span>
  </p>
</footer>
</body>
</html>
| {
"content_hash": "c346a5907183d825bd766e22e97c471e",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 426,
"avg_line_length": 36.17164179104478,
"alnum_prop": 0.6655663296884671,
"repo_name": "phuclb1/phuclb1.github.io",
"id": "3b45ed9d3d5e889d5b605b847b7216b791ec0c05",
"size": "4847",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tags/development/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1917"
},
{
"name": "HTML",
"bytes": "111023"
},
{
"name": "JavaScript",
"bytes": "4332"
}
],
"symlink_target": ""
} |
<?php declare(strict_types=1);
namespace Monolog\Formatter;
use Monolog\Logger;
/**
 * Tests for LogstashFormatter's v1 output: default fields, context/extra
 * handling (with and without custom prefixes), application name, and
 * non-UTF-8 input normalization.
 */
class LogstashFormatterTest extends \PHPUnit\Framework\TestCase
{
    public function tearDown()
    {
        // Re-enable warning-to-exception conversion in case a test disabled it.
        \PHPUnit\Framework\Error\Warning::$enabled = true;

        return parent::tearDown();
    }

    /**
     * Builds a baseline log record at ERROR level with epoch-0 timestamp,
     * merging any per-test overrides on top.
     */
    private function buildRecord(array $overrides = [])
    {
        return array_merge([
            'level' => Logger::ERROR,
            'level_name' => 'ERROR',
            'channel' => 'meh',
            'context' => [],
            'datetime' => new \DateTimeImmutable("@0"),
            'extra' => [],
            'message' => 'log',
        ], $overrides);
    }

    /**
     * @covers Monolog\Formatter\LogstashFormatter::format
     */
    public function testDefaultFormatterV1()
    {
        $formatter = new LogstashFormatter('test', 'hostname');
        $decoded = json_decode($formatter->format($this->buildRecord()), true);

        $this->assertEquals("1970-01-01T00:00:00.000000+00:00", $decoded['@timestamp']);
        $this->assertEquals("1", $decoded['@version']);
        $this->assertEquals('log', $decoded['message']);
        $this->assertEquals('meh', $decoded['channel']);
        $this->assertEquals('ERROR', $decoded['level']);
        $this->assertEquals(Logger::ERROR, $decoded['monolog_level']);
        $this->assertEquals('test', $decoded['type']);
        $this->assertEquals('hostname', $decoded['host']);

        // The application name passed to the constructor becomes the "type" field.
        $formatter = new LogstashFormatter('mysystem');
        $decoded = json_decode($formatter->format($this->buildRecord()), true);
        $this->assertEquals('mysystem', $decoded['type']);
    }

    /**
     * @covers Monolog\Formatter\LogstashFormatter::format
     */
    public function testFormatWithFileAndLineV1()
    {
        $record = $this->buildRecord([
            'context' => ['from' => 'logger'],
            'extra' => ['file' => 'test', 'line' => 14],
        ]);
        $formatter = new LogstashFormatter('test');
        $decoded = json_decode($formatter->format($record), true);

        $this->assertEquals('test', $decoded['extra']['file']);
        $this->assertEquals(14, $decoded['extra']['line']);
    }

    /**
     * @covers Monolog\Formatter\LogstashFormatter::format
     */
    public function testFormatWithContextV1()
    {
        $record = $this->buildRecord([
            'context' => ['from' => 'logger'],
            'extra' => ['key' => 'pair'],
        ]);
        $formatter = new LogstashFormatter('test');
        $decoded = json_decode($formatter->format($record), true);

        $this->assertArrayHasKey('context', $decoded);
        $this->assertArrayHasKey('from', $decoded['context']);
        $this->assertEquals('logger', $decoded['context']['from']);

        // Test with extraPrefix
        $formatter = new LogstashFormatter('test', null, 'extra', 'CTX');
        $decoded = json_decode($formatter->format($record), true);
        $this->assertArrayHasKey('CTX', $decoded);
        $this->assertArrayHasKey('from', $decoded['CTX']);
        $this->assertEquals('logger', $decoded['CTX']['from']);
    }

    /**
     * @covers Monolog\Formatter\LogstashFormatter::format
     */
    public function testFormatWithExtraV1()
    {
        $record = $this->buildRecord([
            'context' => ['from' => 'logger'],
            'extra' => ['key' => 'pair'],
        ]);
        $formatter = new LogstashFormatter('test');
        $decoded = json_decode($formatter->format($record), true);

        $this->assertArrayHasKey('extra', $decoded);
        $this->assertArrayHasKey('key', $decoded['extra']);
        $this->assertEquals('pair', $decoded['extra']['key']);

        // Test with extraPrefix
        $formatter = new LogstashFormatter('test', null, 'EXTRA');
        $decoded = json_decode($formatter->format($record), true);
        $this->assertArrayHasKey('EXTRA', $decoded);
        $this->assertArrayHasKey('key', $decoded['EXTRA']);
        $this->assertEquals('pair', $decoded['EXTRA']['key']);
    }

    public function testFormatWithApplicationNameV1()
    {
        $record = $this->buildRecord([
            'context' => ['from' => 'logger'],
            'extra' => ['key' => 'pair'],
        ]);
        $formatter = new LogstashFormatter('app', 'test');
        $decoded = json_decode($formatter->format($record), true);

        $this->assertArrayHasKey('type', $decoded);
        $this->assertEquals('app', $decoded['type']);
    }

    public function testFormatWithLatin9Data()
    {
        // Raw ISO-8859-15 bytes in "extra" must come out as valid UTF-8.
        $record = $this->buildRecord([
            'channel' => '¯\_(ツ)_/¯',
            'extra' => [
                'user_agent' => "\xD6WN; FBCR/OrangeEspa\xF1a; Vers\xE3o/4.0; F\xE4rist",
            ],
        ]);
        $formatter = new LogstashFormatter('test', 'hostname');
        $decoded = json_decode($formatter->format($record), true);

        $this->assertEquals("1970-01-01T00:00:00.000000+00:00", $decoded['@timestamp']);
        $this->assertEquals('log', $decoded['message']);
        $this->assertEquals('¯\_(ツ)_/¯', $decoded['channel']);
        $this->assertEquals('ERROR', $decoded['level']);
        $this->assertEquals('test', $decoded['type']);
        $this->assertEquals('hostname', $decoded['host']);
        $this->assertEquals('ÖWN; FBCR/OrangeEspaña; Versão/4.0; Färist', $decoded['extra']['user_agent']);
    }
}
| {
"content_hash": "b9b96ffa336c571d7138e4b400f5dce2",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 107,
"avg_line_length": 34.00555555555555,
"alnum_prop": 0.5317758536186897,
"repo_name": "localheinz/monolog",
"id": "5d0374bc9b028a246887f4fbcc46fd7951251d32",
"size": "6360",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/Monolog/Formatter/LogstashFormatterTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "744986"
}
],
"symlink_target": ""
} |
package br.com.six2six.fixturefactory.model;
import java.io.Serializable;
/**
 * Simple serializable value holder for a postal address.
 * Plain JavaBean: mutable fields with conventional accessors.
 */
public class Address implements Serializable {

    private static final long serialVersionUID = -157590924427802878L;

    private Long id;
    private String street;
    private City city;
    private String state;
    private String country;
    private String zipCode;

    /** @return the database identifier, or null if not persisted yet. */
    public Long getId() {
        return id;
    }

    public void setId(Long value) {
        this.id = value;
    }

    public String getStreet() {
        return street;
    }

    public void setStreet(String value) {
        this.street = value;
    }

    public City getCity() {
        return city;
    }

    public void setCity(City value) {
        this.city = value;
    }

    public String getState() {
        return state;
    }

    public void setState(String value) {
        this.state = value;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String value) {
        this.country = value;
    }

    public String getZipCode() {
        return zipCode;
    }

    public void setZipCode(String value) {
        this.zipCode = value;
    }

    /** Renders all fields in the fixed "Address [city=..., ...]" layout. */
    @Override
    public String toString() {
        StringBuilder rendered = new StringBuilder("Address [city=");
        rendered.append(city)
                .append(", country=").append(country)
                .append(", id=").append(id)
                .append(", state=").append(state)
                .append(", street=").append(street)
                .append(", zipCode=").append(zipCode)
                .append("]");
        return rendered.toString();
    }
}
| {
"content_hash": "a37dd85c32a8350474d93d5fba0d4ea3",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 148,
"avg_line_length": 21.736842105263158,
"alnum_prop": 0.6529459241323649,
"repo_name": "six2six/fixture-factory",
"id": "ccd964350a2a142f193c19507ddcb0a25a4102d7",
"size": "1239",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/test/java/br/com/six2six/fixturefactory/model/Address.java",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "179567"
}
],
"symlink_target": ""
} |
/** @file
* @brief I2S bus (SSP) driver for Intel CAVS.
*
* Limitations:
* - DMA is used in simple single block transfer mode (with linked list
* enabled) and "interrupt on full transfer completion" mode.
*/
#include <errno.h>
#include <string.h>
#include <misc/__assert.h>
#include <kernel.h>
#include <device.h>
#include <init.h>
#include <dma.h>
#include <i2s.h>
#include <soc.h>
#include "i2s_cavs.h"
#define SYS_LOG_DOMAIN "dev/i2s_cavs"
#define SYS_LOG_LEVEL CONFIG_SYS_LOG_I2S_LEVEL
#include <logging/sys_log.h>
#ifdef CONFIG_DCACHE_WRITEBACK
#define DCACHE_INVALIDATE(addr, size) \
{ dcache_invalidate_region(addr, size); }
#define DCACHE_CLEAN(addr, size) \
{ dcache_writeback_region(addr, size); }
#else
#define DCACHE_INVALIDATE(addr, size) \
do { } while (0)
#define DCACHE_CLEAN(addr, size) \
do { } while (0)
#endif
#define CAVS_SSP_WORD_SIZE_BITS_MIN 4
#define CAVS_SSP_WORD_SIZE_BITS_MAX 32
#define CAVS_SSP_WORD_PER_FRAME_MIN 1
#define CAVS_SSP_WORD_PER_FRAME_MAX 8
/* One queued audio buffer: pointer to the memory slab block and its size. */
struct queue_item {
	void *mem_block;
	size_t size;
};
/* Minimal ring buffer implementation:
 * buf - holds the pointer to Queue items
 * len - Max number of Queue items that can be referenced
 * head - current write index number
 * tail - current read index number
 */
struct ring_buf {
	struct queue_item *buf;
	u16_t len;
	u16_t head;
	u16_t tail;
};
/* This indicates the Tx/Rx stream. Most members of the stream are
 * self-explanatory except for sem.
 * sem - This is initialized to CONFIG_I2S_CAVS_TX_BLOCK_COUNT. If at all
 * mem_block_queue gets filled with the MAX blocks configured, this semaphore
 * ensures nothing gets written into the mem_block_queue before a slot gets
 * freed (which happens when a block gets read out).
 */
struct stream {
	s32_t state;                   /* current i2s_state of this stream */
	struct k_sem sem;              /* counts free slots in mem_block_queue */
	u32_t dma_channel;
	struct dma_config dma_cfg;
	struct i2s_config cfg;
	struct ring_buf mem_block_queue;
	void *mem_block;               /* block currently owned by the DMA */
	bool last_block;               /* true when draining: stop after this block */
	/* Per-direction strategy hooks bound at init time. */
	int (*stream_start)(struct stream *,
			    volatile struct i2s_cavs_ssp *const, struct device *);
	void (*stream_disable)(struct stream *,
			       volatile struct i2s_cavs_ssp *const, struct device *);
	void (*queue_drop)(struct stream *);
};
/* Static (ROM) per-instance configuration: register blocks and IRQ wiring. */
struct i2s_cavs_config {
	struct i2s_cavs_ssp *regs;       /* SSP register block */
	struct i2s_cavs_mn_div *mn_regs; /* M/N clock divider registers */
	u32_t irq_id;
	void (*irq_config)(void);        /* hook that connects the ISR */
};
/* Device run time data */
struct i2s_cavs_dev_data {
	struct device *dev_dma;          /* DMA controller used for transfers */
	struct stream tx;                /* transmit stream state */
};
#define DEV_NAME(dev) ((dev)->config->name)
#define DEV_CFG(dev) \
((const struct i2s_cavs_config *const)(dev)->config->config_info)
#define DEV_DATA(dev) \
((struct i2s_cavs_dev_data *const)(dev)->driver_data)
static struct device *get_dev_from_dma_channel(u32_t dma_channel);
static void dma_tx_callback(struct device *, u32_t, int);
static void tx_stream_disable(struct stream *,
volatile struct i2s_cavs_ssp *const, struct device *);
/* Advance a ring-buffer index by one, wrapping back to zero at max. */
static inline u16_t modulo_inc(u16_t val, u16_t max)
{
	u16_t next = val + 1;

	return (next < max) ? next : 0;
}
/*
 * Pop the oldest entry from the queue into *mem_block / *size.
 *
 * Ping-pong mode: the two buffers are fixed and never freed, so when the
 * read index catches up with the write index we simply wrap back to
 * buffer0 and keep cycling ping -> pong -> ping ...
 *
 * All other modes: tail == head means every queued block has been
 * consumed, so report the queue as empty with -ENOMEM.
 *
 * Runs with interrupts locked because it is shared between thread and
 * ISR context.
 */
static int queue_get(struct ring_buf *rb, u8_t mode, void **mem_block,
		     size_t *size)
{
	unsigned int key = irq_lock();

	if (rb->tail == rb->head) {
		if ((mode & I2S_OPT_PINGPONG) != I2S_OPT_PINGPONG) {
			/* Nothing left to read */
			irq_unlock(key);
			return -ENOMEM;
		}
		/* Ping-pong: restart the fixed two-buffer cycle */
		rb->tail = 0;
	}

	*mem_block = rb->buf[rb->tail].mem_block;
	*size = rb->buf[rb->tail].size;
	rb->tail = modulo_inc(rb->tail, rb->len);

	irq_unlock(key);
	return 0;
}
/*
 * Append an entry to the queue.
 *
 * The "full" check (next write index == read index) is skipped in
 * ping-pong mode, where the two buffers are predefined and the indices
 * intentionally lap each other. Returns -ENOMEM when a non-ping-pong
 * queue is full. Runs with interrupts locked.
 */
static int queue_put(struct ring_buf *rb, u8_t mode, void *mem_block,
		     size_t size)
{
	unsigned int key = irq_lock();
	u16_t next_head = modulo_inc(rb->head, rb->len);

	if (((mode & I2S_OPT_PINGPONG) != I2S_OPT_PINGPONG) &&
	    (next_head == rb->tail)) {
		/* Ring buffer is full */
		irq_unlock(key);
		return -ENOMEM;
	}

	rb->buf[rb->head].mem_block = mem_block;
	rb->buf[rb->head].size = size;
	rb->head = next_head;

	irq_unlock(key);
	return 0;
}
/*
 * Configure and kick off a single-block DMA transfer of blk_size bytes
 * from src to dst on the given channel. Returns the first dma_config()
 * or dma_start() error, or 0 on success.
 *
 * NOTE(review): blk_cfg lives on this stack frame; this assumes
 * dma_config() copies the block descriptor before returning — confirm
 * against the DMA driver.
 */
static int start_dma(struct device *dev_dma, u32_t channel,
		     struct dma_config *cfg, void *src, void *dst,
		     u32_t blk_size)
{
	struct dma_block_config blk_cfg = {
		.source_address = (u32_t)src,
		.dest_address = (u32_t)dst,
		.block_size = blk_size,
	};
	int ret;

	cfg->head_block = &blk_cfg;

	ret = dma_config(dev_dma, channel, cfg);
	if (ret < 0) {
		SYS_LOG_ERR("dma_config failed: %d", ret);
	} else {
		ret = dma_start(dev_dma, channel);
		if (ret < 0) {
			SYS_LOG_ERR("dma_start failed: %d", ret);
		}
	}

	return ret;
}
/* This function is executed in the interrupt context */
/*
 * DMA completion callback for the TX channel: releases the block that
 * just finished (non-ping-pong modes only), then either chains the next
 * queued block into the DMA engine or shuts the stream down on
 * error/STOP/empty-queue.
 *
 * NOTE(review): the "status" parameter is not examined here — errors are
 * tracked via strm->state instead; confirm that is intentional.
 */
static void dma_tx_callback(struct device *dev_dma, u32_t channel, int status)
{
	struct device *dev = get_dev_from_dma_channel(channel);
	const struct i2s_cavs_config *const dev_cfg = DEV_CFG(dev);
	struct i2s_cavs_dev_data *const dev_data = DEV_DATA(dev);
	volatile struct i2s_cavs_ssp *const ssp = dev_cfg->regs;
	struct stream *strm = &dev_data->tx;
	size_t mem_block_size;
	int ret;
	__ASSERT_NO_MSG(strm->mem_block != NULL);
	/* Ping-pong buffers are fixed and reused, so only slab-allocated
	 * blocks are returned to the slab here.
	 */
	if ((strm->cfg.options & I2S_OPT_PINGPONG) != I2S_OPT_PINGPONG) {
		/* All block data sent */
		k_mem_slab_free(strm->cfg.mem_slab, &strm->mem_block);
		strm->mem_block = NULL;
	}
	/* Stop transmission if there was an error */
	if (strm->state == I2S_STATE_ERROR) {
		SYS_LOG_DBG("TX error detected");
		goto tx_disable;
	}
	/* Stop transmission if we were requested */
	if (strm->last_block) {
		strm->state = I2S_STATE_READY;
		goto tx_disable;
	}
	/* Prepare to send the next data block */
	ret = queue_get(&strm->mem_block_queue, strm->cfg.options,
			&strm->mem_block, &mem_block_size);
	if (ret < 0) {
		/* Queue empty: READY if a graceful stop was in progress,
		 * otherwise it's an underrun -> ERROR.
		 */
		if (strm->state == I2S_STATE_STOPPING) {
			strm->state = I2S_STATE_READY;
		} else {
			strm->state = I2S_STATE_ERROR;
		}
		goto tx_disable;
	}
	/* A queue slot was just consumed; wake any writer waiting for it */
	k_sem_give(&strm->sem);
	/* Assure cache coherency before DMA read operation */
	DCACHE_CLEAN(strm->mem_block, mem_block_size);
	ret = start_dma(dev_data->dev_dma, strm->dma_channel, &strm->dma_cfg,
			strm->mem_block, (void *)&(ssp->ssd),
			mem_block_size);
	if (ret < 0) {
		SYS_LOG_DBG("Failed to start TX DMA transfer: %d", ret);
		goto tx_disable;
	}
	return;
tx_disable:
	tx_stream_disable(strm, ssp, dev_data->dev_dma);
}
/*
 * Configure the SSP port for the requested I2S format/clocks.
 *
 * Only the TX direction is supported. A frame_clk_freq of 0 deconfigures
 * the stream (drops queued blocks and returns it to NOT_READY). On
 * success the stream moves to READY and the DMA element size is set to
 * match the configured word size.
 *
 * Returns 0 on success, -EINVAL on an unsupported direction, state,
 * word size, channel count, clock format, data format or divider range.
 */
static int i2s_cavs_configure(struct device *dev, enum i2s_dir dir,
			      struct i2s_config *i2s_cfg)
{
	const struct i2s_cavs_config *const dev_cfg = DEV_CFG(dev);
	struct i2s_cavs_dev_data *const dev_data = DEV_DATA(dev);
	volatile struct i2s_cavs_ssp *const ssp = dev_cfg->regs;
	volatile struct i2s_cavs_mn_div *const mn_div = dev_cfg->mn_regs;
	u8_t num_words = i2s_cfg->channels;
	u8_t word_size_bits = i2s_cfg->word_size;
	u8_t word_size_bytes;
	u32_t bit_clk_freq, mclk;
	struct stream *strm;
	u32_t ssc0;
	u32_t ssc1;
	u32_t ssc2;
	u32_t ssc3;
	u32_t sspsp;
	u32_t sspsp2;
	u32_t sstsa;
	u32_t ssrsa;
	u32_t ssto;
	u32_t ssioc = 0;
	u32_t mdiv;
	u32_t i2s_m = 0;
	u32_t i2s_n = 0;
	u32_t frame_len = 0;
	bool inverted_frame = false;
	if (dir == I2S_DIR_TX) {
		strm = &dev_data->tx;
	} else {
		SYS_LOG_ERR("TX direction must be selected");
		return -EINVAL;
	}
	/* Reconfiguration is only legal before first config or when idle */
	if (strm->state != I2S_STATE_NOT_READY &&
	    strm->state != I2S_STATE_READY) {
		SYS_LOG_ERR("invalid state");
		return -EINVAL;
	}
	/* frame_clk_freq == 0 means "deconfigure this stream" */
	if (i2s_cfg->frame_clk_freq == 0) {
		strm->queue_drop(strm);
		(void)memset(&strm->cfg, 0, sizeof(struct i2s_config));
		strm->state = I2S_STATE_NOT_READY;
		return 0;
	}
	if (word_size_bits < CAVS_SSP_WORD_SIZE_BITS_MIN ||
	    word_size_bits > CAVS_SSP_WORD_SIZE_BITS_MAX) {
		SYS_LOG_ERR("Unsupported I2S word size");
		return -EINVAL;
	}
	if (num_words < CAVS_SSP_WORD_PER_FRAME_MIN ||
	    num_words > CAVS_SSP_WORD_PER_FRAME_MAX) {
		SYS_LOG_ERR("Unsupported words per frame number");
		return -EINVAL;
	}
	/* Keep a private copy; the caller's struct may not outlive us */
	memcpy(&strm->cfg, i2s_cfg, sizeof(struct i2s_config));
	/* reset SSP settings */
	/* sscr0 dynamic settings are DSS, EDSS, SCR, FRDC, ECS */
	ssc0 = SSCR0_MOD | SSCR0_PSP | SSCR0_RIM | SSCR0_TIM;
	/* sscr1 dynamic settings are SFRMDIR, SCLKDIR, SCFR */
	ssc1 = SSCR1_TTE | SSCR1_TTELP | SSCR1_RWOT | SSCR1_TRAIL;
	/* sscr2 dynamic setting is LJDFD */
	ssc2 = SSCR2_SDFD | SSCR2_TURM1;
	/* sscr3 dynamic settings are TFT, RFT */
	ssc3 = 0;
	/* sspsp dynamic settings are SCMODE, SFRMP, DMYSTRT, SFRMWDTH */
	sspsp = 0;
	/* sspsp2 no dynamic setting */
	sspsp2 = 0x0;
	/* ssto no dynamic setting */
	ssto = 0x0;
	/* sstsa dynamic setting is TTSA, set according to num_words */
	sstsa = SSTSA_TXEN | BIT_MASK(num_words);
	/* ssrsa dynamic setting is RTSA, set according to num_words */
	ssrsa = SSRSA_RXEN | BIT_MASK(num_words);
	if (i2s_cfg->options & I2S_OPT_BIT_CLK_SLAVE) {
		/* set BCLK mode as slave */
		ssc1 |= SSCR1_SCLKDIR;
	} else {
		/* enable BCLK output */
		ssioc = SSIOC_SCOE;
	}
	if (i2s_cfg->options & I2S_OPT_FRAME_CLK_SLAVE) {
		/* set WCLK mode as slave */
		ssc1 |= SSCR1_SFRMDIR;
	}
	ssioc |= SSIOC_SFCR;
	/* clock signal polarity */
	switch (i2s_cfg->format & I2S_FMT_CLK_FORMAT_MASK) {
	case I2S_FMT_CLK_NF_NB:
		break;
	case I2S_FMT_CLK_NF_IB:
		sspsp |= SSPSP_SCMODE(2);
		inverted_frame = true; /* handled later with format */
		break;
	case I2S_FMT_CLK_IF_NB:
		break;
	case I2S_FMT_CLK_IF_IB:
		sspsp |= SSPSP_SCMODE(2);
		inverted_frame = true; /* handled later with format */
		break;
	default:
		SYS_LOG_ERR("Unsupported Clock format");
		return -EINVAL;
	}
	mclk = soc_get_ref_clk_freq();
	bit_clk_freq = i2s_cfg->frame_clk_freq * word_size_bits * num_words;
	/* BCLK is generated from MCLK - must be divisible */
	if (mclk % bit_clk_freq) {
		SYS_LOG_INF("MCLK/BCLK is not an integer, using M/N divider");
		/*
		 * Simplification: Instead of calculating lowest values of
		 * M and N, just set M and N as BCLK and MCLK respectively
		 * in 0.1KHz units
		 * In addition, double M so that it can be later divided by 2
		 * to get an approximately 50% duty cycle clock
		 */
		i2s_m = (bit_clk_freq << 1) / 100;
		i2s_n = mclk / 100;
		/* set divider value of 1 which divides the clock by 2 */
		mdiv = 1;
		/* Select M/N divider as the clock source */
		ssc0 |= SSCR0_ECS;
	} else {
		mdiv = (mclk / bit_clk_freq) - 1;
	}
	/* divisor must be within SCR range */
	if (mdiv > (SSCR0_SCR_MASK >> 8)) {
		SYS_LOG_ERR("Divisor is not within SCR range");
		return -EINVAL;
	}
	/* set the SCR divisor */
	ssc0 |= SSCR0_SCR(mdiv);
	/* format */
	switch (i2s_cfg->format & I2S_FMT_DATA_FORMAT_MASK) {
	case I2S_FMT_DATA_FORMAT_I2S:
		ssc0 |= SSCR0_FRDC(i2s_cfg->channels);
		/* set asserted frame length */
		frame_len = word_size_bits;
		/* handle frame polarity, I2S default is falling/active low */
		sspsp |= SSPSP_SFRMP(!inverted_frame);
		break;
	case I2S_FMT_DATA_FORMAT_LEFT_JUSTIFIED:
		ssc0 |= SSCR0_FRDC(i2s_cfg->channels);
		/* LJDFD enable */
		ssc2 &= ~SSCR2_LJDFD;
		/* set asserted frame length */
		frame_len = word_size_bits;
		/* LEFT_J default is rising/active high, opposite of I2S */
		sspsp |= SSPSP_SFRMP(inverted_frame);
		break;
	case I2S_FMT_DATA_FORMAT_PCM_SHORT:
	case I2S_FMT_DATA_FORMAT_PCM_LONG:
	default:
		SYS_LOG_ERR("Unsupported I2S data format");
		return -EINVAL;
	}
	sspsp |= SSPSP_SFRMWDTH(frame_len);
	/* Words wider than 16 bits need the extended data size bit */
	if (word_size_bits > 16) {
		ssc0 |= (SSCR0_EDSS | SSCR0_DSIZE(word_size_bits - 16));
	} else {
		ssc0 |= SSCR0_DSIZE(word_size_bits);
	}
	/* Commit the computed values to the hardware */
	ssp->ssc0 = ssc0;
	ssp->ssc1 = ssc1;
	ssp->ssc2 = ssc2;
	ssp->ssc3 = ssc3;
	ssp->sspsp2 = sspsp2;
	ssp->sspsp = sspsp;
	ssp->ssioc = ssioc;
	ssp->ssto = ssto;
	ssp->sstsa = sstsa;
	ssp->ssrsa = ssrsa;
	/* i2s_m/i2s_n stay 0 when the integer divider path was taken */
	mn_div->mval = I2S_MNVAL(i2s_m);
	mn_div->nval = I2S_MNVAL(i2s_n);
	/* Set up DMA channel parameters */
	word_size_bytes = (word_size_bits + 7) / 8;
	strm->dma_cfg.source_data_size = word_size_bytes;
	strm->dma_cfg.dest_data_size = word_size_bytes;
	strm->state = I2S_STATE_READY;
	return 0;
}
/*
 * Start the TX stream: pull the first queued buffer, hand it to the DMA
 * engine, then enable the SSP port and its transmit DMA handshake.
 * Returns a negative errno if the queue is empty or DMA setup fails.
 */
static int tx_stream_start(struct stream *strm,
			   volatile struct i2s_cavs_ssp *const ssp,
			   struct device *dev_dma)
{
	size_t blk_size;
	int err;

	err = queue_get(&strm->mem_block_queue, strm->cfg.options,
			&strm->mem_block, &blk_size);
	if (err < 0) {
		return err;
	}

	/* A queue slot was consumed; release it to waiting writers */
	k_sem_give(&strm->sem);

	/* Assure cache coherency before DMA read operation */
	DCACHE_CLEAN(strm->mem_block, blk_size);

	err = start_dma(dev_dma, strm->dma_channel, &strm->dma_cfg,
			strm->mem_block, (void *)&(ssp->ssd), blk_size);
	if (err < 0) {
		SYS_LOG_ERR("Failed to start TX DMA transfer: %d", err);
		return err;
	}

	/* enable port */
	ssp->ssc0 |= SSCR0_SSE;

	/* Enable DMA service request handshake logic. Though DMA is
	 * already started, it won't work without the handshake logic.
	 */
	ssp->ssc1 |= SSCR1_TSRE;
	ssp->sstsa |= (0x1 << 8);

	return 0;
}
/*
 * Tear down the TX stream: stop the DMA handshake and channel, return
 * the in-flight buffer to its slab (non-ping-pong only), and reset the
 * queue indices.
 */
static void tx_stream_disable(struct stream *strm,
			      volatile struct i2s_cavs_ssp *const ssp,
			      struct device *dev_dma)
{
	/* Disable DMA service request handshake logic. Handshake is
	 * not required now since DMA is not in operation.
	 */
	ssp->ssc1 &= ~SSCR1_TSRE;
	ssp->sstsa &= ~(0x1 << 8);

	dma_stop(dev_dma, strm->dma_channel);

	/* Ping-pong buffers are fixed; everything else came from a slab */
	if (((strm->cfg.options & I2S_OPT_PINGPONG) != I2S_OPT_PINGPONG) &&
	    (strm->mem_block != NULL)) {
		k_mem_slab_free(strm->cfg.mem_slab, &strm->mem_block);
		strm->mem_block = NULL;
	}

	strm->mem_block_queue.head = 0;
	strm->mem_block_queue.tail = 0;
}
/*
 * Discard every block still waiting in the TX queue, returning
 * slab-allocated blocks (non-ping-pong modes) and giving back one
 * semaphore count per freed block.
 *
 * NOTE(review): in ping-pong mode queue_get() never reports the queue as
 * empty (it wraps the read index instead), so this loop would not
 * terminate — confirm DROP/deconfigure is never used in ping-pong mode.
 */
static void tx_queue_drop(struct stream *strm)
{
	unsigned int freed = 0;
	void *block;
	size_t blk_size;

	while (queue_get(&strm->mem_block_queue, strm->cfg.options,
			 &block, &blk_size) == 0) {
		if ((strm->cfg.options & I2S_OPT_PINGPONG)
				!= I2S_OPT_PINGPONG) {
			k_mem_slab_free(strm->cfg.mem_slab, &block);
			freed++;
		}
	}

	strm->mem_block_queue.head = 0;
	strm->mem_block_queue.tail = 0;

	while (freed > 0) {
		k_sem_give(&strm->sem);
		freed--;
	}
}
/*
 * Drive the TX stream state machine.
 *
 * START: READY -> RUNNING (kicks off DMA via stream_start).
 * STOP:  RUNNING -> STOPPING; the current block finishes, then the DMA
 *        callback sets READY (last_block short-circuits the queue).
 * DRAIN: RUNNING -> STOPPING; all queued blocks are sent first.
 * DROP:  any configured state -> READY; stream disabled, queue flushed.
 * PREPARE: ERROR -> READY; queue flushed, hardware untouched.
 *
 * Returns -EIO on a command issued in the wrong state, -EINVAL for an
 * unsupported direction or command.
 */
static int i2s_cavs_trigger(struct device *dev, enum i2s_dir dir,
			    enum i2s_trigger_cmd cmd)
{
	const struct i2s_cavs_config *const dev_cfg = DEV_CFG(dev);
	struct i2s_cavs_dev_data *const dev_data = DEV_DATA(dev);
	volatile struct i2s_cavs_ssp *const ssp = dev_cfg->regs;
	struct stream *strm;
	unsigned int key;
	int ret;
	if (dir == I2S_DIR_TX) {
		strm = &dev_data->tx;
	} else {
		SYS_LOG_ERR("TX direction must be selected");
		return -EINVAL;
	}
	switch (cmd) {
	case I2S_TRIGGER_START:
		if (strm->state != I2S_STATE_READY) {
			SYS_LOG_DBG("START trigger: invalid state");
			return -EIO;
		}
		__ASSERT_NO_MSG(strm->mem_block == NULL);
		ret = strm->stream_start(strm, ssp, dev_data->dev_dma);
		if (ret < 0) {
			SYS_LOG_DBG("START trigger failed %d", ret);
			return ret;
		}
		strm->state = I2S_STATE_RUNNING;
		strm->last_block = false;
		break;
	case I2S_TRIGGER_STOP:
		/* irq_lock guards the state check/update against the DMA
		 * completion callback changing state concurrently.
		 */
		key = irq_lock();
		if (strm->state != I2S_STATE_RUNNING) {
			irq_unlock(key);
			SYS_LOG_DBG("STOP trigger: invalid state");
			return -EIO;
		}
		strm->state = I2S_STATE_STOPPING;
		irq_unlock(key);
		strm->last_block = true;
		break;
	case I2S_TRIGGER_DRAIN:
		key = irq_lock();
		if (strm->state != I2S_STATE_RUNNING) {
			irq_unlock(key);
			SYS_LOG_DBG("DRAIN trigger: invalid state");
			return -EIO;
		}
		strm->state = I2S_STATE_STOPPING;
		irq_unlock(key);
		break;
	case I2S_TRIGGER_DROP:
		if (strm->state == I2S_STATE_NOT_READY) {
			SYS_LOG_DBG("DROP trigger: invalid state");
			return -EIO;
		}
		strm->stream_disable(strm, ssp, dev_data->dev_dma);
		strm->queue_drop(strm);
		strm->state = I2S_STATE_READY;
		break;
	case I2S_TRIGGER_PREPARE:
		if (strm->state != I2S_STATE_ERROR) {
			SYS_LOG_DBG("PREPARE trigger: invalid state");
			return -EIO;
		}
		strm->state = I2S_STATE_READY;
		strm->queue_drop(strm);
		break;
	default:
		SYS_LOG_ERR("Unsupported trigger command");
		return -EINVAL;
	}
	return 0;
}
/*
 * Queue one buffer for transmission.
 *
 * Blocks (up to cfg.timeout) until a queue slot is free, then appends
 * the buffer to the TX queue. Returns 0 on success, -EIO if the stream
 * is not configured/running, or a negative errno from the semaphore or
 * the queue.
 *
 * Fix: the queue_put() return value was previously ignored, which on
 * failure both leaked the semaphore count taken above and silently
 * dropped the caller's buffer. The error is now propagated and the
 * reserved slot is returned.
 */
static int i2s_cavs_write(struct device *dev, void *mem_block, size_t size)
{
	struct i2s_cavs_dev_data *const dev_data = DEV_DATA(dev);
	struct stream *strm = &dev_data->tx;
	int ret;
	if (strm->state != I2S_STATE_RUNNING &&
	    strm->state != I2S_STATE_READY) {
		SYS_LOG_ERR("invalid state");
		return -EIO;
	}
	ret = k_sem_take(&strm->sem, strm->cfg.timeout);
	if (ret < 0) {
		SYS_LOG_ERR("Failure taking sem");
		return ret;
	}
	/* Add data to the end of the TX queue */
	ret = queue_put(&strm->mem_block_queue, strm->cfg.options,
			mem_block, size);
	if (ret < 0) {
		/* Queue rejected the block: give the reserved slot back */
		k_sem_give(&strm->sem);
		return ret;
	}
	return 0;
}
/*
 * SSP interrupt handler. Its only job (for now) is to acknowledge all
 * pending IRQ sources by reading the status register and writing the
 * value back.
 */
static void i2s_cavs_isr(void *arg)
{
	struct device *dev = (struct device *)arg;
	const struct i2s_cavs_config *const dev_cfg = DEV_CFG(dev);
	volatile struct i2s_cavs_ssp *const ssp = dev_cfg->regs;
	u32_t status;

	/* clear IRQ by writing the status value back */
	status = ssp->sss;
	ssp->sss = status;
}
static int i2s1_cavs_initialize(struct device *dev)
{
const struct i2s_cavs_config *const dev_cfg = DEV_CFG(dev);
struct i2s_cavs_dev_data *const dev_data = DEV_DATA(dev);
/* Configure interrupts */
dev_cfg->irq_config();
/* Initialize semaphores */
k_sem_init(&dev_data->tx.sem, CONFIG_I2S_CAVS_TX_BLOCK_COUNT,
CONFIG_I2S_CAVS_TX_BLOCK_COUNT);
dev_data->dev_dma = device_get_binding(CONFIG_I2S_CAVS_DMA_NAME);
if (!dev_data->dev_dma) {
SYS_LOG_ERR("%s device not found", CONFIG_I2S_CAVS_DMA_NAME);
return -ENODEV;
}
/* Enable module's IRQ */
irq_enable(dev_cfg->irq_id);
SYS_LOG_INF("Device %s initialized", DEV_NAME(dev));
return 0;
}
/* Driver API vtable shared by all instances (TX-only: no read). */
static const struct i2s_driver_api i2s_cavs_driver_api = {
	.configure = i2s_cavs_configure,
	.write = i2s_cavs_write,
	.trigger = i2s_cavs_trigger,
};
/* I2S1 */
static struct device DEVICE_NAME_GET(i2s1_cavs);
/* Map a DMA channel back to its owning device. With a single instance
 * this always resolves to i2s1; the channel argument is unused.
 */
static struct device *get_dev_from_dma_channel(u32_t dma_channel)
{
	return &DEVICE_NAME_GET(i2s1_cavs);
}
/* Backing storage for the TX ring buffer (one slot per TX block). */
struct queue_item i2s1_ring_buf[CONFIG_I2S_CAVS_TX_BLOCK_COUNT];
static void i2s1_irq_config(void)
{
	IRQ_CONNECT(I2S1_CAVS_IRQ, CONFIG_I2S_CAVS_1_IRQ_PRI, i2s_cavs_isr,
		    DEVICE_GET(i2s1_cavs), 0);
}
static const struct i2s_cavs_config i2s1_cavs_config = {
	.regs = (struct i2s_cavs_ssp *)SSP_BASE(1),
	.mn_regs = (struct i2s_cavs_mn_div *)SSP_MN_DIV_BASE(1),
	.irq_id = I2S1_CAVS_IRQ,
	.irq_config = i2s1_irq_config,
};
static struct i2s_cavs_dev_data i2s1_cavs_data = {
	.tx = {
		.dma_channel = CONFIG_I2S_CAVS_1_DMA_TX_CHANNEL,
		.dma_cfg = {
			/* data sizes are overwritten by configure() */
			.source_data_size = 1,
			.dest_data_size = 1,
			.source_burst_length = 1,
			.dest_burst_length = 1,
			.dma_callback = dma_tx_callback,
			.complete_callback_en = 0,
			.error_callback_en = 1,
			.block_count = 1,
			.channel_direction = MEMORY_TO_PERIPHERAL,
			.dma_slot = DMA_HANDSHAKE_SSP1_TX,
		},
		.mem_block_queue.buf = i2s1_ring_buf,
		.mem_block_queue.len = ARRAY_SIZE(i2s1_ring_buf),
		.stream_start = tx_stream_start,
		.stream_disable = tx_stream_disable,
		.queue_drop = tx_queue_drop,
	},
};
DEVICE_AND_API_INIT(i2s1_cavs, CONFIG_I2S_CAVS_1_NAME, &i2s1_cavs_initialize,
		    &i2s1_cavs_data, &i2s1_cavs_config, POST_KERNEL,
		    CONFIG_I2S_INIT_PRIORITY, &i2s_cavs_driver_api);
| {
"content_hash": "0989d95f4950de8caf7cb6197cf8875f",
"timestamp": "",
"source": "github",
"line_count": 795,
"max_line_length": 78,
"avg_line_length": 24.81006289308176,
"alnum_prop": 0.6577266274589333,
"repo_name": "kraj/zephyr",
"id": "58b18e1238682ab4bdd69c2015f67069f035132b",
"size": "19812",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "drivers/i2s/i2s_cavs.c",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1195132"
},
{
"name": "Batchfile",
"bytes": "209"
},
{
"name": "C",
"bytes": "300593976"
},
{
"name": "C++",
"bytes": "3020664"
},
{
"name": "CMake",
"bytes": "441343"
},
{
"name": "EmberScript",
"bytes": "796"
},
{
"name": "Makefile",
"bytes": "3113"
},
{
"name": "Objective-C",
"bytes": "33545"
},
{
"name": "Perl",
"bytes": "202119"
},
{
"name": "Python",
"bytes": "815624"
},
{
"name": "Shell",
"bytes": "22752"
},
{
"name": "Verilog",
"bytes": "6394"
}
],
"symlink_target": ""
} |
<?php
namespace Ecommerce\UserBundle\Controller;
use Ecommerce\FrontendBundle\Controller\CustomController;
use Ecommerce\UserBundle\Entity\RecoverPassword;
use Ecommerce\UserBundle\Entity\User;
use Ecommerce\UserBundle\Event\UserEvent;
use Ecommerce\UserBundle\Event\UserEvents;
use Ecommerce\UserBundle\Form\Type\RecoverPasswordType;
use Ecommerce\UserBundle\Form\Type\RegistrationType;
use Ecommerce\UserBundle\Form\Type\ValidatedCodeType;
use Ecommerce\UserBundle\Model\Registration;
use Symfony\Component\HttpFoundation\Request;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\ParamConverter;
/**
 * Handles public access flows: login, registration, account activation,
 * and password recovery.
 */
class AccessController extends CustomController
{
    /** Renders the login page. */
    public function loginAction()
    {
        return $this->render('FrontendBundle:Commons:login.html.twig');
    }
    /**
     * Shows/processes the registration form; on success dispatches
     * UserEvents::NEW_USER and redirects to the success page.
     */
    public function registerAction(Request $request)
    {
        $registration = new Registration();
        $form = $this->createForm(new RegistrationType(), $registration);
        $handler = $this->get('user.register_user_form_handler');
        if ($handler->handle($form, $request)) {
            $user = $registration->getUser();
            $userEvent = new UserEvent($user);
            $dispatcher = $this->get('event_dispatcher');
            $dispatcher->dispatch(UserEvents::NEW_USER, $userEvent);
            return $this->redirect($this->generateUrl('success_register'));
        }
        return $this->render('UserBundle:Access:register.html.twig', array('form' => $form->createView()));
    }
    /**
     * Post-registration page with the activation-code form.
     * NOTE(review): 'security.context' is the pre-Symfony-2.6 service id —
     * confirm the framework version before upgrading.
     */
    public function successRegisterAction()
    {
        $form = $this->createForm(new ValidatedCodeType());
        return $this->render('UserBundle:Access:success-register.html.twig', array('user' => $this->get('security.context')->getToken()->getUser(), 'form' => $form->createView()));
    }
    /**
     * Validates the activation code typed by the current user; on match,
     * flags the account as validated and refreshes the security token.
     */
    public function validateUserAction(Request $request)
    {
        $form = $this->createForm(new ValidatedCodeType());
        $form->handleRequest($request);
        $user = $this->getCurrentUser();
        if ($form->isValid()) {
            $code = $form->get('code')->getData();
            if ($user instanceof User && $user->getValidatedCode() == $code) {
                $em = $this->getEntityManager();
                $user->setValidated(true);
                $em->persist($user);
                $em->flush();
                $this->setTranslatedFlashMessage('Tu cuenta ha sido validada, ya puedes acceder a tu perfil y configurar tus datos.');
                $this->resetToken($user);
                return $this->redirect($this->generateUrl('ecommerce_homepage'));
            } else {
                $this->setTranslatedFlashMessage('El código introducido no coincide con el que te hemos mandado.', 'error');
            }
        }
        return $this->render('UserBundle:Access:validate-code.html.twig', array('form' => $form->createView(), 'user' => $user));
    }
    /** Re-sends the activation e-mail to the current user. */
    public function resendActivationEmailAction()
    {
        $user = $this->getCurrentUser();
        $userEvent = new UserEvent($user);
        $dispatcher = $this->get('event_dispatcher');
        $dispatcher->dispatch(UserEvents::RESEND_ACTIVATION_EMAIL, $userEvent);
        $this->setTranslatedFlashMessage('El correo de activación ha sido reenviado a tu cuenta.');
        return $this->redirect($this->generateUrl('validate_user'));
    }
    /**
     * AJAX endpoint: returns {"available":"true"|"false"} for the e-mail
     * submitted in the registration form.
     */
    public function checkIfEmailIsAvailableAction(Request $request)
    {
        $jsonResponse = json_encode(array('available' => 'false'));
        if ($request->isXmlHttpRequest()) {
            $form = $request->query->get('registration');
            $email = current($form['user']['email']);
            if ($this->checkEmailAvailable($email)) {
                $jsonResponse = json_encode(array('available' => 'true'));
            }
        }
        return $this->getHttpJsonResponse($jsonResponse);
    }
    /** True when no User row exists with the given e-mail. */
    private function checkEmailAvailable($email)
    {
        $em = $this->getEntityManager();
        $user = $em->getRepository('UserBundle:User')->findOneByEmail($email);
        return !($user instanceof User);
    }
    /**
     * Shows/processes the "forgot password" form; creates a RecoverPassword
     * record and dispatches UserEvents::RECOVER_PASSWORD when the e-mail
     * matches an account.
     */
    public function forgotPasswordAction(Request $request)
    {
        $form = $this->createFormBuilder()->add('email', 'email', array('required' => true, 'attr' => array('placeholder' => 'Introduzca su e-mail')))->getForm();
        $form->handleRequest($request);
        if ($form->isValid()) {
            $em = $this->getEntityManager();
            $data = $form->getData();
            $user = $em->getRepository('UserBundle:User')->findOneBy(array('email' => $data['email']));
            if (isset($user)) {
                $recoverPassword = new RecoverPassword();
                $recoverPassword->setEmail($user->getEmail());
                $recoverPassword->setSalt($user->getSalt());
                $em->persist($recoverPassword);
                $em->flush();
                $userEvent = new UserEvent($user);
                $dispatcher = $this->get('event_dispatcher');
                $dispatcher->dispatch(UserEvents::RECOVER_PASSWORD, $userEvent);
                $this->setTranslatedFlashMessage('El correo con las instrucciones para reestablecer tu contraseña ha sido enviado correctamente. Recuerda que dispones de 24 horas a partir de ahora para reestablercerla.');
                return $this->redirect($this->generateUrl('login'));
            }
            $this->setTranslatedFlashMessage('El correo electrónico introducido no corresponde a ninguna cuenta. Asegurate de escribirlo correctamente', 'error');
        }
        return $this->render('UserBundle:Access:forgot-password.html.twig', array('form' => $form->createView()));
    }
    /**
     * Password-reset form reached from the recovery e-mail. The pending
     * RecoverPassword record expires 24 hours after the request; the record
     * is deleted once the new password is saved.
     *
     * @ParamConverter("user", class="UserBundle:User")
     */
    public function changePasswordAction(User $user, Request $request)
    {
        $em = $this->getEntityManager();
        $recoverPassword = $em->getRepository('UserBundle:RecoverPassword')->findOneBy(array('email' => $user->getEmail()));
        if (!$recoverPassword) {
            return $this->redirect($this->generateUrl('frontend_homepage'));
        }
        $now = new \DateTime('now');
        $dateRequest = $recoverPassword->getDateRequest();
        $dateRequest->modify('+1 days');
        if ($dateRequest > $now) {
            $form = $this->createForm(new RecoverPasswordType());
            $form->handleRequest($request);
            if ($form->isValid()) {
                $data = $form->getData();
                $user->setPassword($data['password']);
                $encoder = $this->get('security.encoder_factory')->getEncoder($user);
                $encodePassword = $encoder->encodePassword($user->getPassword(), $user->getSalt());
                $user->setPassword($encodePassword);
                $em->persist($user);
                $em->remove($recoverPassword);
                $em->flush();
                $this->setTranslatedFlashMessage('Tu contraseña ha sido reestablecida. Ya puedes acceder con normalidad a tu cuenta');
                return $this->redirect($this->generateUrl('login'));
            }
            return $this->render('UserBundle:Access:new-password.html.twig', array('form' => $form->createView(), 'user' => $user));
        } else {
            $this->setTranslatedFlashMessage('Han pasado más de 24 horas desde que solicitaste el cambio de contraseña. Por favor, solicitalo de nuevo.');
            return $this->redirect($this->generateUrl('login'));
        }
    }
    /**
     * Renders the "access denied" page for the given user.
     * NOTE(review): the Request parameter is unused here.
     *
     * @ParamConverter("user", class="UserBundle:User")
     */
    public function forbiddenAction(User $user, Request $request)
    {
        return $this->render('UserBundle:Access:forbidden.html.twig', array('user' => $user));
    }
}
| {
"content_hash": "4da63cc810aa94ff283070154c5a7b41",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 221,
"avg_line_length": 41.483870967741936,
"alnum_prop": 0.6121047174701918,
"repo_name": "mancas/cartujano",
"id": "aa6e78e42959879853612cfa7c335ad2b4561902",
"size": "7723",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Ecommerce/UserBundle/Controller/AccessController.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "348273"
},
{
"name": "HTML",
"bytes": "285028"
},
{
"name": "JavaScript",
"bytes": "618916"
},
{
"name": "PHP",
"bytes": "380739"
}
],
"symlink_target": ""
} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.18444
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Porty.Properties
{
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Singleton instance; wrapped by ApplicationSettingsBase.Synchronized.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
        /// <summary>Gets the single shared settings instance for the application.</summary>
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
| {
"content_hash": "834cb027d62bdc33f4a4073a28a700b4",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 151,
"avg_line_length": 35.333333333333336,
"alnum_prop": 0.5792452830188679,
"repo_name": "corytodd/porty",
"id": "7de8d17fa1b7f7d9e0514d1c33e153a51f4cd4a2",
"size": "1062",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Porty/Properties/Settings.Designer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "17292"
}
],
"symlink_target": ""
} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE590_Free_Memory_Not_on_Heap__delete_array_int_alloca_64a.cpp
Label Definition File: CWE590_Free_Memory_Not_on_Heap__delete_array.label.xml
Template File: sources-sink-64a.tmpl.cpp
*/
/*
* @description
* CWE: 590 Free Memory Not on Heap
* BadSource: alloca Data buffer is allocated on the stack with alloca()
* GoodSource: Allocate memory on the heap
* Sinks:
* BadSink : Print then free data
* Flow Variant: 64 Data flow: void pointer to data passed from one function to another in different source files
*
* */
#include "std_testcase.h"
#include <wchar.h>
/* Juliet test-suite code: the FLAW below is intentional and must NOT be
 * "fixed" — it exists so that analysis tools can be benchmarked on it.
 */
namespace CWE590_Free_Memory_Not_on_Heap__delete_array_int_alloca_64
{
#ifndef OMITBAD
/* bad function declaration */
/* badSink is implemented in the companion _64b source file (flow
 * variant 64: the pointer crosses a translation-unit boundary as void*).
 */
void badSink(void * dataVoidPtr);
void bad()
{
    int * data;
    data = NULL; /* Initialize data */
    {
        /* FLAW: data is allocated on the stack and deallocated in the BadSink */
        int * dataBuffer = (int *)ALLOCA(100*sizeof(int));
        {
            size_t i;
            for (i = 0; i < 100; i++)
            {
                dataBuffer[i] = 5;
            }
        }
        data = dataBuffer;
    }
    badSink(&data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
void goodG2BSink(void * dataVoidPtr);
static void goodG2B()
{
    int * data;
    data = NULL; /* Initialize data */
    {
        /* FIX: data is allocated on the heap and deallocated in the BadSink */
        int * dataBuffer = new int[100];
        {
            size_t i;
            for (i = 0; i < 100; i++)
            {
                dataBuffer[i] = 5;
            }
        }
        data = dataBuffer;
    }
    goodG2BSink(&data);
}
void good()
{
    goodG2B();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
   its own for testing or for building a binary to use in testing binary
   analysis tools. It is not used when compiling all the testcases as one
   application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE590_Free_Memory_Not_on_Heap__delete_array_int_alloca_64; /* so that we can use good and bad easily */
/* Standalone driver: runs the good variant(s) then the bad variant,
 * printing progress markers around each.
 */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}
#endif
| {
"content_hash": "518ca8644b6f36d93a3b9e45c5ddc34c",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 120,
"avg_line_length": 24.844036697247706,
"alnum_prop": 0.5915805022156573,
"repo_name": "maurer/tiamat",
"id": "71f4519dd621ead5db104c5aa626f7e55970ff10",
"size": "2708",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "samples/Juliet/testcases/CWE590_Free_Memory_Not_on_Heap/s01/CWE590_Free_Memory_Not_on_Heap__delete_array_int_alloca_64a.cpp",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<!doctype html>
<html class="no-js" lang="en" ng-app>
  <head>
    <meta charset="utf-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Welcome to snake</title>
    <link rel="stylesheet" href="/foundation-5.5.2/css/foundation.css" />
    <link rel="stylesheet" href="/foundation-icons/foundation-icons.css" />
    <script src="/foundation-5.5.2/js/vendor/modernizr.js"></script>
  </head>
  <body>
    @include('template.menu')
    <div class="container">
      @yield('content')
    </div>
    <script src="/foundation-5.5.2/js/vendor/jquery.js"></script>
    <script src="/foundation-5.5.2/js/foundation.min.js"></script>
    <!-- FIX: this script tag was previously unclosed, which made the browser
         treat the inline initializer below as part of its (empty) body, so
         $(document).foundation() never ran. -->
    <script src="/angularjs/1.4.3/angular.min.js"></script>
    <script>
      $(document).foundation();
    </script>
  </body>
</html>
"content_hash": "337795b6553e5d6a9defca6fe8105efe",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 32.916666666666664,
"alnum_prop": 0.6291139240506329,
"repo_name": "JohannesSanders/Snake-Angular-Laravel",
"id": "dce8890f34533792d773a0328fad1d1eebcabafb",
"size": "790",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "snake/resources/views/template/master.blade.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "412"
},
{
"name": "CSS",
"bytes": "72"
},
{
"name": "HTML",
"bytes": "20915"
},
{
"name": "JavaScript",
"bytes": "9357"
},
{
"name": "PHP",
"bytes": "72055"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.privateca.v1.model;
/**
* Request message for CertificateAuthorityService.UndeleteCertificateAuthority.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Certificate Authority API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class UndeleteCertificateAuthorityRequest extends com.google.api.client.json.GenericJson {
  /**
   * Optional. An ID to identify requests. Specify a unique request ID so that if you must retry
   * your request, the server will know to ignore the request if it has already been completed. The
   * server will guarantee that for at least 60 minutes since the first request. For example,
   * consider a situation where you make an initial request and the request times out. If you make
   * the request again with the same request ID, the server can check if original operation with the
   * same request ID was received, and if so, will ignore the second request. This prevents clients
   * from accidentally creating duplicate commitments. The request ID must be a valid UUID with the
   * exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String requestId;
  /**
   * Optional. An ID to identify requests. Specify a unique request ID so that if you must retry
   * your request, the server will know to ignore the request if it has already been completed. The
   * server will guarantee that for at least 60 minutes since the first request. For example,
   * consider a situation where you make an initial request and the request times out. If you make
   * the request again with the same request ID, the server can check if original operation with the
   * same request ID was received, and if so, will ignore the second request. This prevents clients
   * from accidentally creating duplicate commitments. The request ID must be a valid UUID with the
   * exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
   * @return value or {@code null} for none
   */
  public java.lang.String getRequestId() {
    return requestId;
  }
  /**
   * Optional. An ID to identify requests. Specify a unique request ID so that if you must retry
   * your request, the server will know to ignore the request if it has already been completed. The
   * server will guarantee that for at least 60 minutes since the first request. For example,
   * consider a situation where you make an initial request and the request times out. If you make
   * the request again with the same request ID, the server can check if original operation with the
   * same request ID was received, and if so, will ignore the second request. This prevents clients
   * from accidentally creating duplicate commitments. The request ID must be a valid UUID with the
   * exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
   * @param requestId requestId or {@code null} for none
   */
  public UndeleteCertificateAuthorityRequest setRequestId(java.lang.String requestId) {
    this.requestId = requestId;
    return this;
  }
  @Override
  public UndeleteCertificateAuthorityRequest set(String fieldName, Object value) {
    return (UndeleteCertificateAuthorityRequest) super.set(fieldName, value);
  }
  @Override
  public UndeleteCertificateAuthorityRequest clone() {
    return (UndeleteCertificateAuthorityRequest) super.clone();
  }
}
| {
"content_hash": "25c26bab41efa7dcd8804b353ed019d2",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 182,
"avg_line_length": 51.125,
"alnum_prop": 0.7543898644143143,
"repo_name": "googleapis/google-api-java-client-services",
"id": "79a837de8fddf10fb8be5f36e154a473626874e9",
"size": "4499",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "clients/google-api-services-privateca/v1/2.0.0/com/google/api/services/privateca/v1/model/UndeleteCertificateAuthorityRequest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.opengamma.engine.marketdata;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetResolver;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.marketdata.availability.MarketDataAvailabilityProvider;
import com.opengamma.engine.target.ComputationTargetSpecificationResolver;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
/**
* In-memory implementation of a {@link MarketDataInjector}.
*/
public class MarketDataInjectorImpl implements MarketDataInjector {

  private static final Logger s_logger = LoggerFactory.getLogger(MarketDataInjectorImpl.class);

  /**
   * A snapshot of the state of the injection.
   */
  public final class Snapshot {

    private final MarketDataAvailabilityProvider _availability;
    private final Map<ValueRequirement, Object> _valuesByRequirement;
    private final Map<ValueSpecification, Object> _valuesBySpecification;

    private Snapshot(final MarketDataAvailabilityProvider availability,
        final Map<ValueRequirement, Object> valuesByRequirement,
        final Map<ValueSpecification, Object> valuesBySpecification) {
      _availability = availability;
      _valuesByRequirement = valuesByRequirement;
      _valuesBySpecification = valuesBySpecification;
    }

    /**
     * Resolves any values injected by requirement into value specifications so that they can be
     * queried. Entries that cannot be resolved are logged and skipped.
     */
    public void init() {
      if (_valuesByRequirement.isEmpty()) {
        return;
      }
      final ComputationTargetResolver.AtVersionCorrection targetResolver = getComputationTargetResolver();
      if (targetResolver == null) {
        s_logger.warn("Values injected by requirement, but no target resolver");
        return;
      }
      final ComputationTargetSpecificationResolver.AtVersionCorrection specificationResolver = targetResolver.getSpecificationResolver();
      for (final Map.Entry<ValueRequirement, Object> entry : _valuesByRequirement.entrySet()) {
        final ComputationTargetSpecification targetSpec = specificationResolver.getTargetSpecification(entry.getKey().getTargetReference());
        if (targetSpec == null) {
          s_logger.warn("Couldn't resolve {} for injected value requirement", entry.getKey());
          continue;
        }
        final ComputationTarget target = targetResolver.resolve(targetSpec);
        final Object targetValue = (target != null) ? target.getValue() : null;
        final ValueSpecification resolved = _availability.getAvailability(targetSpec, targetValue, entry.getKey());
        if (resolved != null) {
          s_logger.info("Injecting {} as {}", entry, resolved);
          _valuesBySpecification.put(resolved, entry.getValue());
        } else {
          s_logger.debug("Not injecting {} - no availability from {}", entry, _availability);
        }
      }
    }

    /**
     * Returns the injected value for the given specification, or {@code null} if none was injected.
     */
    public Object query(final ValueSpecification value) {
      return _valuesBySpecification.get(value);
    }
  }

  private final ConcurrentMap<ValueRequirement, Object> _valuesByRequirement = new ConcurrentHashMap<ValueRequirement, Object>();
  private final ConcurrentMap<ValueSpecification, Object> _valuesBySpecification = new ConcurrentHashMap<ValueSpecification, Object>();
  private ComputationTargetResolver.AtVersionCorrection _targetResolver;

  /**
   * Captures the currently injected values, or returns {@code null} when there is nothing to
   * snapshot. The emptiness check is repeated on the copies because the underlying maps may be
   * modified concurrently between the first test and the copy.
   */
  public Snapshot snapshot(final MarketDataAvailabilityProvider availability) {
    if (_valuesByRequirement.isEmpty() && _valuesBySpecification.isEmpty()) {
      return null;
    }
    final Map<ValueRequirement, Object> requirementCopy = new HashMap<ValueRequirement, Object>(_valuesByRequirement);
    final Map<ValueSpecification, Object> specificationCopy = new HashMap<ValueSpecification, Object>(_valuesBySpecification);
    if (requirementCopy.isEmpty() && specificationCopy.isEmpty()) {
      return null;
    }
    return new Snapshot(availability, requirementCopy, specificationCopy);
  }

  public void setComputationTargetResolver(final ComputationTargetResolver.AtVersionCorrection targetResolver) {
    _targetResolver = targetResolver;
  }

  public ComputationTargetResolver.AtVersionCorrection getComputationTargetResolver() {
    return _targetResolver;
  }

  // MarketDataInjector

  @Override
  public void addValue(ValueRequirement valueRequirement, Object value) {
    _valuesByRequirement.put(valueRequirement, value);
  }

  @Override
  public void addValue(ValueSpecification valueSpecification, Object value) {
    _valuesBySpecification.put(valueSpecification, value);
  }

  @Override
  public void removeValue(ValueRequirement valueRequirement) {
    _valuesByRequirement.remove(valueRequirement);
  }

  @Override
  public void removeValue(ValueSpecification valueSpecification) {
    _valuesBySpecification.remove(valueSpecification);
  }
}
| {
"content_hash": "b8650f0e6f48b4af9b1e05fce65a1014",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 191,
"avg_line_length": 42.17355371900826,
"alnum_prop": 0.7519106407995297,
"repo_name": "nssales/OG-Platform",
"id": "b2b817dd4cce7f2ab1e92dfbe68201df3a5edea4",
"size": "5240",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "projects/OG-Engine/src/main/java/com/opengamma/engine/marketdata/MarketDataInjectorImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4064"
},
{
"name": "CSS",
"bytes": "212432"
},
{
"name": "GAP",
"bytes": "1490"
},
{
"name": "Groovy",
"bytes": "11518"
},
{
"name": "HTML",
"bytes": "284313"
},
{
"name": "Java",
"bytes": "80833346"
},
{
"name": "JavaScript",
"bytes": "1672518"
},
{
"name": "PLSQL",
"bytes": "105"
},
{
"name": "PLpgSQL",
"bytes": "13175"
},
{
"name": "Protocol Buffer",
"bytes": "53119"
},
{
"name": "SQLPL",
"bytes": "1004"
},
{
"name": "Shell",
"bytes": "10958"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "db9ef1e89a1e337d2a907e4a3f11320f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "3f4a682acb340f3d66f682c88c6a06440c9d532d",
"size": "182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Aizoaceae/Mesembryanthemum/Mesembryanthemum praepingue/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="CloudStorageReaderSettingsTest.cs" company="Naos Project">
// Copyright (c) Naos Project 2019. All rights reserved.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Naos.AWS.Domain.Test
{
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using FakeItEasy;
using OBeautifulCode.AutoFakeItEasy;
using OBeautifulCode.CodeAnalysis.Recipes;
using OBeautifulCode.CodeGen.ModelObject.Recipes;
using OBeautifulCode.Math.Recipes;
using Xunit;
using static System.FormattableString;
[SuppressMessage("Microsoft.Maintainability", "CA1505:AvoidUnmaintainableCode", Justification = ObcSuppressBecause.CA1505_AvoidUnmaintainableCode_DisagreeWithAssessment)]
public static partial class CloudStorageReaderSettingsTest
{
    [SuppressMessage("Microsoft.Maintainability", "CA1505:AvoidUnmaintainableCode", Justification = ObcSuppressBecause.CA1505_AvoidUnmaintainableCode_DisagreeWithAssessment)]
    [SuppressMessage("Microsoft.Performance", "CA1810:InitializeReferenceTypeStaticFieldsInline", Justification = ObcSuppressBecause.CA1810_InitializeReferenceTypeStaticFieldsInline_FieldsDeclaredInCodeGeneratedPartialTestClass)]
    // Intentionally empty: this is the hand-written half of a partial test class.
    // NOTE(review): the suppression justification indicates static fields are declared in a
    // code-generated partial counterpart of this class — presumably that file populates the
    // test scenarios; confirm against the generated file.
    static CloudStorageReaderSettingsTest()
    {
    }
}
"content_hash": "7e3a85e1c101fbe0cb8b8f6217ea7040",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 233,
"avg_line_length": 45.64705882352941,
"alnum_prop": 0.6623711340206185,
"repo_name": "NaosProject/Naos.AWS",
"id": "2fbea5be180fa43197531ee4bc155763117ba9e3",
"size": "1554",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Naos.AWS.Domain.Test/CloudStorageReaderSettingsTest.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "5647692"
},
{
"name": "Smalltalk",
"bytes": "7860"
}
],
"symlink_target": ""
} |
// Constructs a server-side connection for the given configuration,
// recording the client's virtual (tunnel) IP and its real IP.
BarbaServerConnection::BarbaServerConnection(BarbaServerConfig* config, u_long clientVirtualIp, u_long clientIp)
	: BarbaConnection(config)
{
	ClientVirtualIp = clientVirtualIp;
	ClientIp = clientIp;
}

// No owned resources to release here; base class handles its own cleanup.
BarbaServerConnection::~BarbaServerConnection()
{
}
void BarbaServerConnection::ReportNewConnection()
{
std::tstring ip = BarbaUtils::ConvertIpToString(ClientIp, theApp->LogAnonymously);
std::tstring virtualIp = BarbaUtils::ConvertIpToString(ClientVirtualIp, false);
LPCTSTR mode = BarbaMode_ToString(GetConfig()->Mode);
std::tstring tunnelPorts = GetConfig()->TunnelPorts.ToString();
BarbaLog(_T("New %s! %s - %s:%s, VirtualIP: %s, ConnectionID: %u."), GetConfig()->GetName(theApp->LogAnonymously).data(), BarbaUtils::ConvertIpToString(ClientIp, theApp->LogAnonymously).data(), mode, tunnelPorts.data(), virtualIp.data(), GetId());
BarbaNotify(_T("New %s\r\nClient IP: %s\r\nClient Virtual IP: %s\r\nProtocol: %s:%s"), GetConfig()->GetName(false).data(), BarbaUtils::ConvertIpToString(ClientIp, false).data(), virtualIp.data(), mode, tunnelPorts.data());
}
| {
"content_hash": "b656b6b03c21212e8d9f618fcb45cacc",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 248,
"avg_line_length": 48.63636363636363,
"alnum_prop": 0.7542056074766356,
"repo_name": "BarbaTunnelCoder/BarbaTunnel",
"id": "73619f5ed35afa209386917e95b09b5fa439d551",
"size": "1178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BarbaTunnel/BarbaServer/BarbaServerConnection.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1305"
},
{
"name": "C",
"bytes": "40104"
},
{
"name": "C#",
"bytes": "47823"
},
{
"name": "C++",
"bytes": "307469"
},
{
"name": "Visual Basic",
"bytes": "3046"
}
],
"symlink_target": ""
} |
using NMF.Collections.Generic;
using NMF.Collections.ObjectModel;
using NMF.Expressions;
using NMF.Expressions.Linq;
using NMF.Models;
using NMF.Models.Collections;
using NMF.Models.Expressions;
using NMF.Models.Meta;
using NMF.Models.Repository;
using NMF.Serialization;
using NMF.Utilities;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics;
using System.Linq;
using TTC2017.SmartGrids.CIM.IEC61968.AssetModels;
using TTC2017.SmartGrids.CIM.IEC61968.Assets;
using TTC2017.SmartGrids.CIM.IEC61968.Common;
using TTC2017.SmartGrids.CIM.IEC61968.Customers;
using TTC2017.SmartGrids.CIM.IEC61968.Work;
using TTC2017.SmartGrids.CIM.IEC61970.Core;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.Financial;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfAssetModels;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfAssets;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfCommon;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfCustomers;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfLocations;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfOperations;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfTypeAsset;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.InfWork;
using TTC2017.SmartGrids.CIM.IEC61970.Informative.MarketOperations;
using TTC2017.SmartGrids.CIM.IEC61970.Meas;
namespace TTC2017.SmartGrids.CIM.IEC61970.Informative.InfERPSupport
{
public class ErpRecDelvLineItemAssetsCollection : ObservableOppositeOrderedSet<IErpRecDelvLineItem, IAsset>
{
    public ErpRecDelvLineItemAssetsCollection(IErpRecDelvLineItem parent) :
            base(parent)
    {
    }

    // Keeps the collection consistent: when an asset is deleted from the
    // model, drop it from this collection as well.
    private void OnItemDeleted(object sender, System.EventArgs e)
    {
        this.Remove((IAsset)sender);
    }

    // Maintains the bidirectional association between line item and asset.
    protected override void SetOpposite(IAsset item, IErpRecDelvLineItem parent)
    {
        if (parent == null)
        {
            // Detach: stop tracking deletions and remove the inverse reference.
            item.Deleted -= this.OnItemDeleted;
            item.ErpRecDeliveryItems.Remove(this.Parent);
        }
        else
        {
            // Attach: track deletions and establish the inverse reference.
            item.Deleted += this.OnItemDeleted;
            item.ErpRecDeliveryItems.Add(parent);
        }
    }
}
}
| {
"content_hash": "a93bdeeab746adcb18e02cb2f20bffbc",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 111,
"avg_line_length": 34.31884057971015,
"alnum_prop": 0.7369087837837838,
"repo_name": "georghinkel/ttc2017smartGrids",
"id": "c96a42ac7e9f55385d89961ae1df07d621acfade",
"size": "2803",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "solutions/NMF/Schema/IEC61970/Informative/InfERPSupport/ErpRecDelvLineItemAssetsCollection.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "79261108"
},
{
"name": "Java",
"bytes": "38407170"
},
{
"name": "Python",
"bytes": "6055"
},
{
"name": "R",
"bytes": "15405"
},
{
"name": "Rebol",
"bytes": "287"
}
],
"symlink_target": ""
} |
package org.gmod.gbol.simpleObject.generated;
import org.gmod.gbol.simpleObject.*;
/**
 * StockPublication generated by hbm2java
 */
public abstract class AbstractStockPublication extends AbstractSimpleObject implements java.io.Serializable {

    private Integer stockPublicationId;
    private Publication publication;
    private Stock stock;

    public AbstractStockPublication() {
    }

    public AbstractStockPublication(Publication publication, Stock stock) {
        this.publication = publication;
        this.stock = stock;
    }

    public Integer getStockPublicationId() {
        return this.stockPublicationId;
    }

    public void setStockPublicationId(Integer stockPublicationId) {
        this.stockPublicationId = stockPublicationId;
    }

    public Publication getPublication() {
        return this.publication;
    }

    public void setPublication(Publication publication) {
        this.publication = publication;
    }

    public Stock getStock() {
        return this.stock;
    }

    public void setStock(Stock stock) {
        this.stock = stock;
    }

    /**
     * Equality is based on the publication and stock associations only; the
     * surrogate id is deliberately excluded, matching {@link #hashCode()}.
     */
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        if (!(other instanceof AbstractStockPublication)) {
            // Also covers the null case, since null is not an instance of anything.
            return false;
        }
        final AbstractStockPublication that = (AbstractStockPublication) other;
        return nullSafeEquals(this.getPublication(), that.getPublication())
                && nullSafeEquals(this.getStock(), that.getStock());
    }

    // Null-tolerant equality used by equals() above.
    private static boolean nullSafeEquals(Object a, Object b) {
        return (a == b) || (a != null && b != null && a.equals(b));
    }

    public int hashCode() {
        int result = 17;
        result = 37 * result + (getPublication() == null ? 0 : this.getPublication().hashCode());
        result = 37 * result + (getStock() == null ? 0 : this.getStock().hashCode());
        return result;
    }

    /**
     * Returns a shallow copy sharing the same publication and stock references;
     * the surrogate id is not copied.
     */
    public AbstractStockPublication generateClone() {
        AbstractStockPublication cloned = new StockPublication();
        cloned.publication = this.publication;
        cloned.stock = this.stock;
        return cloned;
    }
}
| {
"content_hash": "c3a203cec7803fbbae826244c5526ae0",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 196,
"avg_line_length": 29.493506493506494,
"alnum_prop": 0.6613826508146191,
"repo_name": "nathandunn/GBOL",
"id": "8a27d0a0e21fcf806c0e8ddd81080e763d8d86d5",
"size": "2271",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GBOL/src/org/gmod/gbol/simpleObject/generated/AbstractStockPublication.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Java",
"bytes": "1407858"
}
],
"symlink_target": ""
} |
/**
* Returns a Vows topic function, which will return
* a MockWritableStream, a fake Writable Stream.
*
* Implements writeHead, like a http.ServerResponse.
* Only the (status, headers) signature is implemented.
*
* Verify with these instance variables:
*
* - $store: Written data, from write
* - $status: HTTP status code, from writeHead
* - $headers: HTTP header object, from writeHead
*
* @return {Function} Vows topic function
*/
module.exports = function () {
return function () {
function MockWritableStream () {
this.$store = "";
}
var proto = MockWritableStream.prototype;
proto.writeHead = function (status, /* msg, */ headers) {
this.$status = status;
this.$headers = headers;
};
proto.end = function (input) {
if (input) {
this.write(input);
}
this.$end = true;
};
proto.write = function (input) {
if (this.$end) {
throw new Error("Unable to write: closed.");
}
if (Buffer.isBuffer(input)) {
this.$store += input.toString("utf8");
} else {
this.$store += input;
}
};
return new MockWritableStream();
};
};
| {
"content_hash": "bb6b90e11690253612f922579f51dd04",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 65,
"avg_line_length": 25.46153846153846,
"alnum_prop": 0.5324773413897281,
"repo_name": "reid/onyx",
"id": "f9c484acf8b50146211e274b49846e9deb72c353",
"size": "1408",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "test/lib/writable-stream.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "15183"
},
{
"name": "Racket",
"bytes": "31"
},
{
"name": "Shell",
"bytes": "476"
}
],
"symlink_target": ""
} |
package zipopenfs
import (
"io"
"os"
"sync"
"time"
"golang.org/x/tools/godoc/vfs"
)
// ReadSeekerAt is a simple wrapper around a ReadSeeker to make it
// a ReaderAt. Note that using this canibalizes the seeking behavior of
// the underlying io.ReadSeeker, so it should not be used again
type ReadSeekerAt struct {
wrapped io.ReadSeeker
ratLock sync.Mutex
}
func (f *ReadSeekerAt) ReadAt(p []byte, off int64) (n int, err error) {
f.ratLock.Lock()
defer f.ratLock.Unlock()
_, err = f.wrapped.Seek(off, 0)
if err != nil {
return 0, err
}
n, err = f.wrapped.Read(p)
return
}
// ReadSeekerToReaderAt takes a io.ReadSeeker and returns a new
// io.ReaderAt that will be a proxy for the ReadSeeker.
// Note that this new ReaderAt is not as powerful, because it does
// not allow ReadAt calls to be run in parallel (they lock while waiting)
func ReadSeekerToReaderAt(rs io.ReadSeeker) io.ReaderAt {
return &ReadSeekerAt{wrapped: rs}
}
// wrappedReadSeekCloser decorates a vfs.ReadSeekCloser so that an
// additional closer is invoked once the stream itself is closed.
type wrappedReadSeekCloser struct {
	wrapped     vfs.ReadSeekCloser
	after_close io.Closer
}

// Close closes the wrapped stream first, then the extra closer.
// Both Close methods always run; the stream's error takes precedence.
func (wrsc *wrappedReadSeekCloser) Close() error {
	primary := wrsc.wrapped.Close()
	secondary := wrsc.after_close.Close()
	if primary != nil {
		return primary
	}
	return secondary
}

// Read delegates to the wrapped stream.
func (wrsc *wrappedReadSeekCloser) Read(p []byte) (int, error) {
	return wrsc.wrapped.Read(p)
}

// Seek delegates to the wrapped stream.
func (wrsc *wrappedReadSeekCloser) Seek(offset int64, whence int) (int64, error) {
	return wrsc.wrapped.Seek(offset, whence)
}

// WrapReaderWithCloser returns a vfs.ReadSeekCloser whose Close also
// invokes the given closer after closing rsc.
func WrapReaderWithCloser(rsc vfs.ReadSeekCloser, closer io.Closer) vfs.ReadSeekCloser {
	return &wrappedReadSeekCloser{rsc, closer}
}
// FSCloser is a vfs.FileSystem that must also be closed when no longer
// needed — presumably one backed by an open resource such as a zip
// archive; confirm against the callers in this package.
type FSCloser interface {
	vfs.FileSystem
	io.Closer
}
// wrappedFileSystem decorates an FSCloser so that an additional closer
// is invoked once the filesystem itself is closed.
type wrappedFileSystem struct {
	wrapped     FSCloser
	after_close io.Closer
}

// Open delegates to the wrapped filesystem.
func (wfs *wrappedFileSystem) Open(p string) (vfs.ReadSeekCloser, error) {
	return wfs.wrapped.Open(p)
}

// Lstat delegates to the wrapped filesystem.
func (wfs *wrappedFileSystem) Lstat(p string) (os.FileInfo, error) {
	return wfs.wrapped.Lstat(p)
}

// Stat delegates to the wrapped filesystem.
func (wfs *wrappedFileSystem) Stat(p string) (os.FileInfo, error) {
	return wfs.wrapped.Stat(p)
}

// ReadDir delegates to the wrapped filesystem.
func (wfs *wrappedFileSystem) ReadDir(p string) ([]os.FileInfo, error) {
	return wfs.wrapped.ReadDir(p)
}

// Close closes the wrapped filesystem first, then the extra closer.
// Both Close methods always run; the filesystem's error takes precedence.
func (wfs *wrappedFileSystem) Close() error {
	primary := wfs.wrapped.Close()
	secondary := wfs.after_close.Close()
	if primary != nil {
		return primary
	}
	return secondary
}

// String delegates to the wrapped filesystem.
func (wfs *wrappedFileSystem) String() string {
	return wfs.wrapped.String()
}

// WrapFSCloserWithCloser takes an FSCloser and returns a new FSCloser
// whose Close also invokes the given closer after closing fs.
func WrapFSCloserWithCloser(fs FSCloser, closer io.Closer) FSCloser {
	return &wrappedFileSystem{fs, closer}
}
// FakeDirFileInfo is just a os.FileInfo that has nothing but a name
// and says it's a directory
type FakeDirFileInfo struct {
name string
}
func (i FakeDirFileInfo) Name() string { return i.name }
func (i FakeDirFileInfo) Size() int64 { return 0 }
func (i FakeDirFileInfo) ModTime() time.Time { return time.Time{} }
func (i FakeDirFileInfo) Mode() os.FileMode { return os.ModeDir | 0555 }
func (i FakeDirFileInfo) IsDir() bool { return true }
func (i FakeDirFileInfo) Sys() interface{} { return nil }
func MakeFakeDirFileInfo(name string) os.FileInfo {
return FakeDirFileInfo{name}
}
| {
"content_hash": "eeb9d78c06841e5f9959e41386a11289",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 88,
"avg_line_length": 25.229007633587788,
"alnum_prop": 0.7246596066565809,
"repo_name": "allanlw/zipserve",
"id": "6c17e7ff3cc64e68b9c6fcc2c0e73996d2a10668",
"size": "3305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zipopenfs/iowrap.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Go",
"bytes": "8511"
}
],
"symlink_target": ""
} |
<?php
namespace Admin\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * TrUsuarioPerfilRecurso
 *
 * Join entity linking a user (usuario), a profile (perfil) and a
 * resource (recurso). All three associations are marked @ORM\Id, so
 * together they form the composite primary key of the join table.
 *
 * @ORM\Table(name="tr_usuario_perfil_recurso", indexes={@ORM\Index(name="IDX_E85C0CBE7AC04DB2", columns={"seq_perfil"}), @ORM\Index(name="IDX_E85C0CBE7F8094CC", columns={"seq_recurso"}), @ORM\Index(name="IDX_E85C0CBEEF5E13F5", columns={"seq_usuario"})})
 * @ORM\Entity(repositoryClass="Admin\Repository\UsuarioPerfilRecursoRepository")
 */
class TrUsuarioPerfilRecurso
{
    /**
     * The profile part of the composite key.
     *
     * @var \Admin\Entity\TbPerfil
     *
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="NONE")
     * @ORM\OneToOne(targetEntity="Admin\Entity\TbPerfil")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="seq_perfil", referencedColumnName="seq_perfil")
     * })
     */
    private $seqPerfil;

    /**
     * The resource part of the composite key.
     *
     * @var \Admin\Entity\TbRecurso
     *
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="NONE")
     * @ORM\OneToOne(targetEntity="Admin\Entity\TbRecurso")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="seq_recurso", referencedColumnName="seq_recurso")
     * })
     */
    private $seqRecurso;

    /**
     * The user part of the composite key.
     *
     * @var \Admin\Entity\TbUsuario
     *
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="NONE")
     * @ORM\OneToOne(targetEntity="Admin\Entity\TbUsuario")
     * @ORM\JoinColumns({
     *   @ORM\JoinColumn(name="seq_usuario", referencedColumnName="seq_usuario")
     * })
     */
    private $seqUsuario;

    /**
     * Convert the object to an array.
     *
     * @return array
     */
    public function getArrayCopy()
    {
        return get_object_vars($this);
    }

    /**
     * @return TbPerfil
     */
    public function getSeqPerfil()
    {
        return $this->seqPerfil;
    }

    /**
     * @param TbPerfil $seqPerfil
     */
    public function setSeqPerfil($seqPerfil)
    {
        $this->seqPerfil = $seqPerfil;
    }

    /**
     * @return TbRecurso
     */
    public function getSeqRecurso()
    {
        return $this->seqRecurso;
    }

    /**
     * @param TbRecurso $seqRecurso
     */
    public function setSeqRecurso($seqRecurso)
    {
        $this->seqRecurso = $seqRecurso;
    }

    /**
     * @return TbUsuario
     */
    public function getSeqUsuario()
    {
        return $this->seqUsuario;
    }

    /**
     * @param TbUsuario $seqUsuario
     */
    public function setSeqUsuario($seqUsuario)
    {
        $this->seqUsuario = $seqUsuario;
    }
}
| {
"content_hash": "4da8d7129b45aa3740d5e76349b7ed11",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 254,
"avg_line_length": 22.229357798165136,
"alnum_prop": 0.5889393314073462,
"repo_name": "darlanmdantas/adesam",
"id": "b693261ded19661119a7e0b2abd2665bfda1f3a1",
"size": "2423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "module/Admin/src/Admin/Entity/TrUsuarioPerfilRecurso.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "711"
},
{
"name": "CSS",
"bytes": "404876"
},
{
"name": "HTML",
"bytes": "68129"
},
{
"name": "JavaScript",
"bytes": "245416"
},
{
"name": "PHP",
"bytes": "142843"
}
],
"symlink_target": ""
} |
package org.onosproject.net.packet;
import static com.google.common.base.Preconditions.checkArgument;
/**
* Abstraction of an inbound packet processor.
*/
public interface PacketProcessor {
int ADVISOR_MAX = Integer.MAX_VALUE / 3;
int DIRECTOR_MAX = (Integer.MAX_VALUE / 3) * 2;
int OBSERVER_MAX = Integer.MAX_VALUE;
/**
* Returns a priority in the ADVISOR range, where processors can take early action and
* influence the packet context. However, they cannot handle the packet (i.e. call send() or block()).
* The valid range is from 1 to ADVISOR_MAX.
* Processors in this range get to see the packet first.
*
* @param priority priority within ADVISOR range
* @return overall priority
*/
static int advisor(int priority) {
int overallPriority = priority + 1;
checkArgument(overallPriority > 0 && overallPriority <= ADVISOR_MAX,
"Priority not within ADVISOR range");
return overallPriority;
}
/**
* Returns a priority in the DIRECTOR range, where processors can handle the packet.
* The valid range is from ADVISOR_MAX+1 to DIRECTOR_MAX.
* Processors in this range get to see the packet second, after ADVISORS.
*
* @param priority priority within the DIRECTOR range
* @return overall priority
*/
static int director(int priority) {
int overallPriority = ADVISOR_MAX + priority + 1;
checkArgument(overallPriority > ADVISOR_MAX && overallPriority <= DIRECTOR_MAX,
"Priority not within DIRECTOR range");
return overallPriority;
}
/**
* Returns a priority in the OBSERVER range, where processors cannot take any action,
* but can observe what action has been taken until then.
* The valid range is from DIRECTOR_MAX+1 to OBSERVER_MAX.
* Processors in this range get to see the packet last, after ADVISORS and DIRECTORS.
*
* @param priority priority within the OBSERVER range
* @return overall priority
*/
static int observer(int priority) {
int overallPriority = DIRECTOR_MAX + priority + 1;
checkArgument(overallPriority > DIRECTOR_MAX,
"Priority not within OBSERVER range");
return overallPriority;
}
/**
* Processes the inbound packet as specified in the given context.
*
* @param context packet processing context
*/
void process(PacketContext context);
}
| {
"content_hash": "18548779e91ab87f09259d5474bb9dcb",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 106,
"avg_line_length": 36.17391304347826,
"alnum_prop": 0.6618589743589743,
"repo_name": "gkatsikas/onos",
"id": "577e7a86bde6a67b9486bbe8dab166f2b185d475",
"size": "3113",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "core/api/src/main/java/org/onosproject/net/packet/PacketProcessor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "367433"
},
{
"name": "Dockerfile",
"bytes": "3187"
},
{
"name": "HTML",
"bytes": "332756"
},
{
"name": "Java",
"bytes": "38789778"
},
{
"name": "JavaScript",
"bytes": "3999775"
},
{
"name": "Jinja",
"bytes": "2272195"
},
{
"name": "Makefile",
"bytes": "1852"
},
{
"name": "P4",
"bytes": "197536"
},
{
"name": "Python",
"bytes": "489477"
},
{
"name": "SCSS",
"bytes": "3578"
},
{
"name": "Shell",
"bytes": "342726"
},
{
"name": "Starlark",
"bytes": "495306"
},
{
"name": "TypeScript",
"bytes": "959357"
}
],
"symlink_target": ""
} |
package org.kie.wb.test.rest.security;
import org.guvnor.rest.client.Space;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.wb.test.rest.AccessRestTestBase;
import org.kie.wb.test.rest.User;
@RunWith(Parameterized.class)
public class SpaceAccessIntegrationTest extends AccessRestTestBase {

    public SpaceAccessIntegrationTest(User user) {
        super(user);
    }

    // Verifies the user's role permits creating a space.
    @Test
    public void testCreateSpace() {
        final Space space = new Space();
        space.setName("createSpaceWith" + user.getUserName());
        space.setOwner(USER_ID);
        assertOperation(() -> roleClient.createSpace(space));
    }

    // Verifies the user's role permits deleting an existing space.
    @Test
    public void testDeleteSpace() {
        final String spaceName = "deleteSpaceWith" + user.getUserName();
        createSpace(spaceName);
        assertOperation(() -> roleClient.deleteSpace(spaceName));
    }

    // Verifies the user's role permits reading an existing space.
    @Test
    public void testGetSpace() {
        final String spaceName = "getSpaceWith" + user.getUserName();
        createSpace(spaceName);
        assertOperation(() -> roleClient.getSpace(spaceName));
    }

    // Verifies the user's role permits listing all spaces.
    @Test
    public void testGetSpaces() {
        assertOperation(() -> roleClient.getSpaces());
    }
}
| {
"content_hash": "9d671317c73f670950a1ebf46b7c110d",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 68,
"avg_line_length": 24.3265306122449,
"alnum_prop": 0.6703020134228188,
"repo_name": "jomarko/kie-wb-distributions",
"id": "c1fafb149678ecbb303cd8cbe5dc065c8a3e3f6b",
"size": "1813",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "business-central-tests/business-central-tests-rest/src/test/java/org/kie/wb/test/rest/security/SpaceAccessIntegrationTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2917"
},
{
"name": "CSS",
"bytes": "16549"
},
{
"name": "HTML",
"bytes": "17193"
},
{
"name": "Java",
"bytes": "396525"
},
{
"name": "JavaScript",
"bytes": "3911"
},
{
"name": "Shell",
"bytes": "2593"
},
{
"name": "XSLT",
"bytes": "1766"
}
],
"symlink_target": ""
} |
'use strict';

var webdriver = require('selenium-webdriver');

describe('docs.angularjs.org', function() {

  beforeEach(function() {
    // Drain log entries accumulated by earlier specs so each test only
    // observes its own console output.
    browser.manage().logs().get('browser');
  });

  afterEach(function() {
    // Fail the spec if the browser console captured anything above WARNING.
    browser.manage().logs().get('browser').then(function(entries) {
      var errors = entries.filter(function(entry) {
        return entry.level.value > webdriver.logging.Level.WARNING.value;
      });
      if (errors.length) {
        console.log('browser console errors: ' + require('util').inspect(errors));
      }
      expect(errors.length).toEqual(0);
    });
  });

  describe('App', function() {

    // Navigates to `url` and asserts the rendered page heading.
    function expectHeading(url, heading) {
      browser.get(url);
      expect(element(by.css('h1')).getText()).toEqual(heading);
    }

    // it('should filter the module list when searching', function () {
    //   browser.get();
    //   browser.waitForAngular();
    //   var search = element(by.model('q'));
    //   search.clear();
    //   search.sendKeys('ngBind');
    //   var firstModule = element(by.css('.search-results a'));
    //   expect(firstModule.getText()).toEqual('ngBind');
    // });

    it('should change the page content when clicking a link to a service', function() {
      browser.get('build/docs/index-production.html');
      element(by.css('.definition-table td a[href="api/ng/directive/ngClick"]')).click();
      expect(element(by.css('h1')).getText()).toEqual('ngClick');
    });

    it('should be resilient to trailing slashes', function() {
      expectHeading('build/docs/index-production.html#!/api/ng/function/angular.noop/', 'angular.noop');
    });

    it('should be resilient to trailing "index"', function() {
      expectHeading('build/docs/index-production.html#!/api/ng/function/angular.noop/index', 'angular.noop');
    });

    it('should be resilient to trailing "index/"', function() {
      expectHeading('build/docs/index-production.html#!/api/ng/function/angular.noop/index/', 'angular.noop');
    });

    it('should display formatted error messages on error doc pages', function() {
      browser.get('build/docs/index-production.html#!error/ng/areq?p0=Missing&p1=not%20a%20function,%20got%20undefined');
      expect(element(by.css('.minerr-errmsg')).getText()).toEqual('Argument \'Missing\' is not a function, got undefined');
    });

    it('should display an error if the page does not exist', function() {
      browser.get('build/docs/index-production.html#!/api/does/not/exist');
      expect(element(by.css('h1')).getText()).toBe('Oops!');
    });
  });
});
| {
"content_hash": "ba7901b32b9894860fbae064d28c1cb1",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 123,
"avg_line_length": 33.51162790697674,
"alnum_prop": 0.6401804302567662,
"repo_name": "kylewuolle/angular.js",
"id": "4bacb00de97ac302fcfd1284477ea08593b58c1d",
"size": "2882",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "docs/app/e2e/app.scenario.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3738"
},
{
"name": "HTML",
"bytes": "81623"
},
{
"name": "JavaScript",
"bytes": "6615100"
},
{
"name": "PHP",
"bytes": "7222"
},
{
"name": "PostScript",
"bytes": "19551"
},
{
"name": "Shell",
"bytes": "19994"
}
],
"symlink_target": ""
} |
<?xml version="1.0"?>
<!-- Library manifest for the firebase-auth-module artifact.
     Declares only the minimum supported Android SDK level (API 9). -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android" package="com.google.firebase.auth.module">
<uses-sdk android:minSdkVersion="9"/>
</manifest>
| {
"content_hash": "3f2b43dc53e1462e3e38333dc87a1bdc",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 111,
"avg_line_length": 37.8,
"alnum_prop": 0.7195767195767195,
"repo_name": "lucasvss/CallRangers",
"id": "246f1cf64fa40f326ddab8c6906b4d5ce859bbd4",
"size": "189",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "app/build/intermediates/exploded-aar/com.google.firebase/firebase-auth-module/9.8.0/AndroidManifest.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1254642"
}
],
"symlink_target": ""
} |
// Downloads the remote file named by the first non-empty token in `optns`.
// Sequence: create/truncate the local copy, then drive the XML control
// protocol (list + open -> read -> close) over xmlStream while a DataThread
// receives the payload on the single data link. On any protocol failure the
// XFTP_DIE / XFTP_ASSERT macros report the error and return early.
void xftpclient::SuperControl::Get (Tokenizer & optns)
{
using std::tr1::shared_ptr;
using namespace std;
using namespace xftp;
using namespace boost::lambda;
// NOTE(review): std::auto_ptr is deprecated; shared_ptr (already used below)
// would be the safer holder -- confirm against the project's toolchain.
auto_ptr<string> path;
// Select first nonempty string as name of file to be retrieved
Tokenizer::iterator i =
find_if (optns.begin (), optns.end (),
!bind (&string::empty, boost::lambda::_1));
if (i!=optns.end ())
path.reset (new string (*i));
else
// return if no file
XFTP_DIE ("No filename specified", return);
// open file for writing (this will be copy of remote file) and create it as
// empty one
FILE * f = fopen (path->c_str (), "w");
if (!f)
XFTP_DIE ("Could not open file for writing "<< *path
<< stringError (errno), return);
fclose (f);
// create data thread for single data transfer link (multiple links are
// not yet supported
DataThread* dt=AssureDT (single_data );
// XML transaction STEP 1:
// request to open remote file (client:open XML command is analogous to
// UNIX open routine)
// 1 a) : setup message
client::MsgList * list_request = new client::MsgList;
list_request->SetRecurse (0);
list_request->SetDir (*path);
xmlStream.PostMessage (shared_ptr<Message>(list_request), false);
client::MsgOpen * open = new client::MsgOpen;
open->SetFilename (*path);
// 1 b) post message
xmlStream.PostMessage (shared_ptr<Message>(open), true);
// STEP 2: verify, if server allows us to operate specific file
shared_ptr<Message> Result ( xmlStream.GetMessage ());
// On failure, also drain the reply to the open message before returning.
XFTP_ASSERT (Result, "file does not exist", xmlStream.GetMessage ();return);
// File size is taken from the metadata of the list reply; it sizes both the
// data-thread transfer and the read request below.
int file_size =
dynamic_cast<server::MsgList * > ( Result.get () ) -> GetNode ()
.GetMetadata ().filesize;
// If OK, set up data transmission
SuperData& sd = dynamic_cast<SuperData&>(*dt);
sd.ExecuteCommand (SuperData::Get, *path, file_size);
// STEP 3: order server to post whole file (client:read is - as above -
// simmilar to UNIX read routine)
client::MsgRead * read = new client::MsgRead;
read->SetSize (file_size);
read->SetStreamID (single_data);
Result= xmlStream.GetMessage ();
XFTP_ASSERT (Result, "failed to open file", return);
// 3 b) Post READ and CLOSE message
xmlStream.PostMessage (shared_ptr<Message>(read), false);
xmlStream.PostMessage (shared_ptr<Message>(
new client::MsgClose), true);
/// server is implemented in way, that allows it to cope with situations,
/// where client doesn't close file, but server is not required to support
/// such client any longer.
// Assure, thar both read and close return success.
Result=xmlStream.GetMessage ();
XFTP_ASSERT (Result, "read error", xmlStream.GetMessage ();return);
Result=xmlStream.GetMessage ();
XFTP_ASSERT (Result, "close error", return);
}
| {
"content_hash": "ad6c2c569640e463ae2bd3d24099e39f",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 77,
"avg_line_length": 34.63291139240506,
"alnum_prop": 0.6948099415204678,
"repo_name": "maciek-27/Rgp",
"id": "f2dcf348d3f566416edb39a5726dcd276db9879f",
"size": "2981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extra/xftp/xftp-client/supercontrol_get.c++",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "16809"
},
{
"name": "C++",
"bytes": "851138"
},
{
"name": "CMake",
"bytes": "22751"
},
{
"name": "Makefile",
"bytes": "122230"
},
{
"name": "Perl",
"bytes": "2239"
},
{
"name": "Shell",
"bytes": "192"
},
{
"name": "TeX",
"bytes": "3205"
}
],
"symlink_target": ""
} |
using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace json_spirit;
// Serializes an output script into a json_spirit Object: always emits "asm"
// (and "hex" when fIncludeHex is true); when destinations can be extracted it
// also emits "reqSigs", "type" and the encoded "addresses" list.
void ScriptPubKeyToJSON(const CScript& scriptPubKey, Object& out, bool fIncludeHex)
{
txnouttype type;
vector<CTxDestination> addresses;
int nRequired;
out.push_back(Pair("asm", scriptPubKey.ToString()));
if (fIncludeHex)
out.push_back(Pair("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end())));
if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired))
{
// Non-standard / unsolvable script: report only its type and stop.
out.push_back(Pair("type", GetTxnOutputType(type)));
return;
}
out.push_back(Pair("reqSigs", nRequired));
out.push_back(Pair("type", GetTxnOutputType(type)));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CBitcoinAddress(addr).ToString());
out.push_back(Pair("addresses", a));
}
// Serializes a transaction into a json_spirit Object. When hashBlock is
// non-zero and the block is known, block context (blockhash, confirmations,
// block time) is appended as well.
void TxToJSON(const CTransaction& tx, const uint256 hashBlock, Object& entry)
{
entry.push_back(Pair("txid", tx.GetHash().GetHex()));
entry.push_back(Pair("version", tx.nVersion));
entry.push_back(Pair("time", (int64_t)tx.nTime));
entry.push_back(Pair("locktime", (int64_t)tx.nLockTime))
;
Array vin;
BOOST_FOREACH(const CTxIn& txin, tx.vin)
{
Object in;
if (tx.IsCoinBase())
in.push_back(Pair("coinbase", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
else
{
in.push_back(Pair("txid", txin.prevout.hash.GetHex()));
in.push_back(Pair("vout", (int64_t)txin.prevout.n));
Object o;
o.push_back(Pair("asm", txin.scriptSig.ToString()));
o.push_back(Pair("hex", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
in.push_back(Pair("scriptSig", o));
}
in.push_back(Pair("sequence", (int64_t)txin.nSequence));
vin.push_back(in);
}
entry.push_back(Pair("vin", vin));
Array vout;
for (unsigned int i = 0; i < tx.vout.size(); i++)
{
const CTxOut& txout = tx.vout[i];
Object out;
out.push_back(Pair("value", ValueFromAmount(txout.nValue)));
out.push_back(Pair("n", (int64_t)i));
Object o;
// hex is omitted here; getrawtransaction exposes the full tx hex separately
ScriptPubKeyToJSON(txout.scriptPubKey, o, false);
out.push_back(Pair("scriptPubKey", o));
vout.push_back(out);
}
entry.push_back(Pair("vout", vout));
if (hashBlock != 0)
{
entry.push_back(Pair("blockhash", hashBlock.GetHex()));
map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second)
{
CBlockIndex* pindex = (*mi).second;
if (pindex->IsInMainChain())
{
entry.push_back(Pair("confirmations", 1 + nBestHeight - pindex->nHeight));
// NOTE(review): a second "time" pair is pushed here (block time) in
// addition to the tx-level "time" above -- consumers see a duplicate key.
entry.push_back(Pair("time", (int64_t)pindex->nTime));
entry.push_back(Pair("blocktime", (int64_t)pindex->nTime));
}
else
entry.push_back(Pair("confirmations", 0));
}
}
}
// getrawtransaction <txid> [verbose=0]
// Looks up a transaction by hash (chain or mempool). Returns the serialized
// hex string, or -- when verbose is non-zero -- a JSON object that also
// embeds the hex under "hex".
Value getrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getrawtransaction <txid> [verbose=0]\n"
"If verbose=0, returns a string that is\n"
"serialized, hex-encoded data for <txid>.\n"
"If verbose is non-zero, returns an Object\n"
"with information about <txid>.");
uint256 hash;
hash.SetHex(params[0].get_str());
bool fVerbose = false;
if (params.size() > 1)
fVerbose = (params[1].get_int() != 0);
CTransaction tx;
uint256 hashBlock = 0;
// hashBlock stays 0 when the tx is only in the mempool.
if (!GetTransaction(hash, tx, hashBlock))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction");
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << tx;
string strHex = HexStr(ssTx.begin(), ssTx.end());
if (!fVerbose)
return strHex;
Object result;
result.push_back(Pair("hex", strHex));
TxToJSON(tx, hashBlock, result);
return result;
}
// listunspent [minconf=1] [maxconf=9999999] ["address",...]
// Returns the wallet's unspent outputs filtered by confirmation depth and,
// optionally, by a set of destination addresses.
Value listunspent(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 3)
throw runtime_error(
"listunspent [minconf=1] [maxconf=9999999] [\"address\",...]\n"
"Returns array of unspent transaction outputs\n"
"with between minconf and maxconf (inclusive) confirmations.\n"
"Optionally filtered to only include txouts paid to specified addresses.\n"
"Results are an array of Objects, each of which has:\n"
"{txid, vout, scriptPubKey, amount, confirmations}");
RPCTypeCheck(params, list_of(int_type)(int_type)(array_type));
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
int nMaxDepth = 9999999;
if (params.size() > 1)
nMaxDepth = params[1].get_int();
set<CBitcoinAddress> setAddress;
if (params.size() > 2)
{
// Validate and deduplicate the optional address filter.
Array inputs = params[2].get_array();
BOOST_FOREACH(Value& input, inputs)
{
CBitcoinAddress address(input.get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid ELcoin address: ")+input.get_str());
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+input.get_str());
setAddress.insert(address);
}
}
Array results;
vector<COutput> vecOutputs;
pwalletMain->AvailableCoins(vecOutputs, false);
BOOST_FOREACH(const COutput& out, vecOutputs)
{
if (out.nDepth < nMinDepth || out.nDepth > nMaxDepth)
continue;
if(setAddress.size())
{
// Skip outputs whose destination cannot be extracted or is not in the filter.
CTxDestination address;
if(!ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
continue;
if (!setAddress.count(address))
continue;
}
int64_t nValue = out.tx->vout[out.i].nValue;
const CScript& pk = out.tx->vout[out.i].scriptPubKey;
Object entry;
entry.push_back(Pair("txid", out.tx->GetHash().GetHex()));
entry.push_back(Pair("vout", out.i));
CTxDestination address;
if (ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
{
entry.push_back(Pair("address", CBitcoinAddress(address).ToString()));
// Include the wallet's account label when the address is in the address book.
if (pwalletMain->mapAddressBook.count(address))
entry.push_back(Pair("account", pwalletMain->mapAddressBook[address]));
}
entry.push_back(Pair("scriptPubKey", HexStr(pk.begin(), pk.end())));
entry.push_back(Pair("amount",ValueFromAmount(nValue)));
entry.push_back(Pair("confirmations",out.nDepth));
results.push_back(entry);
}
return results;
}
// createrawtransaction [{"txid":txid,"vout":n},...] {address:amount,...}
// Builds an UNSIGNED transaction from the given inputs and outputs and
// returns it hex-serialized. Nothing is signed, stored, or broadcast.
Value createrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw runtime_error(
"createrawtransaction [{\"txid\":txid,\"vout\":n},...] {address:amount,...}\n"
"Create a transaction spending given inputs\n"
"(array of objects containing transaction id and output number),\n"
"sending to given address(es).\n"
"Returns hex-encoded raw transaction.\n"
"Note that the transaction's inputs are not signed, and\n"
"it is not stored in the wallet or transmitted to the network.");
RPCTypeCheck(params, list_of(array_type)(obj_type));
Array inputs = params[0].get_array();
Object sendTo = params[1].get_obj();
CTransaction rawTx;
BOOST_FOREACH(Value& input, inputs)
{
const Object& o = input.get_obj();
const Value& txid_v = find_value(o, "txid");
if (txid_v.type() != str_type)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing txid key");
string txid = txid_v.get_str();
if (!IsHex(txid))
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected hex txid");
const Value& vout_v = find_value(o, "vout");
if (vout_v.type() != int_type)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing vout key");
int nOutput = vout_v.get_int();
// NOTE(review): the check permits 0 (vout indexes are 0-based) even though
// the message says "positive" -- only negative values are rejected.
if (nOutput < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be positive");
CTxIn in(COutPoint(uint256(txid), nOutput));
rawTx.vin.push_back(in);
}
set<CBitcoinAddress> setAddress;
BOOST_FOREACH(const Pair& s, sendTo)
{
// Each key is a destination address; duplicates are rejected.
CBitcoinAddress address(s.name_);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid ELcoin address: ")+s.name_);
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
setAddress.insert(address);
CScript scriptPubKey;
scriptPubKey.SetDestination(address.Get());
int64_t nAmount = AmountFromValue(s.value_);
CTxOut out(nAmount, scriptPubKey);
rawTx.vout.push_back(out);
}
CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
ss << rawTx;
return HexStr(ss.begin(), ss.end());
}
// decoderawtransaction <hex string>
// Deserializes a hex-encoded transaction and returns its JSON representation
// (no chain lookup: hashBlock 0 means no block context is attached).
Value decoderawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decoderawtransaction <hex string>\n"
"Return a JSON object representing the serialized, hex-encoded transaction.");
RPCTypeCheck(params, list_of(str_type));
vector<unsigned char> txData(ParseHex(params[0].get_str()));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
Object result;
TxToJSON(tx, 0, result);
return result;
}
// decodescript <hex string>
// Decodes a hex-encoded script, returning its asm/type/addresses plus the
// P2SH address the script would hash to.
Value decodescript(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decodescript <hex string>\n"
"Decode a hex-encoded script.");
RPCTypeCheck(params, list_of(str_type));
Object r;
CScript script;
if (params[0].get_str().size() > 0){
vector<unsigned char> scriptData(ParseHexV(params[0], "argument"));
script = CScript(scriptData.begin(), scriptData.end());
} else {
// Empty scripts are valid
}
ScriptPubKeyToJSON(script, r, false);
// The pay-to-script-hash address corresponding to this script.
r.push_back(Pair("p2sh", CBitcoinAddress(script.GetID()).ToString()));
return r;
}
// signrawtransaction <hex string> [prevtxs] [privkeys] [sighashtype="ALL"]
// Signs every input of the supplied raw transaction for which a previous
// output script is known (from the chain, or from the prevtxs argument),
// merging in any signatures already present across the supplied transaction
// variants. Returns {hex, complete}.
Value signrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 4)
throw runtime_error(
"signrawtransaction <hex string> [{\"txid\":txid,\"vout\":n,\"scriptPubKey\":hex},...] [<privatekey1>,...] [sighashtype=\"ALL\"]\n"
"Sign inputs for raw transaction (serialized, hex-encoded).\n"
"Second optional argument (may be null) is an array of previous transaction outputs that\n"
"this transaction depends on but may not yet be in the blockchain.\n"
"Third optional argument (may be null) is an array of base58-encoded private\n"
"keys that, if given, will be the only keys used to sign the transaction.\n"
"Fourth optional argument is a string that is one of six values; ALL, NONE, SINGLE or\n"
"ALL|ANYONECANPAY, NONE|ANYONECANPAY, SINGLE|ANYONECANPAY.\n"
"Returns json object with keys:\n"
" hex : raw transaction with signature(s) (hex-encoded string)\n"
" complete : 1 if transaction has a complete set of signature (0 if not)"
+ HelpRequiringPassphrase());
RPCTypeCheck(params, list_of(str_type)(array_type)(array_type)(str_type), true);
vector<unsigned char> txData(ParseHex(params[0].get_str()));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
vector<CTransaction> txVariants;
// The hex blob may contain several serialized variants of the same tx
// back-to-back; keep decoding until the stream is exhausted.
while (!ssData.empty())
{
try {
CTransaction tx;
ssData >> tx;
txVariants.push_back(tx);
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
}
if (txVariants.empty())
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Missing transaction");
// mergedTx will end up with all the signatures; it
// starts as a clone of the rawtx:
CTransaction mergedTx(txVariants[0]);
bool fComplete = true;
// Fetch previous transactions (inputs):
map<COutPoint, CScript> mapPrevOut;
for (unsigned int i = 0; i < mergedTx.vin.size(); i++)
{
CTransaction tempTx;
MapPrevTx mapPrevTx;
CTxDB txdb("r");
map<uint256, CTxIndex> unused;
bool fInvalid;
// FetchInputs aborts on failure, so we go one at a time.
tempTx.vin.push_back(mergedTx.vin[i]);
tempTx.FetchInputs(txdb, unused, false, false, mapPrevTx, fInvalid);
// Copy results into mapPrevOut:
BOOST_FOREACH(const CTxIn& txin, tempTx.vin)
{
const uint256& prevHash = txin.prevout.hash;
if (mapPrevTx.count(prevHash) && mapPrevTx[prevHash].second.vout.size()>txin.prevout.n)
mapPrevOut[txin.prevout] = mapPrevTx[prevHash].second.vout[txin.prevout.n].scriptPubKey;
}
}
// Add previous txouts given in the RPC call:
if (params.size() > 1 && params[1].type() != null_type)
{
Array prevTxs = params[1].get_array();
BOOST_FOREACH(Value& p, prevTxs)
{
if (p.type() != obj_type)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "expected object with {\"txid'\",\"vout\",\"scriptPubKey\"}");
Object prevOut = p.get_obj();
RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type));
string txidHex = find_value(prevOut, "txid").get_str();
if (!IsHex(txidHex))
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "txid must be hexadecimal");
uint256 txid;
txid.SetHex(txidHex);
int nOut = find_value(prevOut, "vout").get_int();
if (nOut < 0)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "vout must be positive");
string pkHex = find_value(prevOut, "scriptPubKey").get_str();
if (!IsHex(pkHex))
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "scriptPubKey must be hexadecimal");
vector<unsigned char> pkData(ParseHex(pkHex));
CScript scriptPubKey(pkData.begin(), pkData.end());
COutPoint outpoint(txid, nOut);
if (mapPrevOut.count(outpoint))
{
// Complain if scriptPubKey doesn't match
if (mapPrevOut[outpoint] != scriptPubKey)
{
string err("Previous output scriptPubKey mismatch:\n");
err = err + mapPrevOut[outpoint].ToString() + "\nvs:\n"+
scriptPubKey.ToString();
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, err);
}
}
else
mapPrevOut[outpoint] = scriptPubKey;
}
}
// When explicit keys are given, sign with ONLY those keys; otherwise use
// the (unlocked) wallet's keystore.
bool fGivenKeys = false;
CBasicKeyStore tempKeystore;
if (params.size() > 2 && params[2].type() != null_type)
{
fGivenKeys = true;
Array keys = params[2].get_array();
BOOST_FOREACH(Value k, keys)
{
CBitcoinSecret vchSecret;
bool fGood = vchSecret.SetString(k.get_str());
if (!fGood)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,"Invalid private key");
CKey key;
bool fCompressed;
CSecret secret = vchSecret.GetSecret(fCompressed);
key.SetSecret(secret, fCompressed);
tempKeystore.AddKey(key);
}
}
else
EnsureWalletIsUnlocked();
const CKeyStore& keystore = (fGivenKeys ? tempKeystore : *pwalletMain);
// Map the textual sighash argument onto its flag value; default is ALL.
int nHashType = SIGHASH_ALL;
if (params.size() > 3 && params[3].type() != null_type)
{
static map<string, int> mapSigHashValues =
boost::assign::map_list_of
(string("ALL"), int(SIGHASH_ALL))
(string("ALL|ANYONECANPAY"), int(SIGHASH_ALL|SIGHASH_ANYONECANPAY))
(string("NONE"), int(SIGHASH_NONE))
(string("NONE|ANYONECANPAY"), int(SIGHASH_NONE|SIGHASH_ANYONECANPAY))
(string("SINGLE"), int(SIGHASH_SINGLE))
(string("SINGLE|ANYONECANPAY"), int(SIGHASH_SINGLE|SIGHASH_ANYONECANPAY))
;
string strHashType = params[3].get_str();
if (mapSigHashValues.count(strHashType))
nHashType = mapSigHashValues[strHashType];
else
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid sighash param");
}
bool fHashSingle = ((nHashType & ~SIGHASH_ANYONECANPAY) == SIGHASH_SINGLE);
// Sign what we can:
for (unsigned int i = 0; i < mergedTx.vin.size(); i++)
{
CTxIn& txin = mergedTx.vin[i];
if (mapPrevOut.count(txin.prevout) == 0)
{
// Unknown previous output: cannot sign this input.
fComplete = false;
continue;
}
const CScript& prevPubKey = mapPrevOut[txin.prevout];
txin.scriptSig.clear();
// Only sign SIGHASH_SINGLE if there's a corresponding output:
if (!fHashSingle || (i < mergedTx.vout.size()))
SignSignature(keystore, prevPubKey, mergedTx, i, nHashType);
// ... and merge in other signatures:
BOOST_FOREACH(const CTransaction& txv, txVariants)
{
txin.scriptSig = CombineSignatures(prevPubKey, mergedTx, i, txin.scriptSig, txv.vin[i].scriptSig);
}
if (!VerifyScript(txin.scriptSig, prevPubKey, mergedTx, i, 0))
fComplete = false;
}
Object result;
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << mergedTx;
result.push_back(Pair("hex", HexStr(ssTx.begin(), ssTx.end())));
result.push_back(Pair("complete", fComplete));
return result;
}
// sendrawtransaction <hex string>
// Submits a serialized transaction to the local mempool (unless it is
// already known) and relays it to the network. Returns the txid.
Value sendrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 1)
throw runtime_error(
"sendrawtransaction <hex string>\n"
"Submits raw transaction (serialized, hex-encoded) to local node and network.");
RPCTypeCheck(params, list_of(str_type));
// parse hex string from parameter
vector<unsigned char> txData(ParseHex(params[0].get_str()));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
// deserialize binary data stream
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
uint256 hashTx = tx.GetHash();
// See if the transaction is already in a block
// or in the memory pool:
CTransaction existingTx;
uint256 hashBlock = 0;
if (GetTransaction(hashTx, existingTx, hashBlock))
{
// Already confirmed transactions are rejected outright.
if (hashBlock != 0)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("transaction already in block ")+hashBlock.GetHex());
// Not in block, but already in the memory pool; will drop
// through to re-relay it.
}
else
{
// push to local node
if (!AcceptToMemoryPool(mempool, tx, NULL))
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX rejected");
SyncWithWallets(tx, NULL, true);
}
RelayTransaction(tx, hashTx);
return hashTx.GetHex();
}
| {
"content_hash": "c8bfc3ee6bb8d5be36fe21f5dca8d84c",
"timestamp": "",
"source": "github",
"line_count": 548,
"max_line_length": 143,
"avg_line_length": 36.13503649635037,
"alnum_prop": 0.6016058983941016,
"repo_name": "elco-coin/elcoin-source",
"id": "c6da401c60cec947569303959169f552947a3108",
"size": "20208",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/rpcrawtransaction.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "51312"
},
{
"name": "C",
"bytes": "35897"
},
{
"name": "C++",
"bytes": "2582441"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "12684"
},
{
"name": "HTML",
"bytes": "50620"
},
{
"name": "Makefile",
"bytes": "13025"
},
{
"name": "NSIS",
"bytes": "5914"
},
{
"name": "Objective-C",
"bytes": "858"
},
{
"name": "Objective-C++",
"bytes": "3537"
},
{
"name": "Python",
"bytes": "41580"
},
{
"name": "QMake",
"bytes": "13966"
},
{
"name": "Shell",
"bytes": "9083"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- State-list background for the register screen's top-right button.
     Items are evaluated in order, first match wins:
     pressed -> green pressed art, disabled -> disabled art, else normal. -->
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:drawable="@drawable/g_green_btn_pressed" android:state_pressed="true"></item>
<item android:drawable="@drawable/register_right_top_btn_disable" android:state_enabled="false"></item>
<item android:drawable="@drawable/register_right_top_btn_normal"></item>
</selector>
"content_hash": "42526b6ad614cb7ef28a48983c610cd3",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 107,
"avg_line_length": 51.25,
"alnum_prop": 0.7146341463414634,
"repo_name": "leiming19877/nim_demo",
"id": "ea943e647e25e84e85102e7c89462c03f0fb22b1",
"size": "410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/res/drawable/register_right_top_btn_selector.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1297094"
}
],
"symlink_target": ""
} |
<!--
Copyright 2005-2014 The Kuali Foundation
Licensed under the Educational Community License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.opensource.org/licenses/ecl2.php
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<config>
<param name="krad.url" override="false">${application.url}/kr-krad</param>
<param name="krad.lookup.url" override="false">${krad.url}/lookup</param>
<param name="krad.inquiry.url" override="false">${krad.url}/inquiry</param>
<param name="rice.server.krad.url" override="false">${rice.server.url}/kr-krad</param>
<param name="rice.server.krad.lookup.url" override="false">${rice.server.krad.url}/lookup</param>
<param name="krad.externalizable.images.url" override="false">${application.url}/krad/images/</param>
<!-- Indicates whether KRAD should run in dev mode, impacts things like using full script/css files instead of minified -->
<param name="rice.krad.dev.mode" override="false">false</param>
<!-- Indicates whether Less files should be included instead of CSS files. Only applies if rice.krad.dev.mode is true -->
<param name="rice.krad.include.less" override="false">false</param>
<!-- View Lifecycle debug level settings. These may be used for troubleshooting in development,
but should be disabled for production to avoid performance issues and assertion errors -->
<param name="rice.krad.lifecycle.strict" override="false">false</param>
<param name="rice.krad.lifecycle.trace" override="false">false</param>
<!-- KRAD view lifecycle asynchronous mode settings. When enabled, all view lifecycle processing
will be broken up into multiple threads. This may improve performance for systems with multiple
cores. -->
<param name="rice.krad.lifecycle.asynchronous" override="false">false</param>
<!-- Minimum number of lifecycle worker threads to maintain in the pool -->
<param name="rice.krad.lifecycle.asynchronous.minThreads" override="false">4</param>
<!-- Maximum number of lifecycle worker threads to maintain in the pool -->
<param name="rice.krad.lifecycle.asynchronous.maxThreads" override="false">48</param>
<!-- Time, in milliseconds, to wait for view lifecycle to complete before interrupting -->
<param name="rice.krad.lifecycle.asynchronous.timeout" override="false">30000</param>
<!-- Indicates whether or not KRAD will perform incremental FreeMarker rendering within the view
lifecycle, or if all rendering will be deferred for the Spring MVC view phase. Depending on the
components involved in the view, this may improve performance, particularly when running with the
view lifecycle in asynchronous mode -->
<param name="rice.krad.lifecycle.render" override="false">false</param>
<!-- Indicates whether the hidden script inputs should be removed from the html after executing -->
<param name="rice.krad.script.cleanup" override="false">true</param>
<param name="attachments.directory" override="false">${java.io.tmpdir}/${environment}/attachments</param>
<param name="attachments.pending.directory" override="false">${attachments.directory}/pending</param>
<!-- Reloading Dictionary Config -->
<param name="reload.data.dictionary.classes.dir" override="false">target/classes</param>
<param name="reload.data.dictionary.source.dir" override="false">src/main/resources</param>
<param name="reload.data.dictionary.interval" override="false">3000</param>
<param name="load.data.dictionary" override="false">true</param>
<param name="validate.data.dictionary" override="false">true</param>
<param name="validate.data.dictionary.ebo.references" override="false">true</param>
<param name="validate.views.onbuild" override="false">false</param>
<!-- indicates whether messages from an external repository should be loaded for data dictionary beans. Enable if
external messages are being used for dictionary text. Disable if external messages are not, or to improve startup
time in development -->
<param name="load.dictionary.external.messages" override="false">true</param>
<param name="maxNumberOfSessionForms" override="false">10</param>
<!-- list of resource bundle names (full qualified classnames) for application messages (default namespace
of KUALI), for messages associated with a namespace use the resourceBundleName configuration on the
corresponding ModuleDefinition -->
<param name="resourceBundleNames" override="false">org.kuali.rice.krad.KRADApplicationResources,org.kuali.rice.krad.ApplicationResources,org.kuali.rice.kew.ApplicationResources,org.kuali.rice.krms.ApplicationResources,org.kuali.rice.core.web.cache.CacheApplicationResources</param>
<param name="krad.ehcache.config.location" override="false">classpath:org/kuali/rice/krad/config/krad.ehcache.xml</param>
<param name="rice.krad.componentPublishing.enabled" override="false">false</param>
<param name="rice.krad.componentPublishing.delay" override="false">15000</param>
<!-- validation patterns -->
<param name="validationPatternRegex.date" override="false">(19|2[0-9])[0-9]{2}\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])</param>
<param name="validationPatternRegex.noWhitespace" override="false">\\S+</param>
<param name="validationPatternRegex.time12" override="false">(1[0-2]|0?[1-9]):([0-5][0-9])(:[0-5][0-9])?</param>
<param name="validationPatternRegex.time24" override="false">(2[0-3]|1[0-9]|0?[0-9]):([0-5][0-9])(:[0-5][0-9])?</param>
<param name="validationPatternRegex.url" override="false">(http|https|ftp)\://[a-zA-Z0-9\-\.]+\.[a-zA-Z]{2,3}(:[a-zA-Z0-9]*)?/?([a-zA-Z0-9\-\._\?\,\'/\\\+&%\$#\=~])*</param>
<param name="validationPatternRegex.timestamp" override="false">(19|2[0-9])[0-9]{2}\-(0?[1-9]|1[012])\-(0?[1-9]|[12][0-9]|3[01])\\s+[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]+</param>
<param name="validationPatternRegex.emailAddress" override="false">[A-Za-z0-9]+([_.\-][A-Za-z0-9]+)*@([A-Za-z0-9][A-Za-z0-9_-]*\\.)+[A-Za-z]{2,}</param>
<param name="validationPatternRegex.floatingPoint" override="false">([0-9]*\\.)?[0-9]+</param>
<param name="validationPatternRegex.bigDecimal" override="false">([0-9,]*\\.)?[0-9]+</param>
<param name="validationPatternRegex.javaClass" override="false">[A-Za-z_$][_$a-zA-Z0-9]*(\\.[_$a-zA-Z0-9]+)*</param>
<param name="validationPatternRegex.month" override="false">(0?[1-9]|1[012])</param>
<param name="validationPatternRegex.year" override="false">(16|17|18|19|20|21)[0-9]{2}</param>
<param name="validationPatternRegex.zipcode" override="false">[0-9]{5}(\-[0-9]{4})?</param>
<param name="validationPatternRegex.phoneNumber" override="false">[0-9]{3}\-[0-9]{3}\-[0-9]{4}</param>
<!-- packages to scan for finding component classes that support the custom schema -->
<param name="rice.krad.schema.packages" override="false">org.kuali.rice.krad.uif,org.kuali.rice.krad.datadictionary,org.kuali.rice.krad.lookup</param>
<param name="rice.krad.views.autoGeneration.enabled" override="false">false</param>
</config> | {
"content_hash": "e5c4aaa4f18440144156570c646cad46",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 283,
"avg_line_length": 68.6788990825688,
"alnum_prop": 0.7135987176061982,
"repo_name": "mztaylor/rice-git",
"id": "8900414be246f4143c2dec58c55103ec23d73511",
"size": "7486",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "rice-framework/krad-web-framework/src/main/resources/META-INF/framework-config-defaults.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "795267"
},
{
"name": "Groovy",
"bytes": "2170621"
},
{
"name": "Java",
"bytes": "34571234"
},
{
"name": "JavaScript",
"bytes": "2652150"
},
{
"name": "PHP",
"bytes": "15766"
},
{
"name": "Shell",
"bytes": "10444"
},
{
"name": "XSLT",
"bytes": "107686"
}
],
"symlink_target": ""
} |
using pb = global::Google.ProtocolBuffers;
using pbc = global::Google.ProtocolBuffers.Collections;
using pbd = global::Google.ProtocolBuffers.Descriptors;
using scg = global::System.Collections.Generic;
namespace Google.ProtocolBuffers.TestProtos {
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
// NOTE(review): protoc-generated code for the legacy Google.ProtocolBuffers
// (protobuf-csharp-port) runtime. Do not hand-edit the logic — regenerate from
// extest/unittest_extras_xmltest.proto (path taken from the descriptor payload below).
/// <summary>
/// Holder for the <see cref="pbd::FileDescriptor"/>, per-message field-accessor
/// tables, and the four file-level extensions of unittest_extras_xmltest.proto.
/// All wiring happens in the static constructor.
/// </summary>
public static partial class UnitTestXmlSerializerTestProtoFile {
  #region Extension registration
  /// <summary>Adds all four extensions declared by this proto file to <paramref name="registry"/>.</summary>
  public static void RegisterAllExtensions(pb::ExtensionRegistry registry) {
    registry.Add(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionEnum);
    registry.Add(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionText);
    registry.Add(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionNumber);
    registry.Add(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionMessage);
  }
  #endregion
  #region Extensions
  // Extension identifiers; the GeneratedExtensionBase fields are assigned in the
  // static constructor after the file descriptor has been built (order matters:
  // Descriptor.Extensions[0..3] must line up with the .proto declaration order).
  public const int ExtensionEnumFieldNumber = 101;
  public static pb::GeneratedExtensionBase<global::Google.ProtocolBuffers.TestProtos.EnumOptions> ExtensionEnum;
  public const int ExtensionTextFieldNumber = 102;
  public static pb::GeneratedExtensionBase<string> ExtensionText;
  public const int ExtensionNumberFieldNumber = 103;
  public static pb::GeneratedExtensionBase<scg::IList<int>> ExtensionNumber;
  public const int ExtensionMessageFieldNumber = 199;
  public static pb::GeneratedExtensionBase<global::Google.ProtocolBuffers.TestProtos.TestXmlExtension> ExtensionMessage;
  #endregion
  #region Static variables
  // One descriptor + field-accessor-table pair per message type in the file.
  // Populated inside the assigner delegate in the static constructor below.
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlChild__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlChild, global::Google.ProtocolBuffers.TestProtos.TestXmlChild.Builder> internal__static_protobuf_unittest_extra_TestXmlChild__FieldAccessorTable;
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlNoFields__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields, global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields.Builder> internal__static_protobuf_unittest_extra_TestXmlNoFields__FieldAccessorTable;
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlRescursive__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive, global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.Builder> internal__static_protobuf_unittest_extra_TestXmlRescursive__FieldAccessorTable;
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlMessage__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Builder> internal__static_protobuf_unittest_extra_TestXmlMessage__FieldAccessorTable;
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlMessage_Children__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.Builder> internal__static_protobuf_unittest_extra_TestXmlMessage_Children__FieldAccessorTable;
  internal static pbd::MessageDescriptor internal__static_protobuf_unittest_extra_TestXmlExtension__Descriptor;
  internal static pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlExtension, global::Google.ProtocolBuffers.TestProtos.TestXmlExtension.Builder> internal__static_protobuf_unittest_extra_TestXmlExtension__FieldAccessorTable;
  #endregion
  #region Descriptor
  /// <summary>The file descriptor built from the embedded serialized FileDescriptorProto.</summary>
  public static pbd::FileDescriptor Descriptor {
    get { return descriptor; }
  }
  private static pbd::FileDescriptor descriptor;
  static UnitTestXmlSerializerTestProtoFile() {
    // Base64-encoded serialized FileDescriptorProto emitted by protoc.
    // Opaque payload — must stay byte-identical to what protoc generated.
    byte[] descriptorData = global::System.Convert.FromBase64String(
        string.Concat(
          "CiRleHRlc3QvdW5pdHRlc3RfZXh0cmFzX3htbHRlc3QucHJvdG8SF3Byb3Rv",
          "YnVmX3VuaXR0ZXN0X2V4dHJhGiRnb29nbGUvcHJvdG9idWYvY3NoYXJwX29w",
          "dGlvbnMucHJvdG8iVQoMVGVzdFhtbENoaWxkEjUKB29wdGlvbnMYAyADKA4y",
          "JC5wcm90b2J1Zl91bml0dGVzdF9leHRyYS5FbnVtT3B0aW9ucxIOCgZiaW5h",
          "cnkYBCABKAwiEQoPVGVzdFhtbE5vRmllbGRzIk4KEVRlc3RYbWxSZXNjdXJz",
          "aXZlEjkKBWNoaWxkGAEgASgLMioucHJvdG9idWZfdW5pdHRlc3RfZXh0cmEu",
          "VGVzdFhtbFJlc2N1cnNpdmUitwIKDlRlc3RYbWxNZXNzYWdlEg4KBm51bWJl",
          "chgGIAEoAxIPCgdudW1iZXJzGAIgAygFEgwKBHRleHQYAyABKAkSEgoJdGV4",
          "dGxpbmVzGLwFIAMoCRINCgV2YWxpZBgFIAEoCBI0CgVjaGlsZBgBIAEoCzIl",
          "LnByb3RvYnVmX3VuaXR0ZXN0X2V4dHJhLlRlc3RYbWxDaGlsZBJDCghjaGls",
          "ZHJlbhiRAyADKAoyMC5wcm90b2J1Zl91bml0dGVzdF9leHRyYS5UZXN0WG1s",
          "TWVzc2FnZS5DaGlsZHJlbhpRCghDaGlsZHJlbhI1CgdvcHRpb25zGAMgAygO",
          "MiQucHJvdG9idWZfdW5pdHRlc3RfZXh0cmEuRW51bU9wdGlvbnMSDgoGYmlu",
          "YXJ5GAQgASgMKgUIZBDIASIiChBUZXN0WG1sRXh0ZW5zaW9uEg4KBm51bWJl",
          "chgBIAIoBSoqCgtFbnVtT3B0aW9ucxIHCgNPTkUQABIHCgNUV08QARIJCgVU",
          "SFJFRRACOmUKDmV4dGVuc2lvbl9lbnVtEicucHJvdG9idWZfdW5pdHRlc3Rf",
          "ZXh0cmEuVGVzdFhtbE1lc3NhZ2UYZSABKA4yJC5wcm90b2J1Zl91bml0dGVz",
          "dF9leHRyYS5FbnVtT3B0aW9uczo/Cg5leHRlbnNpb25fdGV4dBInLnByb3Rv",
          "YnVmX3VuaXR0ZXN0X2V4dHJhLlRlc3RYbWxNZXNzYWdlGGYgASgJOkUKEGV4",
          "dGVuc2lvbl9udW1iZXISJy5wcm90b2J1Zl91bml0dGVzdF9leHRyYS5UZXN0",
          "WG1sTWVzc2FnZRhnIAMoBUICEAE6bgoRZXh0ZW5zaW9uX21lc3NhZ2USJy5w",
          "cm90b2J1Zl91bml0dGVzdF9leHRyYS5UZXN0WG1sTWVzc2FnZRjHASABKAsy",
          "KS5wcm90b2J1Zl91bml0dGVzdF9leHRyYS5UZXN0WG1sRXh0ZW5zaW9uQk5I",
          "AcI+SQohR29vZ2xlLlByb3RvY29sQnVmZmVycy5UZXN0UHJvdG9zEiJVbml0",
          "VGVzdFhtbFNlcmlhbGl6ZXJUZXN0UHJvdG9GaWxlSAE="));
    // Assigner runs once the descriptor is built: captures it, binds each
    // message's descriptor/accessor table by index, creates the extension
    // instances (indices must match the .proto declaration order), and returns
    // a registry carrying this file's and csharp_options' extensions.
    pbd::FileDescriptor.InternalDescriptorAssigner assigner = delegate(pbd::FileDescriptor root) {
      descriptor = root;
      internal__static_protobuf_unittest_extra_TestXmlChild__Descriptor = Descriptor.MessageTypes[0];
      internal__static_protobuf_unittest_extra_TestXmlChild__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlChild, global::Google.ProtocolBuffers.TestProtos.TestXmlChild.Builder>(internal__static_protobuf_unittest_extra_TestXmlChild__Descriptor,
          new string[] { "Options", "Binary", });
      internal__static_protobuf_unittest_extra_TestXmlNoFields__Descriptor = Descriptor.MessageTypes[1];
      internal__static_protobuf_unittest_extra_TestXmlNoFields__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields, global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields.Builder>(internal__static_protobuf_unittest_extra_TestXmlNoFields__Descriptor,
          new string[] { });
      internal__static_protobuf_unittest_extra_TestXmlRescursive__Descriptor = Descriptor.MessageTypes[2];
      internal__static_protobuf_unittest_extra_TestXmlRescursive__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive, global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.Builder>(internal__static_protobuf_unittest_extra_TestXmlRescursive__Descriptor,
          new string[] { "Child", });
      internal__static_protobuf_unittest_extra_TestXmlMessage__Descriptor = Descriptor.MessageTypes[3];
      internal__static_protobuf_unittest_extra_TestXmlMessage__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Builder>(internal__static_protobuf_unittest_extra_TestXmlMessage__Descriptor,
          new string[] { "Number", "Numbers", "Text", "Textlines", "Valid", "Child", "Children", });
      internal__static_protobuf_unittest_extra_TestXmlMessage_Children__Descriptor = internal__static_protobuf_unittest_extra_TestXmlMessage__Descriptor.NestedTypes[0];
      internal__static_protobuf_unittest_extra_TestXmlMessage_Children__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.Builder>(internal__static_protobuf_unittest_extra_TestXmlMessage_Children__Descriptor,
          new string[] { "Options", "Binary", });
      internal__static_protobuf_unittest_extra_TestXmlExtension__Descriptor = Descriptor.MessageTypes[4];
      internal__static_protobuf_unittest_extra_TestXmlExtension__FieldAccessorTable =
          new pb::FieldAccess.FieldAccessorTable<global::Google.ProtocolBuffers.TestProtos.TestXmlExtension, global::Google.ProtocolBuffers.TestProtos.TestXmlExtension.Builder>(internal__static_protobuf_unittest_extra_TestXmlExtension__Descriptor,
          new string[] { "Number", });
      global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionEnum = pb::GeneratedSingleExtension<global::Google.ProtocolBuffers.TestProtos.EnumOptions>.CreateInstance(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor.Extensions[0]);
      global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionText = pb::GeneratedSingleExtension<string>.CreateInstance(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor.Extensions[1]);
      global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionNumber = pb::GeneratedRepeatExtension<int>.CreateInstance(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor.Extensions[2]);
      global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.ExtensionMessage = pb::GeneratedSingleExtension<global::Google.ProtocolBuffers.TestProtos.TestXmlExtension>.CreateInstance(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor.Extensions[3]);
      pb::ExtensionRegistry registry = pb::ExtensionRegistry.CreateInstance();
      RegisterAllExtensions(registry);
      global::Google.ProtocolBuffers.DescriptorProtos.CSharpOptions.RegisterAllExtensions(registry);
      return registry;
    };
    // Build the descriptor; csharp_options.proto is the single dependency.
    pbd::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
        new pbd::FileDescriptor[] {
        global::Google.ProtocolBuffers.DescriptorProtos.CSharpOptions.Descriptor,
        }, assigner);
  }
  #endregion
}
#region Enums
/// <summary>
/// Generated C# mapping of the proto enum <c>EnumOptions</c> declared in
/// unittest_extras_xmltest.proto. Numeric values mirror the wire values.
/// </summary>
public enum EnumOptions {
  ONE = 0,
  TWO = 1,
  THREE = 2,
}
#endregion
#region Messages
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
// NOTE(review): protoc-generated message class — regenerate rather than hand-edit.
/// <summary>
/// Immutable generated message <c>protobuf_unittest_extra.TestXmlChild</c> with two
/// fields: repeated enum <c>options</c> (field 3) and optional bytes <c>binary</c> (field 4).
/// Instances are built/modified through the nested copy-on-write <see cref="Builder"/>.
/// </summary>
public sealed partial class TestXmlChild : pb::GeneratedMessage<TestXmlChild, TestXmlChild.Builder> {
  private TestXmlChild() { }
  private static readonly TestXmlChild defaultInstance = new TestXmlChild().MakeReadOnly();
  // Field names sorted ordinally so MergeFrom can Array.BinarySearch them;
  // _testXmlChildFieldTags is the parallel array of wire tags
  // (34 = field 4 length-delimited "binary", 24 = field 3 varint "options").
  private static readonly string[] _testXmlChildFieldNames = new string[] { "binary", "options" };
  private static readonly uint[] _testXmlChildFieldTags = new uint[] { 34, 24 };
  /// <summary>Shared read-only instance with all fields at their defaults.</summary>
  public static TestXmlChild DefaultInstance {
    get { return defaultInstance; }
  }
  public override TestXmlChild DefaultInstanceForType {
    get { return DefaultInstance; }
  }
  protected override TestXmlChild ThisMessage {
    get { return this; }
  }
  public static pbd::MessageDescriptor Descriptor {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlChild__Descriptor; }
  }
  protected override pb::FieldAccess.FieldAccessorTable<TestXmlChild, TestXmlChild.Builder> InternalFieldAccessors {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlChild__FieldAccessorTable; }
  }
  // repeated .protobuf_unittest_extra.EnumOptions options = 3
  public const int OptionsFieldNumber = 3;
  private pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> options_ = new pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions>();
  /// <summary>Read-only view of the repeated <c>options</c> field.</summary>
  public scg::IList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> OptionsList {
    get { return pbc::Lists.AsReadOnly(options_); }
  }
  public int OptionsCount {
    get { return options_.Count; }
  }
  public global::Google.ProtocolBuffers.TestProtos.EnumOptions GetOptions(int index) {
    return options_[index];
  }
  // optional bytes binary = 4
  public const int BinaryFieldNumber = 4;
  private bool hasBinary;
  private pb::ByteString binary_ = pb::ByteString.Empty;
  public bool HasBinary {
    get { return hasBinary; }
  }
  public pb::ByteString Binary {
    get { return binary_; }
  }
  // No required fields in this message, so always initialized.
  public override bool IsInitialized {
    get {
      return true;
    }
  }
  /// <summary>Serializes set fields (and unknown fields) to <paramref name="output"/>.</summary>
  public override void WriteTo(pb::ICodedOutputStream output) {
    // Generated pattern: forces SerializedSize to be computed (and memoized) first.
    int size = SerializedSize;
    string[] field_names = _testXmlChildFieldNames;
    if (options_.Count > 0) {
      output.WriteEnumArray(3, field_names[1], options_);
    }
    if (hasBinary) {
      output.WriteBytes(4, field_names[0], Binary);
    }
    UnknownFields.WriteTo(output);
  }
  // -1 = not yet computed; safe to memoize because the message is immutable.
  private int memoizedSerializedSize = -1;
  /// <summary>Wire size in bytes of this message, computed once and cached.</summary>
  public override int SerializedSize {
    get {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      {
        int dataSize = 0;
        if (options_.Count > 0) {
          foreach (global::Google.ProtocolBuffers.TestProtos.EnumOptions element in options_) {
            dataSize += pb::CodedOutputStream.ComputeEnumSizeNoTag((int) element);
          }
          size += dataSize;
          // One 1-byte tag per element (non-packed repeated enum).
          size += 1 * options_.Count;
        }
      }
      if (hasBinary) {
        size += pb::CodedOutputStream.ComputeBytesSize(4, Binary);
      }
      size += UnknownFields.SerializedSize;
      memoizedSerializedSize = size;
      return size;
    }
  }
  // Standard generated ParseFrom overloads: each delegates to the Builder.
  public static TestXmlChild ParseFrom(pb::ByteString data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(byte[] data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(global::System.IO.Stream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  public static TestXmlChild ParseDelimitedFrom(global::System.IO.Stream input) {
    return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
  }
  public static TestXmlChild ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
  }
  public static TestXmlChild ParseFrom(pb::ICodedInputStream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlChild ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  // Freezes the repeated list; called exactly once when the message is published.
  private TestXmlChild MakeReadOnly() {
    options_.MakeReadOnly();
    return this;
  }
  public static Builder CreateBuilder() { return new Builder(); }
  public override Builder ToBuilder() { return CreateBuilder(this); }
  public override Builder CreateBuilderForType() { return new Builder(); }
  public static Builder CreateBuilder(TestXmlChild prototype) {
    return new Builder(prototype);
  }
  [global::System.SerializableAttribute()]
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  /// <summary>
  /// Copy-on-write builder for <see cref="TestXmlChild"/>: shares the source
  /// message until the first mutation, when <c>PrepareBuilder</c> clones it.
  /// </summary>
  public sealed partial class Builder : pb::GeneratedBuilder<TestXmlChild, Builder> {
    protected override Builder ThisBuilder {
      get { return this; }
    }
    public Builder() {
      result = DefaultInstance;
      resultIsReadOnly = true;
    }
    internal Builder(TestXmlChild cloneFrom) {
      result = cloneFrom;
      resultIsReadOnly = true;
    }
    // When true, `result` is a shared immutable instance and must be cloned
    // before any mutation (copy-on-write).
    private bool resultIsReadOnly;
    private TestXmlChild result;
    private TestXmlChild PrepareBuilder() {
      if (resultIsReadOnly) {
        TestXmlChild original = result;
        result = new TestXmlChild();
        resultIsReadOnly = false;
        MergeFrom(original);
      }
      return result;
    }
    public override bool IsInitialized {
      get { return result.IsInitialized; }
    }
    protected override TestXmlChild MessageBeingBuilt {
      get { return PrepareBuilder(); }
    }
    public override Builder Clear() {
      result = DefaultInstance;
      resultIsReadOnly = true;
      return this;
    }
    public override Builder Clone() {
      if (resultIsReadOnly) {
        return new Builder(result);
      } else {
        return new Builder().MergeFrom(result);
      }
    }
    public override pbd::MessageDescriptor DescriptorForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlChild.Descriptor; }
    }
    public override TestXmlChild DefaultInstanceForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlChild.DefaultInstance; }
    }
    public override TestXmlChild BuildPartial() {
      if (resultIsReadOnly) {
        return result;
      }
      resultIsReadOnly = true;
      return result.MakeReadOnly();
    }
    public override Builder MergeFrom(pb::IMessage other) {
      if (other is TestXmlChild) {
        return MergeFrom((TestXmlChild) other);
      } else {
        base.MergeFrom(other);
        return this;
      }
    }
    /// <summary>Merges set fields of <paramref name="other"/> into this builder.</summary>
    public override Builder MergeFrom(TestXmlChild other) {
      if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlChild.DefaultInstance) return this;
      PrepareBuilder();
      if (other.options_.Count != 0) {
        result.options_.Add(other.options_);
      }
      if (other.HasBinary) {
        Binary = other.Binary;
      }
      this.MergeUnknownFields(other.UnknownFields);
      return this;
    }
    public override Builder MergeFrom(pb::ICodedInputStream input) {
      return MergeFrom(input, pb::ExtensionRegistry.Empty);
    }
    /// <summary>
    /// Parses fields from <paramref name="input"/> until end-of-stream or an
    /// end-group tag; unrecognized fields are collected into UnknownFields.
    /// </summary>
    public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      PrepareBuilder();
      pb::UnknownFieldSet.Builder unknownFields = null;
      uint tag;
      string field_name;
      while (input.ReadTag(out tag, out field_name)) {
        // tag == 0 with a field name means a name-based format (e.g. XML/text):
        // map the name back to its wire tag via binary search.
        if(tag == 0 && field_name != null) {
          int field_ordinal = global::System.Array.BinarySearch(_testXmlChildFieldNames, field_name, global::System.StringComparer.Ordinal);
          if(field_ordinal >= 0)
            tag = _testXmlChildFieldTags[field_ordinal];
          else {
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            continue;
          }
        }
        switch (tag) {
          case 0: {
            throw pb::InvalidProtocolBufferException.InvalidTag();
          }
          default: {
            if (pb::WireFormat.IsEndGroupTag(tag)) {
              if (unknownFields != null) {
                this.UnknownFields = unknownFields.Build();
              }
              return this;
            }
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            break;
          }
          // 26 = packed, 24 = unpacked encoding of repeated enum field 3;
          // values not matching EnumOptions come back via unknownItems.
          case 26:
          case 24: {
            scg::ICollection<object> unknownItems;
            input.ReadEnumArray<global::Google.ProtocolBuffers.TestProtos.EnumOptions>(tag, field_name, result.options_, out unknownItems);
            if (unknownItems != null) {
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              foreach (object rawValue in unknownItems)
                if (rawValue is int)
                  unknownFields.MergeVarintField(3, (ulong)(int)rawValue);
            }
            break;
          }
          case 34: {
            result.hasBinary = input.ReadBytes(ref result.binary_);
            break;
          }
        }
      }
      if (unknownFields != null) {
        this.UnknownFields = unknownFields.Build();
      }
      return this;
    }
    /// <summary>Mutable list accessor; triggers copy-on-write.</summary>
    public pbc::IPopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> OptionsList {
      get { return PrepareBuilder().options_; }
    }
    public int OptionsCount {
      get { return result.OptionsCount; }
    }
    public global::Google.ProtocolBuffers.TestProtos.EnumOptions GetOptions(int index) {
      return result.GetOptions(index);
    }
    public Builder SetOptions(int index, global::Google.ProtocolBuffers.TestProtos.EnumOptions value) {
      PrepareBuilder();
      result.options_[index] = value;
      return this;
    }
    public Builder AddOptions(global::Google.ProtocolBuffers.TestProtos.EnumOptions value) {
      PrepareBuilder();
      result.options_.Add(value);
      return this;
    }
    public Builder AddRangeOptions(scg::IEnumerable<global::Google.ProtocolBuffers.TestProtos.EnumOptions> values) {
      PrepareBuilder();
      result.options_.Add(values);
      return this;
    }
    public Builder ClearOptions() {
      PrepareBuilder();
      result.options_.Clear();
      return this;
    }
    public bool HasBinary {
      get { return result.hasBinary; }
    }
    public pb::ByteString Binary {
      get { return result.Binary; }
      set { SetBinary(value); }
    }
    public Builder SetBinary(pb::ByteString value) {
      pb::ThrowHelper.ThrowIfNull(value, "value");
      PrepareBuilder();
      result.hasBinary = true;
      result.binary_ = value;
      return this;
    }
    public Builder ClearBinary() {
      PrepareBuilder();
      result.hasBinary = false;
      result.binary_ = pb::ByteString.Empty;
      return this;
    }
  }
  // Touching the holder type forces its static constructor (descriptor build) to run.
  static TestXmlChild() {
    object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
  }
}
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
// NOTE(review): protoc-generated message class — regenerate rather than hand-edit.
/// <summary>
/// Generated message <c>protobuf_unittest_extra.TestXmlNoFields</c>: declares no
/// fields at all, so serialization consists solely of any unknown fields.
/// </summary>
public sealed partial class TestXmlNoFields : pb::GeneratedMessage<TestXmlNoFields, TestXmlNoFields.Builder> {
  private TestXmlNoFields() { }
  private static readonly TestXmlNoFields defaultInstance = new TestXmlNoFields().MakeReadOnly();
  // Empty name/tag lookup tables: the message declares no fields.
  private static readonly string[] _testXmlNoFieldsFieldNames = new string[] { };
  private static readonly uint[] _testXmlNoFieldsFieldTags = new uint[] { };
  /// <summary>Shared read-only default instance.</summary>
  public static TestXmlNoFields DefaultInstance {
    get { return defaultInstance; }
  }
  public override TestXmlNoFields DefaultInstanceForType {
    get { return DefaultInstance; }
  }
  protected override TestXmlNoFields ThisMessage {
    get { return this; }
  }
  public static pbd::MessageDescriptor Descriptor {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlNoFields__Descriptor; }
  }
  protected override pb::FieldAccess.FieldAccessorTable<TestXmlNoFields, TestXmlNoFields.Builder> InternalFieldAccessors {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlNoFields__FieldAccessorTable; }
  }
  // No required fields, so always initialized.
  public override bool IsInitialized {
    get {
      return true;
    }
  }
  /// <summary>Writes only the unknown-field set (there are no declared fields).</summary>
  public override void WriteTo(pb::ICodedOutputStream output) {
    // Generated pattern: forces SerializedSize to be computed (and memoized) first.
    int size = SerializedSize;
    string[] field_names = _testXmlNoFieldsFieldNames;
    UnknownFields.WriteTo(output);
  }
  // -1 = not yet computed; safe to memoize because the message is immutable.
  private int memoizedSerializedSize = -1;
  public override int SerializedSize {
    get {
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      size += UnknownFields.SerializedSize;
      memoizedSerializedSize = size;
      return size;
    }
  }
  // Standard generated ParseFrom overloads: each delegates to the Builder.
  public static TestXmlNoFields ParseFrom(pb::ByteString data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(byte[] data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(global::System.IO.Stream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  public static TestXmlNoFields ParseDelimitedFrom(global::System.IO.Stream input) {
    return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
  }
  public static TestXmlNoFields ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(pb::ICodedInputStream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlNoFields ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  // Nothing to freeze — no repeated fields.
  private TestXmlNoFields MakeReadOnly() {
    return this;
  }
  public static Builder CreateBuilder() { return new Builder(); }
  public override Builder ToBuilder() { return CreateBuilder(this); }
  public override Builder CreateBuilderForType() { return new Builder(); }
  public static Builder CreateBuilder(TestXmlNoFields prototype) {
    return new Builder(prototype);
  }
  [global::System.SerializableAttribute()]
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  /// <summary>
  /// Copy-on-write builder for <see cref="TestXmlNoFields"/>: shares the source
  /// message until the first mutation, when <c>PrepareBuilder</c> clones it.
  /// </summary>
  public sealed partial class Builder : pb::GeneratedBuilder<TestXmlNoFields, Builder> {
    protected override Builder ThisBuilder {
      get { return this; }
    }
    public Builder() {
      result = DefaultInstance;
      resultIsReadOnly = true;
    }
    internal Builder(TestXmlNoFields cloneFrom) {
      result = cloneFrom;
      resultIsReadOnly = true;
    }
    // When true, `result` is a shared immutable instance and must be cloned
    // before any mutation (copy-on-write).
    private bool resultIsReadOnly;
    private TestXmlNoFields result;
    private TestXmlNoFields PrepareBuilder() {
      if (resultIsReadOnly) {
        TestXmlNoFields original = result;
        result = new TestXmlNoFields();
        resultIsReadOnly = false;
        MergeFrom(original);
      }
      return result;
    }
    public override bool IsInitialized {
      get { return result.IsInitialized; }
    }
    protected override TestXmlNoFields MessageBeingBuilt {
      get { return PrepareBuilder(); }
    }
    public override Builder Clear() {
      result = DefaultInstance;
      resultIsReadOnly = true;
      return this;
    }
    public override Builder Clone() {
      if (resultIsReadOnly) {
        return new Builder(result);
      } else {
        return new Builder().MergeFrom(result);
      }
    }
    public override pbd::MessageDescriptor DescriptorForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields.Descriptor; }
    }
    public override TestXmlNoFields DefaultInstanceForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields.DefaultInstance; }
    }
    public override TestXmlNoFields BuildPartial() {
      if (resultIsReadOnly) {
        return result;
      }
      resultIsReadOnly = true;
      return result.MakeReadOnly();
    }
    public override Builder MergeFrom(pb::IMessage other) {
      if (other is TestXmlNoFields) {
        return MergeFrom((TestXmlNoFields) other);
      } else {
        base.MergeFrom(other);
        return this;
      }
    }
    /// <summary>Only unknown fields can be merged — the message has no declared fields.</summary>
    public override Builder MergeFrom(TestXmlNoFields other) {
      if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlNoFields.DefaultInstance) return this;
      PrepareBuilder();
      this.MergeUnknownFields(other.UnknownFields);
      return this;
    }
    public override Builder MergeFrom(pb::ICodedInputStream input) {
      return MergeFrom(input, pb::ExtensionRegistry.Empty);
    }
    /// <summary>
    /// Parses the stream; every field lands in UnknownFields since none are declared.
    /// Stops at end-of-stream or an end-group tag.
    /// </summary>
    public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      PrepareBuilder();
      pb::UnknownFieldSet.Builder unknownFields = null;
      uint tag;
      string field_name;
      while (input.ReadTag(out tag, out field_name)) {
        // tag == 0 with a field name means a name-based format (e.g. XML/text);
        // with no declared names the binary search always misses here.
        if(tag == 0 && field_name != null) {
          int field_ordinal = global::System.Array.BinarySearch(_testXmlNoFieldsFieldNames, field_name, global::System.StringComparer.Ordinal);
          if(field_ordinal >= 0)
            tag = _testXmlNoFieldsFieldTags[field_ordinal];
          else {
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            continue;
          }
        }
        switch (tag) {
          case 0: {
            throw pb::InvalidProtocolBufferException.InvalidTag();
          }
          default: {
            if (pb::WireFormat.IsEndGroupTag(tag)) {
              if (unknownFields != null) {
                this.UnknownFields = unknownFields.Build();
              }
              return this;
            }
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            break;
          }
        }
      }
      if (unknownFields != null) {
        this.UnknownFields = unknownFields.Build();
      }
      return this;
    }
  }
  // Touching the holder type forces its static constructor (descriptor build) to run.
  static TestXmlNoFields() {
    object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
  }
}
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class TestXmlRescursive : pb::GeneratedMessage<TestXmlRescursive, TestXmlRescursive.Builder> {
private TestXmlRescursive() { }
private static readonly TestXmlRescursive defaultInstance = new TestXmlRescursive().MakeReadOnly();
private static readonly string[] _testXmlRescursiveFieldNames = new string[] { "child" };
private static readonly uint[] _testXmlRescursiveFieldTags = new uint[] { 10 };
public static TestXmlRescursive DefaultInstance {
get { return defaultInstance; }
}
public override TestXmlRescursive DefaultInstanceForType {
get { return DefaultInstance; }
}
protected override TestXmlRescursive ThisMessage {
get { return this; }
}
public static pbd::MessageDescriptor Descriptor {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlRescursive__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<TestXmlRescursive, TestXmlRescursive.Builder> InternalFieldAccessors {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlRescursive__FieldAccessorTable; }
}
public const int ChildFieldNumber = 1;
private bool hasChild;
private global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive child_;
public bool HasChild {
get { return hasChild; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive Child {
get { return child_ ?? global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.DefaultInstance; }
}
public override bool IsInitialized {
get {
return true;
}
}
public override void WriteTo(pb::ICodedOutputStream output) {
int size = SerializedSize;
string[] field_names = _testXmlRescursiveFieldNames;
if (hasChild) {
output.WriteMessage(1, field_names[0], Child);
}
UnknownFields.WriteTo(output);
}
private int memoizedSerializedSize = -1;
public override int SerializedSize {
get {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (hasChild) {
size += pb::CodedOutputStream.ComputeMessageSize(1, Child);
}
size += UnknownFields.SerializedSize;
memoizedSerializedSize = size;
return size;
}
}
public static TestXmlRescursive ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static TestXmlRescursive ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static TestXmlRescursive ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static TestXmlRescursive ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static TestXmlRescursive ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
    // This message has no repeated fields, so there are no lists to freeze;
    // the instance is returned unchanged.
    private TestXmlRescursive MakeReadOnly() {
      return this;
    }
    // Builder factories: a fresh builder, or one seeded from a prototype message.
    public static Builder CreateBuilder() { return new Builder(); }
    public override Builder ToBuilder() { return CreateBuilder(this); }
    public override Builder CreateBuilderForType() { return new Builder(); }
    public static Builder CreateBuilder(TestXmlRescursive prototype) {
      return new Builder(prototype);
    }
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Builder : pb::GeneratedBuilder<TestXmlRescursive, Builder> {
protected override Builder ThisBuilder {
get { return this; }
}
public Builder() {
result = DefaultInstance;
resultIsReadOnly = true;
}
internal Builder(TestXmlRescursive cloneFrom) {
result = cloneFrom;
resultIsReadOnly = true;
}
private bool resultIsReadOnly;
private TestXmlRescursive result;
private TestXmlRescursive PrepareBuilder() {
if (resultIsReadOnly) {
TestXmlRescursive original = result;
result = new TestXmlRescursive();
resultIsReadOnly = false;
MergeFrom(original);
}
return result;
}
public override bool IsInitialized {
get { return result.IsInitialized; }
}
protected override TestXmlRescursive MessageBeingBuilt {
get { return PrepareBuilder(); }
}
public override Builder Clear() {
result = DefaultInstance;
resultIsReadOnly = true;
return this;
}
public override Builder Clone() {
if (resultIsReadOnly) {
return new Builder(result);
} else {
return new Builder().MergeFrom(result);
}
}
public override pbd::MessageDescriptor DescriptorForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.Descriptor; }
}
public override TestXmlRescursive DefaultInstanceForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.DefaultInstance; }
}
public override TestXmlRescursive BuildPartial() {
if (resultIsReadOnly) {
return result;
}
resultIsReadOnly = true;
return result.MakeReadOnly();
}
public override Builder MergeFrom(pb::IMessage other) {
if (other is TestXmlRescursive) {
return MergeFrom((TestXmlRescursive) other);
} else {
base.MergeFrom(other);
return this;
}
}
public override Builder MergeFrom(TestXmlRescursive other) {
if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.DefaultInstance) return this;
PrepareBuilder();
if (other.HasChild) {
MergeChild(other.Child);
}
this.MergeUnknownFields(other.UnknownFields);
return this;
}
      public override Builder MergeFrom(pb::ICodedInputStream input) {
        return MergeFrom(input, pb::ExtensionRegistry.Empty);
      }
      // Reads fields from the stream until end-of-input or an end-group tag.
      // A tag of 0 with a non-null field_name indicates a name-based (non-binary)
      // reader: the name is resolved via binary search over the sorted
      // _testXmlRescursiveFieldNames array into the parallel tag table.
      public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
        PrepareBuilder();
        // Created lazily — only allocated if an unknown field is actually seen.
        pb::UnknownFieldSet.Builder unknownFields = null;
        uint tag;
        string field_name;
        while (input.ReadTag(out tag, out field_name)) {
          if(tag == 0 && field_name != null) {
            int field_ordinal = global::System.Array.BinarySearch(_testXmlRescursiveFieldNames, field_name, global::System.StringComparer.Ordinal);
            if(field_ordinal >= 0)
              tag = _testXmlRescursiveFieldTags[field_ordinal];
            else {
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              continue;
            }
          }
          // NOTE: C# allows the default label before other cases; generated code
          // relies on that so unrecognized tags are handled here.
          switch (tag) {
            case 0: {
              throw pb::InvalidProtocolBufferException.InvalidTag();
            }
            default: {
              // An end-group tag terminates parsing of this message.
              if (pb::WireFormat.IsEndGroupTag(tag)) {
                if (unknownFields != null) {
                  this.UnknownFields = unknownFields.Build();
                }
                return this;
              }
              if (unknownFields == null) {
                unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
              }
              ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
              break;
            }
            case 10: {
              // child (field 1, length-delimited): merge into any existing value
              // rather than overwriting it.
              global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.Builder subBuilder = global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.CreateBuilder();
              if (result.hasChild) {
                subBuilder.MergeFrom(Child);
              }
              input.ReadMessage(subBuilder, extensionRegistry);
              Child = subBuilder.BuildPartial();
              break;
            }
          }
        }
        if (unknownFields != null) {
          this.UnknownFields = unknownFields.Build();
        }
        return this;
      }
public bool HasChild {
get { return result.hasChild; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive Child {
get { return result.Child; }
set { SetChild(value); }
}
public Builder SetChild(global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.hasChild = true;
result.child_ = value;
return this;
}
public Builder SetChild(global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.Builder builderForValue) {
pb::ThrowHelper.ThrowIfNull(builderForValue, "builderForValue");
PrepareBuilder();
result.hasChild = true;
result.child_ = builderForValue.Build();
return this;
}
public Builder MergeChild(global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
if (result.hasChild &&
result.child_ != global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.DefaultInstance) {
result.child_ = global::Google.ProtocolBuffers.TestProtos.TestXmlRescursive.CreateBuilder(result.child_).MergeFrom(value).BuildPartial();
} else {
result.child_ = value;
}
result.hasChild = true;
return this;
}
public Builder ClearChild() {
PrepareBuilder();
result.hasChild = false;
result.child_ = null;
return this;
}
}
    // Type initializer: referencing the containing file's Descriptor forces its
    // static initialization to run before this type is used; the result of
    // ReferenceEquals is deliberately discarded.
    static TestXmlRescursive() {
      object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
    }
}
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class TestXmlMessage : pb::ExtendableMessage<TestXmlMessage, TestXmlMessage.Builder> {
private TestXmlMessage() { }
private static readonly TestXmlMessage defaultInstance = new TestXmlMessage().MakeReadOnly();
private static readonly string[] _testXmlMessageFieldNames = new string[] { "child", "children", "number", "numbers", "text", "textlines", "valid" };
private static readonly uint[] _testXmlMessageFieldTags = new uint[] { 10, 3211, 48, 16, 26, 5602, 40 };
public static TestXmlMessage DefaultInstance {
get { return defaultInstance; }
}
public override TestXmlMessage DefaultInstanceForType {
get { return DefaultInstance; }
}
protected override TestXmlMessage ThisMessage {
get { return this; }
}
public static pbd::MessageDescriptor Descriptor {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlMessage__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<TestXmlMessage, TestXmlMessage.Builder> InternalFieldAccessors {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlMessage__FieldAccessorTable; }
}
#region Nested types
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public static partial class Types {
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Children : pb::GeneratedMessage<Children, Children.Builder> {
private Children() { }
private static readonly Children defaultInstance = new Children().MakeReadOnly();
private static readonly string[] _childrenFieldNames = new string[] { "binary", "options" };
private static readonly uint[] _childrenFieldTags = new uint[] { 34, 24 };
public static Children DefaultInstance {
get { return defaultInstance; }
}
public override Children DefaultInstanceForType {
get { return DefaultInstance; }
}
protected override Children ThisMessage {
get { return this; }
}
public static pbd::MessageDescriptor Descriptor {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlMessage_Children__Descriptor; }
}
protected override pb::FieldAccess.FieldAccessorTable<Children, Children.Builder> InternalFieldAccessors {
get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlMessage_Children__FieldAccessorTable; }
}
public const int OptionsFieldNumber = 3;
private pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> options_ = new pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions>();
public scg::IList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> OptionsList {
get { return pbc::Lists.AsReadOnly(options_); }
}
public int OptionsCount {
get { return options_.Count; }
}
public global::Google.ProtocolBuffers.TestProtos.EnumOptions GetOptions(int index) {
return options_[index];
}
public const int BinaryFieldNumber = 4;
private bool hasBinary;
private pb::ByteString binary_ = pb::ByteString.Empty;
public bool HasBinary {
get { return hasBinary; }
}
public pb::ByteString Binary {
get { return binary_; }
}
public override bool IsInitialized {
get {
return true;
}
}
        // Serializes set fields in field-number order. field_names is the
        // alphabetically sorted name table ("binary", "options"), so its
        // indexes do not correspond to field numbers.
        public override void WriteTo(pb::ICodedOutputStream output) {
          // Forces SerializedSize to be computed (and memoized) before writing.
          int size = SerializedSize;
          string[] field_names = _childrenFieldNames;
          if (options_.Count > 0) {
            output.WriteEnumArray(3, field_names[1], options_);
          }
          if (hasBinary) {
            output.WriteBytes(4, field_names[0], Binary);
          }
          UnknownFields.WriteTo(output);
        }
        private int memoizedSerializedSize = -1;  // -1 => size not yet computed
        // Computes (once) and caches the serialized byte size of this message.
        public override int SerializedSize {
          get {
            int size = memoizedSerializedSize;
            if (size != -1) return size;
            size = 0;
            {
              int dataSize = 0;
              if (options_.Count > 0) {
                foreach (global::Google.ProtocolBuffers.TestProtos.EnumOptions element in options_) {
                  dataSize += pb::CodedOutputStream.ComputeEnumSizeNoTag((int) element);
                }
                size += dataSize;
                // One byte of tag overhead per element (field 3 => 1-byte varint tag).
                size += 1 * options_.Count;
              }
            }
            if (hasBinary) {
              size += pb::CodedOutputStream.ComputeBytesSize(4, Binary);
            }
            size += UnknownFields.SerializedSize;
            memoizedSerializedSize = size;
            return size;
          }
        }
public static Children ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static Children ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static Children ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static Children ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static Children ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static Children ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static Children ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static Children ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static Children ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static Children ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
private Children MakeReadOnly() {
options_.MakeReadOnly();
return this;
}
public static Builder CreateBuilder() { return new Builder(); }
public override Builder ToBuilder() { return CreateBuilder(this); }
public override Builder CreateBuilderForType() { return new Builder(); }
public static Builder CreateBuilder(Children prototype) {
return new Builder(prototype);
}
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Builder : pb::GeneratedBuilder<Children, Builder> {
protected override Builder ThisBuilder {
get { return this; }
}
public Builder() {
result = DefaultInstance;
resultIsReadOnly = true;
}
internal Builder(Children cloneFrom) {
result = cloneFrom;
resultIsReadOnly = true;
}
private bool resultIsReadOnly;
private Children result;
private Children PrepareBuilder() {
if (resultIsReadOnly) {
Children original = result;
result = new Children();
resultIsReadOnly = false;
MergeFrom(original);
}
return result;
}
public override bool IsInitialized {
get { return result.IsInitialized; }
}
protected override Children MessageBeingBuilt {
get { return PrepareBuilder(); }
}
public override Builder Clear() {
result = DefaultInstance;
resultIsReadOnly = true;
return this;
}
public override Builder Clone() {
if (resultIsReadOnly) {
return new Builder(result);
} else {
return new Builder().MergeFrom(result);
}
}
public override pbd::MessageDescriptor DescriptorForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.Descriptor; }
}
public override Children DefaultInstanceForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.DefaultInstance; }
}
public override Children BuildPartial() {
if (resultIsReadOnly) {
return result;
}
resultIsReadOnly = true;
return result.MakeReadOnly();
}
public override Builder MergeFrom(pb::IMessage other) {
if (other is Children) {
return MergeFrom((Children) other);
} else {
base.MergeFrom(other);
return this;
}
}
public override Builder MergeFrom(Children other) {
if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.DefaultInstance) return this;
PrepareBuilder();
if (other.options_.Count != 0) {
result.options_.Add(other.options_);
}
if (other.HasBinary) {
Binary = other.Binary;
}
this.MergeUnknownFields(other.UnknownFields);
return this;
}
public override Builder MergeFrom(pb::ICodedInputStream input) {
return MergeFrom(input, pb::ExtensionRegistry.Empty);
}
public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
PrepareBuilder();
pb::UnknownFieldSet.Builder unknownFields = null;
uint tag;
string field_name;
while (input.ReadTag(out tag, out field_name)) {
if(tag == 0 && field_name != null) {
int field_ordinal = global::System.Array.BinarySearch(_childrenFieldNames, field_name, global::System.StringComparer.Ordinal);
if(field_ordinal >= 0)
tag = _childrenFieldTags[field_ordinal];
else {
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
continue;
}
}
switch (tag) {
case 0: {
throw pb::InvalidProtocolBufferException.InvalidTag();
}
default: {
if (pb::WireFormat.IsEndGroupTag(tag)) {
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
break;
}
case 26:
case 24: {
scg::ICollection<object> unknownItems;
input.ReadEnumArray<global::Google.ProtocolBuffers.TestProtos.EnumOptions>(tag, field_name, result.options_, out unknownItems);
if (unknownItems != null) {
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
foreach (object rawValue in unknownItems)
if (rawValue is int)
unknownFields.MergeVarintField(3, (ulong)(int)rawValue);
}
break;
}
case 34: {
result.hasBinary = input.ReadBytes(ref result.binary_);
break;
}
}
}
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
public pbc::IPopsicleList<global::Google.ProtocolBuffers.TestProtos.EnumOptions> OptionsList {
get { return PrepareBuilder().options_; }
}
public int OptionsCount {
get { return result.OptionsCount; }
}
public global::Google.ProtocolBuffers.TestProtos.EnumOptions GetOptions(int index) {
return result.GetOptions(index);
}
public Builder SetOptions(int index, global::Google.ProtocolBuffers.TestProtos.EnumOptions value) {
PrepareBuilder();
result.options_[index] = value;
return this;
}
public Builder AddOptions(global::Google.ProtocolBuffers.TestProtos.EnumOptions value) {
PrepareBuilder();
result.options_.Add(value);
return this;
}
public Builder AddRangeOptions(scg::IEnumerable<global::Google.ProtocolBuffers.TestProtos.EnumOptions> values) {
PrepareBuilder();
result.options_.Add(values);
return this;
}
public Builder ClearOptions() {
PrepareBuilder();
result.options_.Clear();
return this;
}
public bool HasBinary {
get { return result.hasBinary; }
}
public pb::ByteString Binary {
get { return result.Binary; }
set { SetBinary(value); }
}
public Builder SetBinary(pb::ByteString value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.hasBinary = true;
result.binary_ = value;
return this;
}
public Builder ClearBinary() {
PrepareBuilder();
result.hasBinary = false;
result.binary_ = pb::ByteString.Empty;
return this;
}
}
static Children() {
object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
}
}
}
#endregion
public const int NumberFieldNumber = 6;
private bool hasNumber;
private long number_;
public bool HasNumber {
get { return hasNumber; }
}
public long Number {
get { return number_; }
}
public const int NumbersFieldNumber = 2;
private pbc::PopsicleList<int> numbers_ = new pbc::PopsicleList<int>();
public scg::IList<int> NumbersList {
get { return pbc::Lists.AsReadOnly(numbers_); }
}
public int NumbersCount {
get { return numbers_.Count; }
}
public int GetNumbers(int index) {
return numbers_[index];
}
public const int TextFieldNumber = 3;
private bool hasText;
private string text_ = "";
public bool HasText {
get { return hasText; }
}
public string Text {
get { return text_; }
}
public const int TextlinesFieldNumber = 700;
private pbc::PopsicleList<string> textlines_ = new pbc::PopsicleList<string>();
public scg::IList<string> TextlinesList {
get { return pbc::Lists.AsReadOnly(textlines_); }
}
public int TextlinesCount {
get { return textlines_.Count; }
}
public string GetTextlines(int index) {
return textlines_[index];
}
public const int ValidFieldNumber = 5;
private bool hasValid;
private bool valid_;
public bool HasValid {
get { return hasValid; }
}
public bool Valid {
get { return valid_; }
}
public const int ChildFieldNumber = 1;
private bool hasChild;
private global::Google.ProtocolBuffers.TestProtos.TestXmlChild child_;
public bool HasChild {
get { return hasChild; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlChild Child {
get { return child_ ?? global::Google.ProtocolBuffers.TestProtos.TestXmlChild.DefaultInstance; }
}
public const int ChildrenFieldNumber = 401;
private pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children> children_ = new pbc::PopsicleList<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children>();
public scg::IList<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children> ChildrenList {
get { return children_; }
}
public int ChildrenCount {
get { return children_.Count; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children GetChildren(int index) {
return children_[index];
}
public override bool IsInitialized {
get {
if (!ExtensionsAreInitialized) return false;
return true;
}
}
    // Serializes set fields in ascending field-number order: child(1),
    // numbers(2), text(3), valid(5), number(6), extensions below 200,
    // children group(401), textlines(700). field_names is the alphabetically
    // sorted name table, so indexes differ from field numbers.
    public override void WriteTo(pb::ICodedOutputStream output) {
      // Forces SerializedSize to be computed (and memoized) before writing.
      int size = SerializedSize;
      string[] field_names = _testXmlMessageFieldNames;
      pb::ExtendableMessage<TestXmlMessage, TestXmlMessage.Builder>.ExtensionWriter extensionWriter = CreateExtensionWriter(this);
      if (hasChild) {
        output.WriteMessage(1, field_names[0], Child);
      }
      if (numbers_.Count > 0) {
        output.WriteInt32Array(2, field_names[3], numbers_);
      }
      if (hasText) {
        output.WriteString(3, field_names[4], Text);
      }
      if (hasValid) {
        output.WriteBool(5, field_names[6], Valid);
      }
      if (hasNumber) {
        output.WriteInt64(6, field_names[2], Number);
      }
      // Emits extension fields with numbers below 200 at this point in the stream.
      extensionWriter.WriteUntil(200, output);
      if (children_.Count > 0) {
        output.WriteGroupArray(401, field_names[1], children_);
      }
      if (textlines_.Count > 0) {
        output.WriteStringArray(700, field_names[5], textlines_);
      }
      UnknownFields.WriteTo(output);
    }
    private int memoizedSerializedSize = -1;  // -1 => size not yet computed
    // Computes (once) and caches the serialized byte size of this message,
    // including extensions and unknown fields.
    public override int SerializedSize {
      get {
        int size = memoizedSerializedSize;
        if (size != -1) return size;
        size = 0;
        if (hasNumber) {
          size += pb::CodedOutputStream.ComputeInt64Size(6, Number);
        }
        {
          int dataSize = 0;
          foreach (int element in NumbersList) {
            dataSize += pb::CodedOutputStream.ComputeInt32SizeNoTag(element);
          }
          size += dataSize;
          // One byte of tag overhead per element (field 2 => tag 16, 1-byte varint).
          size += 1 * numbers_.Count;
        }
        if (hasText) {
          size += pb::CodedOutputStream.ComputeStringSize(3, Text);
        }
        {
          int dataSize = 0;
          foreach (string element in TextlinesList) {
            dataSize += pb::CodedOutputStream.ComputeStringSizeNoTag(element);
          }
          size += dataSize;
          // Two bytes of tag overhead per element (field 700 => tag 5602, 2-byte varint).
          size += 2 * textlines_.Count;
        }
        if (hasValid) {
          size += pb::CodedOutputStream.ComputeBoolSize(5, Valid);
        }
        if (hasChild) {
          size += pb::CodedOutputStream.ComputeMessageSize(1, Child);
        }
        foreach (global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children element in ChildrenList) {
          size += pb::CodedOutputStream.ComputeGroupSize(401, element);
        }
        size += ExtensionsSerializedSize;
        size += UnknownFields.SerializedSize;
        memoizedSerializedSize = size;
        return size;
      }
    }
public static TestXmlMessage ParseFrom(pb::ByteString data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static TestXmlMessage ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static TestXmlMessage ParseFrom(byte[] data) {
return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
}
public static TestXmlMessage ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
}
public static TestXmlMessage ParseFrom(global::System.IO.Stream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static TestXmlMessage ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
public static TestXmlMessage ParseDelimitedFrom(global::System.IO.Stream input) {
return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
}
public static TestXmlMessage ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
}
public static TestXmlMessage ParseFrom(pb::ICodedInputStream input) {
return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
}
public static TestXmlMessage ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
}
    // Freezes all repeated-field lists and returns this instance, making the
    // message safe to expose as an immutable default/parsed result.
    private TestXmlMessage MakeReadOnly() {
      numbers_.MakeReadOnly();
      textlines_.MakeReadOnly();
      children_.MakeReadOnly();
      return this;
    }
public static Builder CreateBuilder() { return new Builder(); }
public override Builder ToBuilder() { return CreateBuilder(this); }
public override Builder CreateBuilderForType() { return new Builder(); }
public static Builder CreateBuilder(TestXmlMessage prototype) {
return new Builder(prototype);
}
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class Builder : pb::ExtendableBuilder<TestXmlMessage, Builder> {
protected override Builder ThisBuilder {
get { return this; }
}
public Builder() {
result = DefaultInstance;
resultIsReadOnly = true;
}
internal Builder(TestXmlMessage cloneFrom) {
result = cloneFrom;
resultIsReadOnly = true;
}
private bool resultIsReadOnly;
private TestXmlMessage result;
private TestXmlMessage PrepareBuilder() {
if (resultIsReadOnly) {
TestXmlMessage original = result;
result = new TestXmlMessage();
resultIsReadOnly = false;
MergeFrom(original);
}
return result;
}
public override bool IsInitialized {
get { return result.IsInitialized; }
}
protected override TestXmlMessage MessageBeingBuilt {
get { return PrepareBuilder(); }
}
public override Builder Clear() {
result = DefaultInstance;
resultIsReadOnly = true;
return this;
}
public override Builder Clone() {
if (resultIsReadOnly) {
return new Builder(result);
} else {
return new Builder().MergeFrom(result);
}
}
public override pbd::MessageDescriptor DescriptorForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Descriptor; }
}
public override TestXmlMessage DefaultInstanceForType {
get { return global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.DefaultInstance; }
}
public override TestXmlMessage BuildPartial() {
if (resultIsReadOnly) {
return result;
}
resultIsReadOnly = true;
return result.MakeReadOnly();
}
public override Builder MergeFrom(pb::IMessage other) {
if (other is TestXmlMessage) {
return MergeFrom((TestXmlMessage) other);
} else {
base.MergeFrom(other);
return this;
}
}
      // Merges another message into this builder: set scalar fields overwrite,
      // repeated fields are appended, and the singular child message is merged
      // recursively. Extensions and unknown fields are merged last.
      public override Builder MergeFrom(TestXmlMessage other) {
        // Merging the default instance is a no-op.
        if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.DefaultInstance) return this;
        PrepareBuilder();
        if (other.HasNumber) {
          Number = other.Number;
        }
        if (other.numbers_.Count != 0) {
          result.numbers_.Add(other.numbers_);
        }
        if (other.HasText) {
          Text = other.Text;
        }
        if (other.textlines_.Count != 0) {
          result.textlines_.Add(other.textlines_);
        }
        if (other.HasValid) {
          Valid = other.Valid;
        }
        if (other.HasChild) {
          MergeChild(other.Child);
        }
        if (other.children_.Count != 0) {
          result.children_.Add(other.children_);
        }
        this.MergeExtensionFields(other);
        this.MergeUnknownFields(other.UnknownFields);
        return this;
      }
public override Builder MergeFrom(pb::ICodedInputStream input) {
return MergeFrom(input, pb::ExtensionRegistry.Empty);
}
public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
PrepareBuilder();
pb::UnknownFieldSet.Builder unknownFields = null;
uint tag;
string field_name;
while (input.ReadTag(out tag, out field_name)) {
if(tag == 0 && field_name != null) {
int field_ordinal = global::System.Array.BinarySearch(_testXmlMessageFieldNames, field_name, global::System.StringComparer.Ordinal);
if(field_ordinal >= 0)
tag = _testXmlMessageFieldTags[field_ordinal];
else {
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
continue;
}
}
switch (tag) {
case 0: {
throw pb::InvalidProtocolBufferException.InvalidTag();
}
default: {
if (pb::WireFormat.IsEndGroupTag(tag)) {
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
if (unknownFields == null) {
unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
}
ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
break;
}
case 10: {
global::Google.ProtocolBuffers.TestProtos.TestXmlChild.Builder subBuilder = global::Google.ProtocolBuffers.TestProtos.TestXmlChild.CreateBuilder();
if (result.hasChild) {
subBuilder.MergeFrom(Child);
}
input.ReadMessage(subBuilder, extensionRegistry);
Child = subBuilder.BuildPartial();
break;
}
case 18:
case 16: {
input.ReadInt32Array(tag, field_name, result.numbers_);
break;
}
case 26: {
result.hasText = input.ReadString(ref result.text_);
break;
}
case 40: {
result.hasValid = input.ReadBool(ref result.valid_);
break;
}
case 48: {
result.hasNumber = input.ReadInt64(ref result.number_);
break;
}
case 3211: {
input.ReadGroupArray(tag, field_name, result.children_, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.DefaultInstance, extensionRegistry);
break;
}
case 5602: {
input.ReadStringArray(tag, field_name, result.textlines_);
break;
}
}
}
if (unknownFields != null) {
this.UnknownFields = unknownFields.Build();
}
return this;
}
public bool HasNumber {
get { return result.hasNumber; }
}
public long Number {
get { return result.Number; }
set { SetNumber(value); }
}
public Builder SetNumber(long value) {
PrepareBuilder();
result.hasNumber = true;
result.number_ = value;
return this;
}
public Builder ClearNumber() {
PrepareBuilder();
result.hasNumber = false;
result.number_ = 0L;
return this;
}
public pbc::IPopsicleList<int> NumbersList {
get { return PrepareBuilder().numbers_; }
}
public int NumbersCount {
get { return result.NumbersCount; }
}
public int GetNumbers(int index) {
return result.GetNumbers(index);
}
public Builder SetNumbers(int index, int value) {
PrepareBuilder();
result.numbers_[index] = value;
return this;
}
public Builder AddNumbers(int value) {
PrepareBuilder();
result.numbers_.Add(value);
return this;
}
public Builder AddRangeNumbers(scg::IEnumerable<int> values) {
PrepareBuilder();
result.numbers_.Add(values);
return this;
}
public Builder ClearNumbers() {
PrepareBuilder();
result.numbers_.Clear();
return this;
}
public bool HasText {
get { return result.hasText; }
}
public string Text {
get { return result.Text; }
set { SetText(value); }
}
public Builder SetText(string value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.hasText = true;
result.text_ = value;
return this;
}
public Builder ClearText() {
PrepareBuilder();
result.hasText = false;
result.text_ = "";
return this;
}
public pbc::IPopsicleList<string> TextlinesList {
get { return PrepareBuilder().textlines_; }
}
public int TextlinesCount {
get { return result.TextlinesCount; }
}
public string GetTextlines(int index) {
return result.GetTextlines(index);
}
public Builder SetTextlines(int index, string value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.textlines_[index] = value;
return this;
}
public Builder AddTextlines(string value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.textlines_.Add(value);
return this;
}
public Builder AddRangeTextlines(scg::IEnumerable<string> values) {
PrepareBuilder();
result.textlines_.Add(values);
return this;
}
public Builder ClearTextlines() {
PrepareBuilder();
result.textlines_.Clear();
return this;
}
public bool HasValid {
get { return result.hasValid; }
}
public bool Valid {
get { return result.Valid; }
set { SetValid(value); }
}
public Builder SetValid(bool value) {
PrepareBuilder();
result.hasValid = true;
result.valid_ = value;
return this;
}
public Builder ClearValid() {
PrepareBuilder();
result.hasValid = false;
result.valid_ = false;
return this;
}
public bool HasChild {
get { return result.hasChild; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlChild Child {
get { return result.Child; }
set { SetChild(value); }
}
public Builder SetChild(global::Google.ProtocolBuffers.TestProtos.TestXmlChild value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
result.hasChild = true;
result.child_ = value;
return this;
}
public Builder SetChild(global::Google.ProtocolBuffers.TestProtos.TestXmlChild.Builder builderForValue) {
pb::ThrowHelper.ThrowIfNull(builderForValue, "builderForValue");
PrepareBuilder();
result.hasChild = true;
result.child_ = builderForValue.Build();
return this;
}
public Builder MergeChild(global::Google.ProtocolBuffers.TestProtos.TestXmlChild value) {
pb::ThrowHelper.ThrowIfNull(value, "value");
PrepareBuilder();
if (result.hasChild &&
result.child_ != global::Google.ProtocolBuffers.TestProtos.TestXmlChild.DefaultInstance) {
result.child_ = global::Google.ProtocolBuffers.TestProtos.TestXmlChild.CreateBuilder(result.child_).MergeFrom(value).BuildPartial();
} else {
result.child_ = value;
}
result.hasChild = true;
return this;
}
public Builder ClearChild() {
PrepareBuilder();
result.hasChild = false;
result.child_ = null;
return this;
}
public pbc::IPopsicleList<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children> ChildrenList {
get { return PrepareBuilder().children_; }
}
public int ChildrenCount {
get { return result.ChildrenCount; }
}
public global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children GetChildren(int index) {
return result.GetChildren(index);
}
// Replaces the element at |index| of the repeated "children" field.
public Builder SetChildren(int index, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children value) {
  pb::ThrowHelper.ThrowIfNull(value, "value");
  PrepareBuilder();
  result.children_[index] = value;
  return this;
}
// Overload: builds the element from a nested builder before storing it at |index|.
public Builder SetChildren(int index, global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.Builder builderForValue) {
  pb::ThrowHelper.ThrowIfNull(builderForValue, "builderForValue");
  PrepareBuilder();
  result.children_[index] = builderForValue.Build();
  return this;
}
// Appends one element to the repeated "children" field.
public Builder AddChildren(global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children value) {
  pb::ThrowHelper.ThrowIfNull(value, "value");
  PrepareBuilder();
  result.children_.Add(value);
  return this;
}
// Overload: builds the element from a nested builder before appending it.
public Builder AddChildren(global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children.Builder builderForValue) {
  pb::ThrowHelper.ThrowIfNull(builderForValue, "builderForValue");
  PrepareBuilder();
  result.children_.Add(builderForValue.Build());
  return this;
}
// Appends every element of |values| to the repeated "children" field.
// Null-guarded for consistency with the other setters in this builder, which
// all call pb::ThrowHelper.ThrowIfNull; previously a null sequence would have
// surfaced later as a NullReferenceException inside the list.
public Builder AddRangeChildren(scg::IEnumerable<global::Google.ProtocolBuffers.TestProtos.TestXmlMessage.Types.Children> values) {
  pb::ThrowHelper.ThrowIfNull(values, "values");
  PrepareBuilder();
  result.children_.Add(values);
  return this;
}
// Removes every element from the repeated "children" field.
public Builder ClearChildren() {
  PrepareBuilder();
  result.children_.Clear();
  return this;
}
}
// Static ctor: touching the proto file's Descriptor forces its static
// initialization (registering descriptors) before this message type is used.
static TestXmlMessage() {
  object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
}
}
// Generated protobuf message with a single required int32 field "number" (tag 1).
[global::System.SerializableAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
public sealed partial class TestXmlExtension : pb::GeneratedMessage<TestXmlExtension, TestXmlExtension.Builder> {
  private TestXmlExtension() { }
  private static readonly TestXmlExtension defaultInstance = new TestXmlExtension().MakeReadOnly();
  // Field-name/tag tables used by the named-format parse loop below (binary search by name).
  private static readonly string[] _testXmlExtensionFieldNames = new string[] { "number" };
  private static readonly uint[] _testXmlExtensionFieldTags = new uint[] { 8 };
  public static TestXmlExtension DefaultInstance {
    get { return defaultInstance; }
  }
  public override TestXmlExtension DefaultInstanceForType {
    get { return DefaultInstance; }
  }
  protected override TestXmlExtension ThisMessage {
    get { return this; }
  }
  public static pbd::MessageDescriptor Descriptor {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlExtension__Descriptor; }
  }
  protected override pb::FieldAccess.FieldAccessorTable<TestXmlExtension, TestXmlExtension.Builder> InternalFieldAccessors {
    get { return global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.internal__static_protobuf_unittest_extra_TestXmlExtension__FieldAccessorTable; }
  }
  public const int NumberFieldNumber = 1;
  private bool hasNumber;
  private int number_;
  public bool HasNumber {
    get { return hasNumber; }
  }
  public int Number {
    get { return number_; }
  }
  // "number" must be set for the message to be considered initialized.
  public override bool IsInitialized {
    get {
      if (!hasNumber) return false;
      return true;
    }
  }
  public override void WriteTo(pb::ICodedOutputStream output) {
    // Reading SerializedSize first memoizes the size before serialization begins.
    int size = SerializedSize;
    string[] field_names = _testXmlExtensionFieldNames;
    if (hasNumber) {
      output.WriteInt32(1, field_names[0], Number);
    }
    UnknownFields.WriteTo(output);
  }
  private int memoizedSerializedSize = -1;
  public override int SerializedSize {
    get {
      // Cached after the first computation; -1 means "not yet computed".
      int size = memoizedSerializedSize;
      if (size != -1) return size;
      size = 0;
      if (hasNumber) {
        size += pb::CodedOutputStream.ComputeInt32Size(1, Number);
      }
      size += UnknownFields.SerializedSize;
      memoizedSerializedSize = size;
      return size;
    }
  }
  public static TestXmlExtension ParseFrom(pb::ByteString data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(pb::ByteString data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(byte[] data) {
    return ((Builder) CreateBuilder().MergeFrom(data)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(byte[] data, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(data, extensionRegistry)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(global::System.IO.Stream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  public static TestXmlExtension ParseDelimitedFrom(global::System.IO.Stream input) {
    return CreateBuilder().MergeDelimitedFrom(input).BuildParsed();
  }
  public static TestXmlExtension ParseDelimitedFrom(global::System.IO.Stream input, pb::ExtensionRegistry extensionRegistry) {
    return CreateBuilder().MergeDelimitedFrom(input, extensionRegistry).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(pb::ICodedInputStream input) {
    return ((Builder) CreateBuilder().MergeFrom(input)).BuildParsed();
  }
  public static TestXmlExtension ParseFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
    return ((Builder) CreateBuilder().MergeFrom(input, extensionRegistry)).BuildParsed();
  }
  private TestXmlExtension MakeReadOnly() {
    return this;
  }
  public static Builder CreateBuilder() { return new Builder(); }
  public override Builder ToBuilder() { return CreateBuilder(this); }
  public override Builder CreateBuilderForType() { return new Builder(); }
  public static Builder CreateBuilder(TestXmlExtension prototype) {
    return new Builder(prototype);
  }
  [global::System.SerializableAttribute()]
  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
  public sealed partial class Builder : pb::GeneratedBuilder<TestXmlExtension, Builder> {
    protected override Builder ThisBuilder {
      get { return this; }
    }
    public Builder() {
      result = DefaultInstance;
      resultIsReadOnly = true;
    }
    internal Builder(TestXmlExtension cloneFrom) {
      result = cloneFrom;
      resultIsReadOnly = true;
    }
    // Copy-on-write: "result" is shared and read-only until the first mutation
    // forces PrepareBuilder() to clone it into a private writable instance.
    private bool resultIsReadOnly;
    private TestXmlExtension result;
    private TestXmlExtension PrepareBuilder() {
      if (resultIsReadOnly) {
        TestXmlExtension original = result;
        result = new TestXmlExtension();
        resultIsReadOnly = false;
        MergeFrom(original);
      }
      return result;
    }
    public override bool IsInitialized {
      get { return result.IsInitialized; }
    }
    protected override TestXmlExtension MessageBeingBuilt {
      get { return PrepareBuilder(); }
    }
    public override Builder Clear() {
      result = DefaultInstance;
      resultIsReadOnly = true;
      return this;
    }
    public override Builder Clone() {
      if (resultIsReadOnly) {
        return new Builder(result);
      } else {
        return new Builder().MergeFrom(result);
      }
    }
    public override pbd::MessageDescriptor DescriptorForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlExtension.Descriptor; }
    }
    public override TestXmlExtension DefaultInstanceForType {
      get { return global::Google.ProtocolBuffers.TestProtos.TestXmlExtension.DefaultInstance; }
    }
    public override TestXmlExtension BuildPartial() {
      if (resultIsReadOnly) {
        return result;
      }
      // Freeze the built message; further mutations will clone via PrepareBuilder().
      resultIsReadOnly = true;
      return result.MakeReadOnly();
    }
    public override Builder MergeFrom(pb::IMessage other) {
      if (other is TestXmlExtension) {
        return MergeFrom((TestXmlExtension) other);
      } else {
        base.MergeFrom(other);
        return this;
      }
    }
    public override Builder MergeFrom(TestXmlExtension other) {
      if (other == global::Google.ProtocolBuffers.TestProtos.TestXmlExtension.DefaultInstance) return this;
      PrepareBuilder();
      if (other.HasNumber) {
        Number = other.Number;
      }
      this.MergeUnknownFields(other.UnknownFields);
      return this;
    }
    public override Builder MergeFrom(pb::ICodedInputStream input) {
      return MergeFrom(input, pb::ExtensionRegistry.Empty);
    }
    public override Builder MergeFrom(pb::ICodedInputStream input, pb::ExtensionRegistry extensionRegistry) {
      PrepareBuilder();
      pb::UnknownFieldSet.Builder unknownFields = null;
      uint tag;
      string field_name;
      while (input.ReadTag(out tag, out field_name)) {
        // Named formats (e.g. XML) report tag 0 plus a field name; map the name
        // back to its wire tag via the sorted name table.
        if(tag == 0 && field_name != null) {
          int field_ordinal = global::System.Array.BinarySearch(_testXmlExtensionFieldNames, field_name, global::System.StringComparer.Ordinal);
          if(field_ordinal >= 0)
            tag = _testXmlExtensionFieldTags[field_ordinal];
          else {
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            continue;
          }
        }
        switch (tag) {
          case 0: {
            throw pb::InvalidProtocolBufferException.InvalidTag();
          }
          default: {
            if (pb::WireFormat.IsEndGroupTag(tag)) {
              if (unknownFields != null) {
                this.UnknownFields = unknownFields.Build();
              }
              return this;
            }
            if (unknownFields == null) {
              unknownFields = pb::UnknownFieldSet.CreateBuilder(this.UnknownFields);
            }
            ParseUnknownField(input, unknownFields, extensionRegistry, tag, field_name);
            break;
          }
          case 8: {
            result.hasNumber = input.ReadInt32(ref result.number_);
            break;
          }
        }
      }
      if (unknownFields != null) {
        this.UnknownFields = unknownFields.Build();
      }
      return this;
    }
    public bool HasNumber {
      get { return result.hasNumber; }
    }
    public int Number {
      get { return result.Number; }
      set { SetNumber(value); }
    }
    public Builder SetNumber(int value) {
      PrepareBuilder();
      result.hasNumber = true;
      result.number_ = value;
      return this;
    }
    public Builder ClearNumber() {
      PrepareBuilder();
      result.hasNumber = false;
      result.number_ = 0;
      return this;
    }
  }
  // Static ctor: touching the proto file's Descriptor forces descriptor registration.
  static TestXmlExtension() {
    object.ReferenceEquals(global::Google.ProtocolBuffers.TestProtos.UnitTestXmlSerializerTestProtoFile.Descriptor, null);
  }
}
#endregion
}
#endregion Designer generated code
| {
"content_hash": "757fc5e1d0caeb3963dfe434d9bcd3b3",
"timestamp": "",
"source": "github",
"line_count": 2258,
"max_line_length": 311,
"avg_line_length": 40.391939769707704,
"alnum_prop": 0.6418069184803464,
"repo_name": "igorgatis/protobuf-csharp-port",
"id": "8505c8f3c2d1b92def6cee44c06c4a2dd8a5e12c",
"size": "91388",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ProtocolBuffers.Test/TestProtos/UnitTestXmlSerializerTestProtoFile.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2133"
},
{
"name": "C#",
"bytes": "7345848"
},
{
"name": "HTML",
"bytes": "6020"
},
{
"name": "Protocol Buffer",
"bytes": "395102"
},
{
"name": "Python",
"bytes": "8912"
},
{
"name": "Shell",
"bytes": "2436"
}
],
"symlink_target": ""
} |
var cdb = require('cartodb.js-v3');
var BaseDialog = require('../../views/base_dialog/view');
var ViewFactory = require('../../view_factory');
var randomQuote = require('../../view_helpers/random_quote');
/**
* Create a vis from a dataset, required for some contexts to have a vis before be able to carry out next task
* - duplicate vis
* - add layer
*/
module.exports = BaseDialog.extend({
initialize: function() {
this.elder('initialize');
// Clean on hide and enter to confirm
// have to be mandatory
_.extend(
this.options,
{
clean_on_hide: true,
enter_to_confirm: true
}
);
if (!this.model) throw new Error('model is required (layer)');
this._initViews();
this._initBinds();
},
render_content: function() {
return this._panes.getActivePane().render().el;
},
_initViews: function() {
this._panes = new cdb.ui.common.TabPane({
el: this.el
});
this.addView(this._panes);
this._panes.addTab('confirm',
ViewFactory.createByTemplate('common/dialogs/delete_layer/template', {
})
);
this._panes.addTab('loading',
ViewFactory.createByTemplate('common/templates/loading', {
title: 'Deleting layer…',
quote: randomQuote()
})
);
this._panes.addTab('fail',
ViewFactory.createByTemplate('common/templates/fail', {
msg: 'Could not delete layer for some reason'
})
);
this._panes.active('confirm');
},
_initBinds: function() {
this._panes.bind('tabEnabled', this.render, this);
},
ok: function() {
this._panes.active('loading');
var self = this;
this.model.destroy({
wait: true,
success: function() {
self.close();
},
error: function() {
self._panes.active('fail');
}
});
}
});
| {
"content_hash": "4c21a9daac1eaf4783c00cde24ba37cb",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 110,
"avg_line_length": 24.263157894736842,
"alnum_prop": 0.591648590021692,
"repo_name": "codeandtheory/cartodb",
"id": "0faed80b66da8e978bb63c67eb0467230aad3904",
"size": "1846",
"binary": false,
"copies": "4",
"ref": "refs/heads/cfr_develop",
"path": "lib/assets/javascripts/cartodb/common/dialogs/delete_layer/delete_layer_view.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "731704"
},
{
"name": "Game Maker Language",
"bytes": "182471"
},
{
"name": "HTML",
"bytes": "3243374"
},
{
"name": "JavaScript",
"bytes": "4430990"
},
{
"name": "Makefile",
"bytes": "14280"
},
{
"name": "PLpgSQL",
"bytes": "1233"
},
{
"name": "Python",
"bytes": "10450"
},
{
"name": "Ruby",
"bytes": "3985764"
},
{
"name": "Shell",
"bytes": "12130"
},
{
"name": "Smarty",
"bytes": "110966"
}
],
"symlink_target": ""
} |
from toontown.parties import PartyGlobals
from toontown.parties.DistributedPartyDanceActivityBase import DistributedPartyDanceActivityBase
from toontown.toonbase import TTLocalizer
class DistributedPartyValentineDanceActivity(DistributedPartyDanceActivityBase):
    # Client-side dance activity that uses the Valentine-themed dance floor
    # model instead of the standard one; behavior is otherwise inherited.
    notify = directNotify.newCategory('DistributedPartyValentineDanceActivity')

    def __init__(self, cr):
        # cr: client repository. Same activity id and dance-pattern->anim
        # mapping as the regular dance activity; only the floor model differs.
        DistributedPartyDanceActivityBase.__init__(self, cr, PartyGlobals.ActivityIds.PartyDance, PartyGlobals.DancePatternToAnims, model='phase_13/models/parties/tt_m_ara_pty_danceFloorValentine')

    def getInstructions(self):
        return TTLocalizer.PartyDanceActivityInstructions

    def getTitle(self):
        return TTLocalizer.PartyDanceActivityTitle

    def load(self):
        DistributedPartyDanceActivityBase.load(self)
        # The Valentine floor model contains several disco-ball meshes; keep
        # only discoBall_10 visible and hide its sibling nodes.
        parentGroup = self.danceFloor.find('**/discoBall_mesh')
        correctBall = self.danceFloor.find('**/discoBall_10')
        # NOTE(review): origBall is looked up but never used -- presumably kept
        # for parity with similar activities; confirm before removing.
        origBall = self.danceFloor.find('**/discoBall_mesh_orig')
        if not correctBall.isEmpty():
            numChildren = parentGroup.getNumChildren()
            for i in xrange(numChildren):  # xrange: this codebase is Python 2
                child = parentGroup.getChild(i)
                if child != correctBall:
                    child.hide()
| {
"content_hash": "5b9e607be8c7b8ed2fd96a9f1c898b58",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 197,
"avg_line_length": 46.629629629629626,
"alnum_prop": 0.727561556791104,
"repo_name": "ToontownUprising/src",
"id": "d61d5e40f0b8a9954ba1d8295be46bca633ef8da",
"size": "1259",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "toontown/parties/DistributedPartyValentineDanceActivity.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Perl",
"bytes": "36"
},
{
"name": "Python",
"bytes": "16244807"
},
{
"name": "Shell",
"bytes": "11615"
}
],
"symlink_target": ""
} |
package com.capitalone.dashboard.datafactory.jira.sdk.connector;
import com.capitalone.dashboard.datafactory.jira.sdk.util.SystemInfo;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpResponse;
import org.apache.commons.lang3.NotImplementedException;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
* {@inheritDoc}
*
* @author kfk884
*
*/
@SuppressWarnings("PMD.AvoidCatchingNPE") // this needs to be rewritten...fixme
public class GetResponseBuilderImpl extends BaseConnectionImpl implements
GetResponseBuilder {
private static final Logger LOGGER = LoggerFactory.getLogger(GetResponseBuilderImpl.class);
private static final int TIMEOUT = 120000;
/**
* Constructor which inherits artifacts from super class
*/
public GetResponseBuilderImpl() {
super();
}
/*
* (non-Javadoc)
*
* @see
* com.capitalone.jira.client.connector.GetResponseBuilder#getResponse(com
* .google.api.client.http.HttpRequestFactory, java.lang.String)
*/
@Override
public JSONObject getResponse(HttpRequestFactory rqFactory, String query) {
// Declare local HTTP, Credential, and response object related artifacts
JSONObject canonicalRs = new JSONObject();
HttpRequest request = null;
SystemInfo userAgent = new SystemInfo();
HttpResponse nativeRs = null;
try {
GenericUrl url = new GenericUrl(super.baseUrl + super.apiContextPath + query);
request = rqFactory.buildGetRequest(url);
request.setHeaders(new HttpHeaders().setAuthorization(
"Basic " + super.credentials).setUserAgent(
userAgent.generateApplicationUseHeader()));
request.setConnectTimeout(TIMEOUT);
request.setReadTimeout(TIMEOUT);
LOGGER.info("getResponse: url = " + url);
synchronized (request) { // WHY are you synchronizing??
nativeRs = request.execute();
}
} catch (IOException | NullPointerException e) {
LOGGER.error("There was a problem connecting to Jira with a given query:\n"
+ e.getMessage(), e);
} catch (IllegalArgumentException e) {
LOGGER.error("The given query was malformed\nPlease re-attempt the query without spaces or illegal HTTP characters handled by REST:"
+ e.getMessage(), e);
} catch (Exception e) {
LOGGER.error("An unexpected exception was caught while generating the HttpRequest artifact to talk with Jira:"
+ e.getMessage(), e);
} finally {
try {
canonicalRs = this.toCanonicalRs(nativeRs);
nativeRs.disconnect();
LOGGER.info("Jira web response message has been successfully generated and transformed");
} catch (IOException e) {
LOGGER.error("There was a problem retrieving Jira data from the input stream: "
+ e.getMessage(), e);
}
}
return canonicalRs;
}
@Override
public JSONArray getResponseArray(HttpRequestFactory rqFactory, String query) {
// Declare local HTTP, Credential, and response object related artifacts
JSONArray canonicalRs = new JSONArray();
HttpRequest request = null;
SystemInfo userAgent = new SystemInfo();
HttpResponse nativeRs = null;
try {
GenericUrl url = new GenericUrl(super.baseUrl + super.apiContextPath + query);
request = rqFactory.buildGetRequest(url);
request.setHeaders(new HttpHeaders().setAuthorization(
"Basic " + super.credentials).setUserAgent(
userAgent.generateApplicationUseHeader()));
request.setConnectTimeout(TIMEOUT);
request.setReadTimeout(TIMEOUT);
synchronized (request) { // WHY are you synchronizing??
nativeRs = request.execute();
}
} catch (IOException | NullPointerException e) {
LOGGER.error("There was a problem connecting to Jira with a given query:\n"
+ e.getMessage(), e);
} catch (IllegalArgumentException e) {
LOGGER.error("The given query was malformed\nPlease re-attempt the query without spaces or illegal HTTP characters handled by REST:\n"
+ e.getMessage(), e);
} catch (Exception e) {
LOGGER.error("An unexpected exception was caught while generating the HttpRequest artifact to talk with Jira:\n"
+ e.getMessage(), e);
} finally {
try {
canonicalRs = this.toCanonicalRsArray(nativeRs);
nativeRs.disconnect();
LOGGER.info("Jira web response message has been successfully generated and transformed");
} catch (IOException e) {
LOGGER.error("There was a problem retrieving Jira data from the input stream: "
+ e.getMessage(), e);
}
}
return canonicalRs;
}
/*
* (non-Javadoc)
*
* @see
* com.capitalone.jira.client.connector.GetResponseBuilder#getResponseOAuth
* (com .google.api.client.http.HttpRequestFactory, java.lang.String)
*/
public JSONObject getResponseOAuth(HttpRequestFactory rqFactory,
String query) throws NotImplementedException {
// TODO This is currently not implemented for OAuth functionality
throw new NotImplementedException("currently not implemented for OAuth functionality");
}
/**
* Converts an HttpResponse message content stream into a valid JSONObject
* for file consumption
*
* @param content
* HttpResponse message content as an input stream
* @return A valid JSONObject from the HttpResponse message content
*/
private JSONObject toCanonicalRs(HttpResponse nativeRs) throws IOException {
StringBuilder builder = new StringBuilder();
InputStream content = nativeRs.getContent();
BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(content));
for (String line; (line = bufferedReader.readLine()) != null;) {
builder.append(line).append("\n");
}
Object obj = JSONValue.parse(builder.toString());
JSONObject canonicalRs = (JSONObject) obj;
return canonicalRs;
}
/**
* Converts an HttpResponse message content stream into a valid JSONArray
* for file consumption
*
* @param content
* HttpResponse message content as an input stream
* @return A valid JSONArray from the HttpResponse message content
*/
private JSONArray toCanonicalRsArray(HttpResponse nativeRs) throws IOException {
StringBuilder builder = new StringBuilder();
InputStream content = nativeRs.getContent();
BufferedReader bufferedReader = new BufferedReader(
new InputStreamReader(content));
for (String line; (line = bufferedReader.readLine()) != null;) {
builder.append(line).append("\n");
}
Object obj = JSONValue.parse(builder.toString());
JSONArray canonicalRs = (JSONArray) obj;
return canonicalRs;
}
}
| {
"content_hash": "69f3d6aaee65850b71a5669432b8d2fe",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 137,
"avg_line_length": 35.50785340314136,
"alnum_prop": 0.7391624889413152,
"repo_name": "jimzucker/hygieia-temp",
"id": "0171f7ff797e54c643a075641c1a1c5537312c3a",
"size": "6782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jira-feature-collector/src/main/java/com/capitalone/dashboard/datafactory/jira/sdk/connector/GetResponseBuilderImpl.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "5186"
},
{
"name": "CSS",
"bytes": "76286"
},
{
"name": "HTML",
"bytes": "91997"
},
{
"name": "Java",
"bytes": "794189"
},
{
"name": "JavaScript",
"bytes": "190816"
},
{
"name": "Shell",
"bytes": "8755"
}
],
"symlink_target": ""
} |
"use strict";
var PostsController = require('../../../controllers').PostsController
exports.addRoutes = function(app) {
app.post( '/v1/posts', PostsController.create)
app.get( '/v1/posts/:postId', PostsController.show)
app.put( '/v1/posts/:postId', PostsController.update)
app.delete('/v1/posts/:postId', PostsController.destroy)
app.post( '/v1/posts/:postId/like', PostsController.like)
app.post( '/v1/posts/:postId/unlike', PostsController.unlike)
app.post( '/v1/posts/:postId/hide', PostsController.hide)
app.post( '/v1/posts/:postId/unhide', PostsController.unhide)
}
| {
"content_hash": "782f8fedf060bd78276dc960cb7f6d97",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 69,
"avg_line_length": 45.642857142857146,
"alnum_prop": 0.6635367762128326,
"repo_name": "pepyatka/pepyatka-server",
"id": "cba996b8575a93d85389b484d85a90933838df04",
"size": "639",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "app/routes/api/v1/PostsRoute.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "417063"
}
],
"symlink_target": ""
} |
using GenFx.Validation;
using System;
using System.IO;
using System.Runtime.Serialization;
using Xunit;
namespace GenFx.Tests
{
/// <summary>
/// Contains unit tests for the <see cref="ValidationException"/> class.
/// </summary>
public class ValidationExceptionTest
{
    /// <summary>
    /// Tests that each constructor overload initializes the exception state correctly.
    /// </summary>
    [Fact]
    public void ValidationException_Ctor()
    {
        var defaultException = new ValidationException();
        Assert.NotNull(defaultException.Message);

        const string message = "test";
        var messageException = new ValidationException(message);
        Assert.Equal(message, messageException.Message);

        var inner = new ArgumentException();
        var wrappedException = new ValidationException(message, inner);
        Assert.Equal(message, wrappedException.Message);
        Assert.Same(inner, wrappedException.InnerException);
    }

    /// <summary>
    /// Tests that the exception round-trips through DataContract serialization.
    /// </summary>
    [Fact]
    public void ValidationException_Serialization()
    {
        var serializer = new DataContractSerializer(typeof(ValidationException));
        using (var stream = new MemoryStream())
        {
            serializer.WriteObject(stream, new ValidationException("message"));
            stream.Position = 0;

            var deserialized = (ValidationException)serializer.ReadObject(stream);
            Assert.Equal("message", deserialized.Message);
        }
    }
}
}
| {
"content_hash": "06f12974c102215aa6196959a70b7bf7",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 104,
"avg_line_length": 33.21568627450981,
"alnum_prop": 0.6162927981109799,
"repo_name": "mthalman/GenFx",
"id": "df6788cfc630e77ddcefdeb323a2dd034538dc59",
"size": "1696",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/GenFx.Tests/ValidationExceptionTest.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "739317"
},
{
"name": "Smalltalk",
"bytes": "1454"
}
],
"symlink_target": ""
} |
package com.artificial.cachereader.wrappers.rt4.loaders;
import com.artificial.cachereader.fs.Archive;
import com.artificial.cachereader.fs.CacheType;
import com.artificial.cachereader.fs.FileData;
import com.artificial.cachereader.fs.RT4CacheSystem;
import com.artificial.cachereader.meta.ArchiveMeta;
import com.artificial.cachereader.meta.FileMeta;
import com.artificial.cachereader.wrappers.rt4.Component;
import com.artificial.cachereader.wrappers.rt4.Widget;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class WidgetLoader extends WrapperLoader<Widget> {
    // Cache index 3 holds the widget archives.
    private final CacheType cache;

    public WidgetLoader(RT4CacheSystem cacheSystem) {
        super(cacheSystem);
        this.cache = cacheSystem.getCacheSource().getCacheType(3);
    }

    /**
     * Decodes every child file of the widget's archive into a Component and
     * assembles them into a Widget.
     *
     * @throws IllegalArgumentException when no archive exists for the id
     */
    @Override
    public Widget load(final int widgetId) {
        final Archive archive = getValidArchive(widgetId);
        final ArchiveMeta meta = cache.getTable().getEntries().get(archive.getId());
        final Widget widget = new Widget(this, widgetId);
        widget.components = new Component[meta.getChildren().size()];
        int slot = 0;
        for (final FileMeta childMeta : meta.getChildren().values()) {
            final int fileId = childMeta.getId();
            final FileData data = archive.getFile(fileId);
            // Component ids pack the widget id into the high 16 bits.
            final Component component = new Component(this, (widgetId << 16) + fileId);
            component.decode(data.getDataAsStream());
            component.index = slot;
            widget.components[slot] = component;
            slot++;
        }
        return widget;
    }

    @Override
    public boolean canLoad(int id) {
        return getWidgetArchive(id) != null;
    }

    // Looks up the archive and fails fast when the id is unknown.
    private Archive getValidArchive(final int widgetId) {
        final Archive archive = getWidgetArchive(widgetId);
        if (archive == null) {
            throw new IllegalArgumentException("Bad id");
        }
        return archive;
    }

    private Archive getWidgetArchive(final int widgetId) {
        return cache.getArchive(widgetId);
    }

    /** Loads every widget listed in the cache table. */
    public List<Widget> loadAll() {
        final List<Widget> widgets = new LinkedList<>();
        for (final Integer archiveId : cache.getTable().getEntries().keySet()) {
            widgets.add(load(archiveId));
        }
        return widgets;
    }
}
| {
"content_hash": "26bc389d201404775d99448be846288a",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 102,
"avg_line_length": 35.53846153846154,
"alnum_prop": 0.6735930735930736,
"repo_name": "ArtificialPB/DevelopmentKit",
"id": "f39a8d45691fee88a5e1d7702df35d603b6fbb5b",
"size": "2310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/artificial/cachereader/wrappers/rt4/loaders/WidgetLoader.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "208818"
}
],
"symlink_target": ""
} |
package org.mindinformatics.gwt.domeo.model;
import java.util.Date;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.i18n.client.DateTimeFormat;
/**
* @author Paolo Ciccarese <[email protected]>
*/
public class JsoAnnotation extends JavaScriptObject {

	// Timestamp layout used for the createdon / lastSavedOn JSON fields.
	// Extracted so both date getters share a single definition (previously duplicated).
	private static final String DATE_FORMAT_PATTERN = "MM/dd/yyyy HH:mm:ss Z";

	protected JsoAnnotation() {}

	public final native String getUuid() /*-{ return this.uuid; }-*/;
	public final native String getLocalId() /*-{ return this.localId; }-*/;
	public final native String getVersionNumber() /*-{ return this.versionNumber; }-*/;
	public final native String getPreviousVersion() /*-{ return this.previousVersion; }-*/;

	public final native String getCreatedOnAsString() /*-{ return this.createdon; }-*/;
	public final Date getCreatedOn() {
		return parseTimestamp(getCreatedOnAsString());
	}

	public final native String getLastSavedOnAsString() /*-{ return this.lastSavedOn; }-*/;
	public final Date getLastSavedOn() {
		return parseTimestamp(getLastSavedOnAsString());
	}

	// Parses a serialized timestamp with the shared pattern. Static is required:
	// GWT overlay types may only declare final/private instance methods.
	private static Date parseTimestamp(String raw) {
		DateTimeFormat fmt = DateTimeFormat.getFormat(DATE_FORMAT_PATTERN);
		return fmt.parse(raw);
	}

	// Creator
	public final native JsArray<JavaScriptObject> getCreatedBy() /*-{
		return this.createdby || [];
	}-*/;

	//public final native JsArray<JsoSelector> getSelectors() /*-{
	//	return this.selectors || [];
	//}-*/;
}
| {
"content_hash": "3e88de4205d62051688c0597032d8f07",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 88,
"avg_line_length": 34.36585365853659,
"alnum_prop": 0.7224982256919801,
"repo_name": "domeo/DomeoClient",
"id": "323777831417b38934e982345506b7ba85d591a4",
"size": "1409",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/org/mindinformatics/gwt/domeo/model/JsoAnnotation.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "965819"
},
{
"name": "HTML",
"bytes": "4843630"
},
{
"name": "Java",
"bytes": "3090250"
},
{
"name": "JavaScript",
"bytes": "1067648"
}
],
"symlink_target": ""
} |
namespace aura {
namespace client {
// Interface for objects that want to be notified about the lifecycle of
// drag-and-drop sessions. Implementations override the hooks below.
class AURA_EXPORT DragDropClientObserver {
 public:
  // Called when dragging started.
  virtual void OnDragStarted() = 0;

  // Called when dragging ended.
  virtual void OnDragEnded() = 0;

#if defined(OS_CHROMEOS)
  // Called when the set of currently selected drag operation changes during the
  // drag. |action| is a bitmask of the actions selected by the client. This is
  // to provide feedback during the operation, it does not take precedence over
  // the operation returned from StartDragAndDrop.
  virtual void OnDragActionsChanged(int actions) {}
#endif

 protected:
  // Protected, non-virtual-interface destruction: observers cannot be deleted
  // through this interface pointer.
  virtual ~DragDropClientObserver() = default;
};
} // namespace client
} // namespace aura
#endif // UI_AURA_CLIENT_DRAG_DROP_CLIENT_OBSERVER_H_
| {
"content_hash": "d95cc567aad4a66eb01d9696c6841ac6",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 80,
"avg_line_length": 28.48148148148148,
"alnum_prop": 0.7386215864759428,
"repo_name": "endlessm/chromium-browser",
"id": "aca9bda44bc32e524b7efece67be75d0fc0a4891",
"size": "1074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ui/aura/client/drag_drop_client_observer.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
//
// detail/resolve_op.hpp
// ~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2015 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef ASIO_DETAIL_RESOLVE_OP_HPP
#define ASIO_DETAIL_RESOLVE_OP_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include "asio/detail/config.hpp"
#include "asio/error.hpp"
#include "asio/io_service.hpp"
#include "asio/ip/basic_resolver_iterator.hpp"
#include "asio/ip/basic_resolver_query.hpp"
#include "asio/detail/bind_handler.hpp"
#include "asio/detail/fenced_block.hpp"
#include "asio/detail/handler_alloc_helpers.hpp"
#include "asio/detail/handler_invoke_helpers.hpp"
#include "asio/detail/memory.hpp"
#include "asio/detail/operation.hpp"
#include "asio/detail/socket_ops.hpp"
#include "asio/detail/push_options.hpp"
namespace asio {
namespace detail {
template <typename Protocol, typename Handler>
class resolve_op : public operation
{
public:
ASIO_DEFINE_HANDLER_PTR(resolve_op);
typedef asio::ip::basic_resolver_query<Protocol> query_type;
typedef asio::ip::basic_resolver_iterator<Protocol> iterator_type;
  // Constructs the operation: registers do_complete() as the completion hook,
  // snapshots the query, and notifies handler_work that asynchronous work has
  // started on behalf of the handler. addrinfo_ stays null until the
  // background getaddrinfo call populates it.
  resolve_op(socket_ops::weak_cancel_token_type cancel_token,
      const query_type& query, io_service_impl& ios, Handler& handler)
    : operation(&resolve_op::do_complete),
      cancel_token_(cancel_token),
      query_(query),
      io_service_impl_(ios),
      handler_(ASIO_MOVE_CAST(Handler)(handler)),
      addrinfo_(0)
  {
    handler_work<Handler>::start(handler_);
  }
~resolve_op()
{
if (addrinfo_)
socket_ops::freeaddrinfo(addrinfo_);
}
static void do_complete(void* owner, operation* base,
const asio::error_code& /*ec*/,
std::size_t /*bytes_transferred*/)
{
// Take ownership of the operation object.
resolve_op* o(static_cast<resolve_op*>(base));
ptr p = { asio::detail::addressof(o->handler_), o, o };
handler_work<Handler> w(o->handler_);
if (owner && owner != &o->io_service_impl_)
{
// The operation is being run on the worker io_service. Time to perform
// the resolver operation.
// Perform the blocking host resolution operation.
socket_ops::background_getaddrinfo(o->cancel_token_,
o->query_.host_name().c_str(), o->query_.service_name().c_str(),
o->query_.hints(), &o->addrinfo_, o->ec_);
// Pass operation back to main io_service for completion.
o->io_service_impl_.post_deferred_completion(o);
p.v = p.p = 0;
}
else
{
// The operation has been returned to the main io_service. The completion
// handler is ready to be delivered.
ASIO_HANDLER_COMPLETION((o));
// Make a copy of the handler so that the memory can be deallocated
// before the upcall is made. Even if we're not about to make an upcall,
// a sub-object of the handler may be the true owner of the memory
// associated with the handler. Consequently, a local copy of the handler
// is required to ensure that any owning sub-object remains valid until
// after we have deallocated the memory here.
detail::binder2<Handler, asio::error_code, iterator_type>
handler(o->handler_, o->ec_, iterator_type());
p.h = asio::detail::addressof(handler.handler_);
if (o->addrinfo_)
{
handler.arg2_ = iterator_type::create(o->addrinfo_,
o->query_.host_name(), o->query_.service_name());
}
p.reset();
if (owner)
{
fenced_block b(fenced_block::half);
ASIO_HANDLER_INVOCATION_BEGIN((handler.arg1_, "..."));
w.complete(handler, handler.handler_);
ASIO_HANDLER_INVOCATION_END;
}
}
}
private:
socket_ops::weak_cancel_token_type cancel_token_;
query_type query_;
io_service_impl& io_service_impl_;
Handler handler_;
asio::error_code ec_;
asio::detail::addrinfo_type* addrinfo_;
};
} // namespace detail
} // namespace asio
#include "asio/detail/pop_options.hpp"
#endif // ASIO_DETAIL_RESOLVE_OP_HPP
| {
"content_hash": "e18a9c5379d0b1669e7cebd2262a4624",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 79,
"avg_line_length": 31.51127819548872,
"alnum_prop": 0.6544977332378907,
"repo_name": "letitvi/VideoGridPlayer",
"id": "0f250b44cf3aba6899bbb8fa034070d03ba876c6",
"size": "4191",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "thirdparty/source/asio-1.11.0/include/asio/detail/resolve_op.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1189"
},
{
"name": "C++",
"bytes": "26483"
},
{
"name": "Objective-C",
"bytes": "13"
}
],
"symlink_target": ""
} |
const { Emitter, CompositeDisposable } = require('atom');

// String identifiers for each possible updater state. They are also exposed
// to consumers as the UpdateManager.State enumeration at the bottom of this
// file.
const Unsupported = 'unsupported';
const Idle = 'idle';
const CheckingForUpdate = 'checking';
const DownloadingUpdate = 'downloading';
const UpdateAvailableToInstall = 'update-available';
const UpToDate = 'no-update-available';
const ErrorState = 'error';
// Tracks the editor's auto-update lifecycle. Listens to atom.autoUpdater and
// the `core.automaticallyUpdate` config setting, and re-broadcasts every
// state change through a single 'did-change' event.
let UpdateManager = class UpdateManager {
  constructor() {
    this.emitter = new Emitter();
    // Until a download completes, the "available" version is simply the
    // version currently running.
    this.currentVersion = atom.getVersion();
    this.availableVersion = atom.getVersion();
    this.resetState();
    this.listenForAtomEvents();
  }

  listenForAtomEvents() {
    this.subscriptions = new CompositeDisposable();

    const updater = atom.autoUpdater;
    this.subscriptions.add(
      updater.onDidBeginCheckingForUpdate(() => this.setState(CheckingForUpdate)),
      updater.onDidBeginDownloadingUpdate(() => this.setState(DownloadingUpdate)),
      updater.onDidCompleteDownloadingUpdate(({ releaseVersion }) =>
        this.setAvailableVersion(releaseVersion)
      ),
      updater.onUpdateNotAvailable(() => this.setState(UpToDate)),
      updater.onUpdateError(() => this.setState(ErrorState)),
      atom.config.observe('core.automaticallyUpdate', value => {
        this.autoUpdatesEnabled = value;
        this.emitDidChange();
      })
    );

    // TODO: When https://github.com/atom/electron/issues/4587 is closed we can add this support.
    // atom.autoUpdater.onUpdateAvailable =>
    //   @find('.about-updates-item').removeClass('is-shown')
    //   @updateAvailable.addClass('is-shown')
  }

  dispose() {
    this.subscriptions.dispose();
  }

  // Register a callback invoked whenever any update-related state changes.
  onDidChange(callback) {
    return this.emitter.on('did-change', callback);
  }

  emitDidChange() {
    this.emitter.emit('did-change');
  }

  // True only when the user enabled automatic updates AND the platform
  // supports them.
  getAutoUpdatesEnabled() {
    const supported = this.state !== UpdateManager.State.Unsupported;
    return this.autoUpdatesEnabled && supported;
  }

  setAutoUpdatesEnabled(enabled) {
    return atom.config.set('core.automaticallyUpdate', enabled);
  }

  getErrorMessage() {
    return atom.autoUpdater.getErrorMessage();
  }

  getState() {
    return this.state;
  }

  setState(state) {
    this.state = state;
    this.emitDidChange();
  }

  // Re-derive the state from the platform updater; platforms without updater
  // support are pinned to Unsupported.
  resetState() {
    let initial = Unsupported;
    if (atom.autoUpdater.platformSupportsUpdates()) {
      initial = atom.autoUpdater.getState();
    }
    this.state = initial;
    this.emitDidChange();
  }

  getAvailableVersion() {
    return this.availableVersion;
  }

  // Record the freshly downloaded version and flip the state accordingly.
  setAvailableVersion(version) {
    this.availableVersion = version;
    this.state =
      this.availableVersion === this.currentVersion
        ? UpToDate
        : UpdateAvailableToInstall;
    this.emitDidChange();
  }

  checkForUpdate() {
    atom.autoUpdater.checkForUpdate();
  }

  restartAndInstallUpdate() {
    atom.autoUpdater.restartAndInstallUpdate();
  }

  getReleaseNotesURLForCurrentVersion() {
    return this.getReleaseNotesURLForVersion(this.currentVersion);
  }

  getReleaseNotesURLForAvailableVersion() {
    return this.getReleaseNotesURLForVersion(this.availableVersion);
  }

  // Map a version string to the GitHub (or atom.io) release notes page.
  getReleaseNotesURLForVersion(appVersion) {
    // Dev versions will not have a releases page
    if (appVersion.includes('dev')) {
      return 'https://atom.io/releases';
    }

    const tag = appVersion.startsWith('v') ? appVersion : `v${appVersion}`;
    const releaseRepo = tag.includes('nightly')
      ? 'atom-nightly-releases'
      : 'atom';
    return `https://github.com/atom/${releaseRepo}/releases/tag/${tag}`;
  }
};
// Public enumeration of updater states, mirroring the module-level string
// constants above.
UpdateManager.State = {
  Unsupported,
  Idle,
  CheckingForUpdate,
  DownloadingUpdate,
  UpdateAvailableToInstall,
  UpToDate,
  Error: ErrorState
};

module.exports = UpdateManager;
| {
"content_hash": "e05e3156b4776e9a6affb9935448e0d6",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 97,
"avg_line_length": 25.509933774834437,
"alnum_prop": 0.676791277258567,
"repo_name": "atom/atom",
"id": "eb30ce4d373822e8c1fd5bfc9dae6b7cc26ec048",
"size": "3852",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packages/about/lib/update-manager.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2431"
},
{
"name": "CoffeeScript",
"bytes": "142807"
},
{
"name": "Dockerfile",
"bytes": "682"
},
{
"name": "EJS",
"bytes": "124"
},
{
"name": "HTML",
"bytes": "336"
},
{
"name": "JavaScript",
"bytes": "4525809"
},
{
"name": "Less",
"bytes": "445983"
},
{
"name": "Shell",
"bytes": "7778"
}
],
"symlink_target": ""
} |
"""
This module provides a hook which generates a cucumber json result file at the end of the run.
"""
from getpass import getuser
from socket import gethostname
from datetime import timedelta
import re
import json
import logging
from radish.terrain import world
from radish.hookregistry import after
from radish.exceptions import RadishError
from radish.scenariooutline import ScenarioOutline
from radish.scenarioloop import ScenarioLoop
from radish.stepmodel import Step
from radish.extensionregistry import extension
import radish.utils as utils
@extension
class CucumberJSONWriter(object):
    """
    Radish extension that serializes the result of a test run into a
    cucumber-compatible JSON report file.

    The extension is only activated when the ``--cucumber-json`` command
    line option is given (see ``LOAD_IF``) and registers itself as an
    ``after.all`` hook so the report is written once the run is over.
    """
    # Command line options contributed by this extension.
    OPTIONS = [("--cucumber-json=<ccjson>", "write cucumber json result file after run")]
    # Only load the extension when a target file has been configured.
    LOAD_IF = staticmethod(lambda config: config.cucumber_json)
    LOAD_PRIORITY = 60

    def __init__(self):
        # Defer report generation until after all features have run.
        after.all(self.generate_ccjson)

    def generate_ccjson(self, features, marker):
        """
        Generates the cucumber json result file.

        :param features: all features of this test run
        :param marker: unique marker of the run (unused; part of the hook
            signature)
        :raises RadishError: if no features were given
        """
        if not features:
            raise RadishError("No features given to generate cucumber json file")

        # Accumulate the wall-clock duration of all executed features.
        # NOTE(review): this total is currently not written to the report;
        # kept (un-clobbered) for potential future use. Previously this
        # variable was overwritten inside the step loop below.
        total_duration = timedelta()
        for feature in features:
            if feature.state in [Step.State.PASSED, Step.State.FAILED]:
                total_duration += feature.duration

        ccjson = []
        for feature in features:
            if not feature.has_to_run(world.config.scenarios):
                continue
            feature_description = "\n".join(feature.description)
            feature_json = {
                "uri": feature.path,
                "type": "feature",
                "keyword": feature.keyword,
                "id": str(feature.id),
                "name": feature.sentence,
                "line": feature.line,
                "description": feature_description,
                "tags": [],
                "elements": []
            }
            # Feature tags sit on the lines directly above the feature
            # sentence, one tag per line.
            for offset, tag in enumerate(feature.tags):
                feature_json["tags"].append({"name": "@" + tag.name, "line": feature.line - len(feature.tags) + offset})
            # ScenarioOutline/ScenarioLoop containers are skipped; their
            # generated example scenarios appear in all_scenarios themselves.
            for scenario in (s for s in feature.all_scenarios if not isinstance(s, (ScenarioOutline, ScenarioLoop))):
                if not scenario.has_to_run(world.config.scenarios):
                    continue
                scenario_json = {
                    "keyword": scenario.keyword,
                    "type": "scenario",
                    "id": str(scenario.id),
                    "name": scenario.sentence,
                    "line": scenario.line,
                    "description": "",
                    "steps": [],
                    "tags": []
                }
                # Scenario tags sit on the lines directly above the scenario.
                start_line_no = scenario.line - len(scenario.tags)
                for offset, tag in enumerate(scenario.tags):
                    scenario_json["tags"].append({"name": "@" + tag.name, "line": start_line_no + offset})
                for step in scenario.all_steps:
                    # Cucumber expects step durations in nanoseconds.
                    step_duration_ns = step.duration.total_seconds() * 1e9 if step.starttime and step.endtime else 0.0
                    step_json = {
                        "keyword": step.sentence.split()[0],
                        "name": step.sentence,
                        "line": step.line,
                        "result": {
                            "status": step.state,
                            "duration": step_duration_ns
                        }
                    }
                    # Compare states with == rather than "is": the state
                    # values are plain strings, so identity is not guaranteed.
                    if step.state == Step.State.FAILED:
                        step_json["result"]["error_message"] = step.failure.reason
                    if step.state == Step.State.UNTESTED:
                        step_json["result"]["status"] = "skipped"
                    scenario_json["steps"].append(step_json)
                feature_json["elements"].append(scenario_json)
            ccjson.append(feature_json)

        with open(world.config.cucumber_json, "w+") as f:
            content = json.dumps(ccjson, indent=4, sort_keys=True)
            f.write(content)
| {
"content_hash": "db0dfefc942972482d34fe87cf6ff410",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 117,
"avg_line_length": 39.205882352941174,
"alnum_prop": 0.5358839709927482,
"repo_name": "SamuelYvon/radish",
"id": "ccd2b452457831ac935fa9fb440df4dc43a4416a",
"size": "4024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "radish/extensions/cucumber_json_writer.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Gherkin",
"bytes": "20376"
},
{
"name": "Python",
"bytes": "261585"
},
{
"name": "Shell",
"bytes": "1686"
}
],
"symlink_target": ""
} |
// Convenience frame-geometry accessors on UIView.
@interface UIView (BZSize)
// Frame edge coordinates — presumably expressed in the superview's
// coordinate space; TODO confirm against the category implementation.
@property(nonatomic) CGFloat bzLeft;
@property(nonatomic) CGFloat bzTop;
@property(nonatomic) CGFloat bzRight;
@property(nonatomic) CGFloat bzBottom;
// The view's frame origin.
@property(nonatomic) CGPoint bzOrigin;
@end
// Relative-layout rules for UIView, in the spirit of Android's
// RelativeLayout: each rule positions the receiver relative to another
// view or to its parent.
@interface UIView (BZRelativeLayout)
// The available positioning rules. Which of the rule's optional inputs
// (anchor view, margin) are honoured depends on the group, as noted below.
typedef NS_ENUM(NSInteger, BZRelativeRule){
    // Must set a view (the anchor the receiver is placed against).
    BZRelativeRuleStartOf,
    BZRelativeRuleAboveOf,
    BZRelativeRuleEndOf,
    BZRelativeRuleBelowOf,
    BZRelativeRuleAlignStart,
    BZRelativeRuleAlignTop,
    BZRelativeRuleAlignEnd,
    BZRelativeRuleAlignBottom,
    // Ignore view (the rule is relative to the parent).
    BZRelativeRuleAlignParentStart,
    BZRelativeRuleAlignParentTop,
    BZRelativeRuleAlignParentEnd,
    BZRelativeRuleAlignParentBottom,
    // Ignore view and margin (pure centering rules).
    BZRelativeRuleCenterHorizontalOf,
    BZRelativeRuleCenterVerticalOf,
    BZRelativeRuleCenterOf,
    BZRelativeRuleCenterHorizontalInParent,
    BZRelativeRuleCenterVerticalInParent,
    BZRelativeRuleCenterInParent,
    BZRelativeRuleBaseline//Only works on AutoLayout
};
// Whether rules are realised through Auto Layout constraints. Default is YES.
@property(nonatomic,getter = isEnableAutoLayout) BOOL enableAutoLayout;//Default is YES
// The rules currently applied to this view, as an array of rule dictionaries.
@property (nonatomic,nullable) NSArray<NSDictionary*> *rules;
// Add a rule; the view/margin overloads supply the anchor view and spacing
// where the rule kind requires them.
-(void) addRule:(BZRelativeRule)rule;
-(void) addRule:(BZRelativeRule)rule view:(nullable UIView*) view;
-(void) addRule:(BZRelativeRule)rule margin:(CGFloat) margin;
-(void) addRule:(BZRelativeRule)rule view:(nullable UIView*) view margin:(CGFloat) margin;
@end
| {
"content_hash": "6ae63e8a88919cc8209ab25dd27517ee",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 90,
"avg_line_length": 25.614035087719298,
"alnum_prop": 0.7719178082191781,
"repo_name": "CBillZhang/BZRelativeLayout",
"id": "0708215d01dd914db2f5d20555d047e70894d338",
"size": "1629",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BZRelativeLayout/UltimatePower+UIView.h",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "15005"
},
{
"name": "Ruby",
"bytes": "1233"
}
],
"symlink_target": ""
} |
/*
Sort integers from the command line in a very ridiculous way: leveraging timeouts :P
(PhantomJS script; originally compiled from CoffeeScript 1.7.1.)
*/
(function() {
  var system = require('system');
  if (system.args.length < 2) {
    console.log("Usage: phantomjs sleepsort.coffee PUT YOUR INTEGERS HERE SEPARATED BY SPACES");
    phantom.exit(1);
  } else {
    // Skip args[0] (the script name); everything else is an integer to sort.
    var values = Array.prototype.slice.call(system.args, 1);
    var printed = 0;
    values.forEach(function(value) {
      // Each value sleeps for "value" milliseconds before printing, so
      // smaller numbers print first. Exit once every value has printed.
      setTimeout(function() {
        console.log(value);
        printed += 1;
        if (printed === values.length) {
          phantom.exit();
        }
      }, value);
    });
  }
}).call(this);
| {
"content_hash": "3ec98ef8d4438d40f357c059856d8d82",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 96,
"avg_line_length": 25.09375,
"alnum_prop": 0.5691158156911582,
"repo_name": "bbuchsbaum/psycloud",
"id": "bd888394468f78b32ac1a2c829c233eede4d956e",
"size": "803",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "node_modules/grunt-mocha-debug/node_modules/phantomjs-wrapper/node_modules/phantomjs/lib/phantom/examples/sleepsort.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "138968"
},
{
"name": "CoffeeScript",
"bytes": "234953"
},
{
"name": "HTML",
"bytes": "11880"
},
{
"name": "JavaScript",
"bytes": "3761442"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<title>CBError.CBClickError</title>
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="CBError.CBClickError";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../com/chartboost/sdk/Model/CBError.html"
title="class in com.chartboost.sdk.Model"><span class="strong">Prev Class</span></a>
</li>
<li><a href="../../../../com/chartboost/sdk/Model/CBError.CBImpressionError.html"
title="enum in com.chartboost.sdk.Model"><span class="strong">Next Class</span></a>
</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/chartboost/sdk/Model/CBError.CBClickError.html"
target="_top">Frames</a></li>
<li><a href="CBError.CBClickError.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#enum_constant_summary">Enum Constants</a> | </li>
<li>Field | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#enum_constant_detail">Enum Constants</a> | </li>
<li>Field | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">com.chartboost.sdk.Model</div>
<h2 title="Enum CBError.CBClickError" class="title">Enum CBError.CBClickError</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li>java.lang.Object</li>
<li>
<ul class="inheritance">
<li>java.lang.Enum<<a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a>></li>
<li>
<ul class="inheritance">
<li>com.chartboost.sdk.Model.CBError.CBClickError</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<dl>
<dt>All Implemented Interfaces:</dt>
<dd>java.io.Serializable, java.lang.Comparable<<a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a>>
</dd>
</dl>
<dl>
<dt>Enclosing class:</dt>
<dd><a href="../../../../com/chartboost/sdk/Model/CBError.html"
title="class in com.chartboost.sdk.Model">CBError</a></dd>
</dl>
<hr>
<br>
<pre>public static enum <span class="strong">CBError.CBClickError</span>
extends java.lang.Enum<<a href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a>></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- =========== ENUM CONSTANT SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="enum_constant_summary">
<!-- -->
</a>
<h3>Enum Constant Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0"
summary="Enum Constant Summary table, listing enum constants, and an explanation">
<caption><span>Enum Constants</span><span class="tabEnd"> </span>
</caption>
<tr>
<th class="colOne" scope="col">Enum Constant and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#AGE_GATE_FAILURE">AGE_GATE_FAILURE</a></strong></code>
<div class="block">User failed to pass the Age Gate</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#INTERNAL">INTERNAL</a></strong></code>
<div class="block">Unknown internal error</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#NO_HOST_ACTIVITY">NO_HOST_ACTIVITY</a></strong></code>
<div class="block">There is no currently active activity with
Chartboost properly integrated
</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#URI_INVALID">URI_INVALID</a></strong></code>
<div class="block">Invalid URI</div>
</td>
</tr>
<tr class="altColor">
<td class="colOne"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#URI_UNRECOGNIZED">URI_UNRECOGNIZED</a></strong></code>
<div class="block">The device does not know how to open the
protocol of the URI
</div>
</td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method_summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0"
summary="Method Summary table, listing methods, and an explanation">
<caption><span>Methods</span><span class="tabEnd"> </span>
</caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tr class="altColor">
<td class="colFirst"><code>static <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a></code>
</td>
<td class="colLast"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#valueOf(java.lang.String)">valueOf</a></strong>(java.lang.String name)</code>
<div class="block">Returns the enum constant of this type with
the specified name.
</div>
</td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>static <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a>[]</code>
</td>
<td class="colLast"><code><strong><a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html#values()">values</a></strong>()</code>
<div class="block">Returns an array containing the constants of
this enum type, in
the order they are declared.
</div>
</td>
</tr>
</table>
<ul class="blockList">
<li class="blockList"><a
name="methods_inherited_from_class_java.lang.Enum">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Enum</h3>
<code>compareTo, equals, getDeclaringClass, hashCode, name, ordinal,
toString, valueOf</code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a
name="methods_inherited_from_class_java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.Object</h3>
<code>getClass, notify, notifyAll, wait, wait, wait</code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ============ ENUM CONSTANT DETAIL =========== -->
<ul class="blockList">
<li class="blockList"><a name="enum_constant_detail">
<!-- -->
</a>
<h3>Enum Constant Detail</h3>
<a name="URI_INVALID">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>URI_INVALID</h4>
<pre>public static final <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> URI_INVALID</pre>
<div class="block">Invalid URI</div>
</li>
</ul>
<a name="URI_UNRECOGNIZED">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>URI_UNRECOGNIZED</h4>
<pre>public static final <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> URI_UNRECOGNIZED</pre>
<div class="block">The device does not know how to open the protocol
of the URI
</div>
</li>
</ul>
<a name="AGE_GATE_FAILURE">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>AGE_GATE_FAILURE</h4>
<pre>public static final <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> AGE_GATE_FAILURE</pre>
<div class="block">User failed to pass the Age Gate</div>
</li>
</ul>
<a name="NO_HOST_ACTIVITY">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>NO_HOST_ACTIVITY</h4>
<pre>public static final <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> NO_HOST_ACTIVITY</pre>
<div class="block">There is no currently active activity with
Chartboost properly integrated
</div>
</li>
</ul>
<a name="INTERNAL">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>INTERNAL</h4>
<pre>public static final <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> INTERNAL</pre>
<div class="block">Unknown internal error</div>
</li>
</ul>
</li>
</ul>
<!-- ============ METHOD DETAIL ========== -->
<ul class="blockList">
<li class="blockList"><a name="method_detail">
<!-- -->
</a>
<h3>Method Detail</h3>
<a name="values()">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>values</h4>
<pre>public static <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a>[] values()</pre>
<div class="block">Returns an array containing the constants of this
enum type, in
the order they are declared. This method may be used to iterate
over the constants as follows:
<pre>
for (CBError.CBClickError c : CBError.CBClickError.values())
System.out.println(c);
</pre>
</div>
<dl>
<dt><span class="strong">Returns:</span></dt>
<dd>an array containing the constants of this enum type, in the
order they are declared
</dd>
</dl>
</li>
</ul>
<a name="valueOf(java.lang.String)">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>valueOf</h4>
<pre>public static <a
href="../../../../com/chartboost/sdk/Model/CBError.CBClickError.html"
title="enum in com.chartboost.sdk.Model">CBError.CBClickError</a> valueOf(java.lang.String name)</pre>
<div class="block">Returns the enum constant of this type with the
specified name.
The string must match <i>exactly</i> an identifier used to
declare an
enum constant in this type. (Extraneous whitespace characters
are
not permitted.)
</div>
<dl>
<dt><span class="strong">Parameters:</span></dt>
<dd><code>name</code> - the name of the enum constant to be
returned.
</dd>
<dt><span class="strong">Returns:</span></dt>
<dd>the enum constant with the specified name</dd>
<dt><span class="strong">Throws:</span></dt>
<dd><code>java.lang.IllegalArgumentException</code> - if this
enum type has no constant with the specified name
</dd>
<dd><code>java.lang.NullPointerException</code> - if the
argument is null
</dd>
</dl>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a
name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li class="navBarCell1Rev">Class</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../index-all.html">Index</a></li>
<li><a href="../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="../../../../com/chartboost/sdk/Model/CBError.html"
title="class in com.chartboost.sdk.Model"><span class="strong">Prev Class</span></a>
</li>
<li><a href="../../../../com/chartboost/sdk/Model/CBError.CBImpressionError.html"
title="enum in com.chartboost.sdk.Model"><span class="strong">Next Class</span></a>
</li>
</ul>
<ul class="navList">
<li><a href="../../../../index.html?com/chartboost/sdk/Model/CBError.CBClickError.html"
target="_top">Frames</a></li>
<li><a href="CBError.CBClickError.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<div>
<ul class="subNavList">
<li>Summary: </li>
<li>Nested | </li>
<li><a href="#enum_constant_summary">Enum Constants</a> | </li>
<li>Field | </li>
<li><a href="#method_summary">Method</a></li>
</ul>
<ul class="subNavList">
<li>Detail: </li>
<li><a href="#enum_constant_detail">Enum Constants</a> | </li>
<li>Field | </li>
<li><a href="#method_detail">Method</a></li>
</ul>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
| {
"content_hash": "b7c61e936010584ef51dd3e1422eff49",
"timestamp": "",
"source": "github",
"line_count": 459,
"max_line_length": 191,
"avg_line_length": 50.58605664488017,
"alnum_prop": 0.40798484000172275,
"repo_name": "morris-james/googleplaylibrary",
"id": "c2734fb21c71b7a441a815f301ed03bce644295f",
"size": "23219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libs/doc/chartboost/com/chartboost/sdk/Model/CBError.CBClickError.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "11139"
},
{
"name": "HTML",
"bytes": "923030"
},
{
"name": "Java",
"bytes": "361791"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
! @(#)gHMBCAD.xml 1.1 05/28/07
!
-->
<template type="basic" name="gHMBCAD" author="chempack" investigator=""
time_created="20070528T084437" apptype="Common"
application="liquids" scans="gHMBCAD" seqfil="gHMBCAD">
<protocol title="gHMBCAD" type="protocol">
<action type="LIB" status="Ready" lock="off" title="gHMBCAD" exp="gHMBCAD"
time="34 min, 2 sec" macro="gHMBCAD" data="" />
</protocol>
</template>
| {
"content_hash": "6ca7585cfabcfdd39151384fb0f76ba0",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 79,
"avg_line_length": 38.46153846153846,
"alnum_prop": 0.642,
"repo_name": "OpenVnmrJ/OpenVnmrJ",
"id": "1aae85972cebe4f2272df84d1e2c4798a933e4fb",
"size": "500",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/ddr/protocols/gHMBCAD.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "8033"
},
{
"name": "Awk",
"bytes": "3726"
},
{
"name": "Batchfile",
"bytes": "993"
},
{
"name": "C",
"bytes": "44578835"
},
{
"name": "C++",
"bytes": "1895471"
},
{
"name": "CSS",
"bytes": "12739"
},
{
"name": "Fortran",
"bytes": "1661682"
},
{
"name": "HTML",
"bytes": "68197"
},
{
"name": "Inno Setup",
"bytes": "18741"
},
{
"name": "Java",
"bytes": "17274464"
},
{
"name": "JavaScript",
"bytes": "44448"
},
{
"name": "Lex",
"bytes": "42593"
},
{
"name": "LiveScript",
"bytes": "1071"
},
{
"name": "MATLAB",
"bytes": "77708"
},
{
"name": "Makefile",
"bytes": "126047"
},
{
"name": "OpenEdge ABL",
"bytes": "6980"
},
{
"name": "PLpgSQL",
"bytes": "1040"
},
{
"name": "Perl",
"bytes": "10286"
},
{
"name": "PostScript",
"bytes": "801"
},
{
"name": "Python",
"bytes": "590263"
},
{
"name": "R",
"bytes": "1082"
},
{
"name": "RPC",
"bytes": "43654"
},
{
"name": "Roff",
"bytes": "41584"
},
{
"name": "Shell",
"bytes": "1478413"
},
{
"name": "Tcl",
"bytes": "864859"
},
{
"name": "Vim Script",
"bytes": "7711"
},
{
"name": "Yacc",
"bytes": "95121"
}
],
"symlink_target": ""
} |
package org.onosproject.net.flow.impl;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import org.onosproject.core.ApplicationId;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.flow.CompletedBatchOperation;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.FlowRuleBatchEntry;
import org.onosproject.net.flow.FlowRuleBatchOperation;
import org.onosproject.net.flow.FlowRuleProgrammable;
import org.onosproject.net.flow.FlowRuleProvider;
import org.onosproject.net.flow.FlowRuleProviderService;
import org.onosproject.net.provider.AbstractProvider;
import org.onosproject.net.provider.ProviderId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.Set;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import static com.google.common.collect.ImmutableSet.copyOf;
import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.net.device.DeviceEvent.Type.*;
import static org.onosproject.net.flow.FlowRuleBatchEntry.FlowRuleOperation.*;
/**
 * Driver-based flow rule provider.
 *
 * Fallback provider that programs flow rules through a device's own
 * {@code FlowRuleProgrammable} behaviour and periodically polls devices
 * under local mastership for their flow entries.
 */
class FlowRuleDriverProvider extends AbstractProvider implements FlowRuleProvider {

    private final Logger log = LoggerFactory.getLogger(getClass());

    // Perhaps to be extracted for better reuse as we deal with other provider schemes.
    public static final String SCHEME = "default";
    public static final String PROVIDER_NAME = "org.onosproject.provider";

    FlowRuleProviderService providerService;
    private DeviceService deviceService;
    private MastershipService mastershipService;

    private InternalDeviceListener deviceListener = new InternalDeviceListener();
    private ScheduledExecutorService executor
        = newSingleThreadScheduledExecutor(groupedThreads("FlowRuleDriverProvider", "%d", log));
    private ScheduledFuture<?> poller = null;

    /**
     * Creates a new fallback flow rule provider.
     */
    FlowRuleDriverProvider() {
        super(new ProviderId(SCHEME, PROVIDER_NAME));
    }

    /**
     * Initializes the provider with necessary supporting services.
     *
     * @param providerService   flow rule provider service
     * @param deviceService     device service
     * @param mastershipService mastership service
     * @param pollFrequency     flow entry poll frequency, in seconds
     */
    void init(FlowRuleProviderService providerService,
              DeviceService deviceService, MastershipService mastershipService,
              int pollFrequency) {
        this.providerService = providerService;
        this.deviceService = deviceService;
        this.mastershipService = mastershipService;
        deviceService.addListener(deviceListener);

        // Restart the poller if init() is invoked again (e.g. with a new frequency).
        if (poller != null && !poller.isCancelled()) {
            poller.cancel(false);
        }
        poller = executor.scheduleAtFixedRate(this::pollFlowEntries, pollFrequency,
                                              pollFrequency, TimeUnit.SECONDS);
    }

    @Override
    public void applyFlowRule(FlowRule... flowRules) {
        rulesByDevice(flowRules).asMap().forEach(this::applyFlowRules);
    }

    @Override
    public void removeFlowRule(FlowRule... flowRules) {
        rulesByDevice(flowRules).asMap().forEach(this::removeFlowRules);
    }

    @Override
    public void removeRulesById(ApplicationId id, FlowRule... flowRules) {
        // Application scoping is not used by this fallback provider;
        // simply remove the given rules.
        removeFlowRule(flowRules);
    }

    @Override
    public void executeBatch(FlowRuleBatchOperation batch) {
        // Split the batch into additions (ADD/MODIFY) and removals.
        ImmutableList.Builder<FlowRule> toAdd = ImmutableList.builder();
        ImmutableList.Builder<FlowRule> toRemove = ImmutableList.builder();
        for (FlowRuleBatchEntry fbe : batch.getOperations()) {
            if (fbe.operator() == ADD || fbe.operator() == MODIFY) {
                toAdd.add(fbe.target());
            } else if (fbe.operator() == REMOVE) {
                toRemove.add(fbe.target());
            }
        }

        ImmutableList<FlowRule> rulesToAdd = toAdd.build();
        ImmutableList<FlowRule> rulesToRemove = toRemove.build();

        Collection<FlowRule> added = ImmutableList.of();
        if (!rulesToAdd.isEmpty()) {
            added = applyFlowRules(batch.deviceId(), rulesToAdd);
        }
        Collection<FlowRule> removed = ImmutableList.of();
        if (!rulesToRemove.isEmpty()) {
            removed = removeFlowRules(batch.deviceId(), rulesToRemove);
        }

        // Anything requested but not acknowledged by the device is a failure.
        Set<FlowRule> failedRules = Sets.union(Sets.difference(copyOf(rulesToAdd), copyOf(added)),
                                               Sets.difference(copyOf(rulesToRemove), copyOf(removed)));
        CompletedBatchOperation status =
                new CompletedBatchOperation(failedRules.isEmpty(), failedRules, batch.deviceId());
        providerService.batchOperationCompleted(batch.id(), status);
    }

    /**
     * Groups the given rules by the device they apply to.
     */
    private Multimap<DeviceId, FlowRule> rulesByDevice(FlowRule[] flowRules) {
        // Sort the flow rules by device id
        Multimap<DeviceId, FlowRule> rulesByDevice = LinkedListMultimap.create();
        for (FlowRule rule : flowRules) {
            rulesByDevice.put(rule.deviceId(), rule);
        }
        return rulesByDevice;
    }

    private Collection<FlowRule> applyFlowRules(DeviceId deviceId, Collection<FlowRule> flowRules) {
        FlowRuleProgrammable programmer = getFlowRuleProgrammable(deviceId);
        return programmer != null ? programmer.applyFlowRules(flowRules) : ImmutableList.of();
    }

    private Collection<FlowRule> removeFlowRules(DeviceId deviceId, Collection<FlowRule> flowRules) {
        FlowRuleProgrammable programmer = getFlowRuleProgrammable(deviceId);
        return programmer != null ? programmer.removeFlowRules(flowRules) : ImmutableList.of();
    }

    /**
     * Returns the FlowRuleProgrammable behaviour of the given device, or null
     * if the device is absent or does not support flow rule programming.
     */
    private FlowRuleProgrammable getFlowRuleProgrammable(DeviceId deviceId) {
        Device device = deviceService.getDevice(deviceId);
        // Bug fix: the device may have been removed between rule submission
        // and programming; guard against a null device to avoid an NPE.
        if (device == null) {
            log.debug("Device {} is not present", deviceId);
            return null;
        }
        if (device.is(FlowRuleProgrammable.class)) {
            return device.as(FlowRuleProgrammable.class);
        } else {
            log.debug("Device {} is not flow rule programmable", deviceId);
            return null;
        }
    }

    private void pollDeviceFlowEntries(Device device) {
        try {
            providerService.pushFlowMetrics(device.id(), device.as(FlowRuleProgrammable.class).getFlowEntries());
        } catch (Exception e) {
            // Never let a misbehaving driver kill the poller thread.
            log.warn("Exception thrown while polling {}", device.id(), e);
        }
    }

    private void pollFlowEntries() {
        try {
            deviceService.getAvailableDevices().forEach(device -> {
                // Only the local master polls, and only programmable devices.
                if (mastershipService.isLocalMaster(device.id()) && device.is(FlowRuleProgrammable.class)) {
                    pollDeviceFlowEntries(device);
                }
            });
        } catch (Exception e) {
            log.warn("Exception thrown while polling flows", e);
        }
    }

    // potentially positive device event
    private static final Set<DeviceEvent.Type> POSITIVE_DEVICE_EVENT =
            Sets.immutableEnumSet(DEVICE_ADDED,
                                  DEVICE_AVAILABILITY_CHANGED);

    /**
     * Polls a device's flow entries whenever it (re)appears under local mastership.
     */
    private class InternalDeviceListener implements DeviceListener {

        @Override
        public void event(DeviceEvent event) {
            // Handle events off the listener dispatch thread.
            executor.execute(() -> handleEvent(event));
        }

        @Override
        public boolean isRelevant(DeviceEvent event) {
            Device device = event.subject();
            return POSITIVE_DEVICE_EVENT.contains(event.type()) &&
                    device.is(FlowRuleProgrammable.class);
        }

        private void handleEvent(DeviceEvent event) {
            Device device = event.subject();
            boolean isRelevant = mastershipService.isLocalMaster(device.id()) &&
                    deviceService.isAvailable(device.id());
            if (isRelevant) {
                pollDeviceFlowEntries(device);
            }
        }
    }
}
| {
"content_hash": "0bdc7f53596ed283ecb8d76ade7d3b73",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 113,
"avg_line_length": 38.53211009174312,
"alnum_prop": 0.6834523809523809,
"repo_name": "sdnwiselab/onos",
"id": "c4dc8833ad656ca331993732a5cf5b8e66269604",
"size": "9017",
"binary": false,
"copies": "1",
"ref": "refs/heads/onos-sdn-wise-1.10",
"path": "core/net/src/main/java/org/onosproject/net/flow/impl/FlowRuleDriverProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "222318"
},
{
"name": "HTML",
"bytes": "148718"
},
{
"name": "Java",
"bytes": "34889939"
},
{
"name": "JavaScript",
"bytes": "3818724"
},
{
"name": "Python",
"bytes": "492716"
},
{
"name": "Ruby",
"bytes": "4052"
},
{
"name": "Shell",
"bytes": "205831"
}
],
"symlink_target": ""
} |
#include <iostream>
#include <string>
#include <gazeapi.h>
#include<stdlib.h>
#include<stdio.h>
#include <dos.h>
#include <math.h>
#include <conio.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include "graphics.h"
#define ESC 0x1b /* Define the escape key */
#define TRUE 1 /* Define some handy constants */
#define FALSE 0 /* Define some handy constants */
#define PI 3.14159 /* Define a value for PI */
#define ON 1 /* Define some handy constants */
#define OFF 0 /* Define some handy constants */
#define NFONTS 11
/* Human-readable names mirroring the BGI font/line/fill/text-justification
   enumerations — presumably for labeling demo output; only declared, not
   referenced in the code visible here. */
char *Fonts[NFONTS] = {
"DefaultFont", "TriplexFont", "SmallFont",
"SansSerifFont", "GothicFont", "ScriptFont", "SimplexFont", "TriplexScriptFont",
"ComplexFont", "EuropeanFont", "BoldFont"
};
char *LineStyles[] = {
"SolidLn", "DottedLn", "CenterLn", "DashedLn", "UserBitLn"
};
char *FillStyles[] = {
"EmptyFill", "SolidFill", "LineFill", "LtSlashFill",
"SlashFill", "BkSlashFill", "LtBkSlashFill", "HatchFill",
"XHatchFill", "InterleaveFill", "WideDotFill", "CloseDotFill"
};
char *TextDirect[] = {
"HorizDir", "VertDir"
};
char *HorizJust[] = {
"LeftText", "CenterText", "RightText"
};
char *VertJust[] = {
"BottomText", "CenterText", "TopText"
};
struct PTS {
int x, y;
}; /* Structure to hold vertex points */
/* Global graphics state, populated once by Initialize() and read by the
   drawing routines below. */
int GraphDriver; /* The Graphics device driver */
int GraphMode; /* The Graphics mode value */
double AspectRatio; /* Aspect ratio of a pixel on the screen*/
int MaxX, MaxY; /* The maximum resolution of the screen */
int MaxColors; /* The maximum # of colors available */
int ErrorCode; /* Reports any graphics errors */
struct palettetype palette; /* Used to read palette info */
using namespace std;
// --- MyGaze definition
// Listener that connects to an EyeTribe-style gaze tracker server through
// gtl::GazeApi and receives streamed gaze samples via the gtl::IGazeListener
// callback interface.
class MyGaze : public gtl::IGazeListener
{
public:
MyGaze();
~MyGaze();
private:
// IGazeListener
// Invoked by the API for every gaze sample received from the server.
void on_gaze_data(gtl::GazeData const & gaze_data);
private:
gtl::GazeApi m_api;
};
// --- MyGaze implementation
MyGaze::MyGaze()
{
// Connect to the server in push mode on the default TCP port (6555)
if (m_api.connect(true))
{
// Enable GazeData notifications
m_api.add_listener(*this);
}
else
{
cout << "Server is not active";
}
}
// Unregisters this listener and drops the server connection.
MyGaze::~MyGaze()
{
m_api.remove_listener(*this);
m_api.disconnect();
}
// When the tracker reports a valid gaze state, averages the smoothed
// left/right eye coordinates, logs the midpoint, and draws a circle there
// using the left eye's pupil size as the radius.
void MyGaze::on_gaze_data(gtl::GazeData const & gaze_data)
{
if (gaze_data.state & gtl::GazeData::GD_STATE_TRACKING_GAZE)
{
gtl::Point2D const & smoothedCoordinatesLeftEye = gaze_data.lefteye.avg; // smoothed data from left eye
gtl::Point2D const & smoothedCoordinatesRightEye = gaze_data.righteye.avg; // smoothed data from right eye
float LeftEyeX = smoothedCoordinatesLeftEye.x;
float LeftEyeY = smoothedCoordinatesLeftEye.y;
float RightEyeX = smoothedCoordinatesRightEye.x;
float RightEyeY = smoothedCoordinatesRightEye.y;
// Move GUI point, do hit-testing, log coordinates, etc.
cout << "x = " << (LeftEyeX + RightEyeX) / 2 << " y = " << (LeftEyeY + RightEyeY) / 2 << endl; //center values for left and right eyes, respectively.
//m_GraphcsObject.DrawEllipse()
circle((LeftEyeX + RightEyeX) / 2, (LeftEyeY + RightEyeY) / 2, gaze_data.lefteye.psize);
}
}
/* One-time setup of the BGI graphics subsystem: auto-detects the driver,
   enters graphics mode, and caches palette/screen metrics in the globals
   above. Exits the process with status 1 if initialization fails. */
void Initialize(void)
{
int xasp, yasp; /* Used to read the aspect ratio*/
GraphDriver = DETECT; /* Request auto-detection */
initgraph(&GraphDriver, &GraphMode, "");
ErrorCode = graphresult(); /* Read result of initialization*/
if (ErrorCode != grOk){ /* Error occured during init */
printf(" Graphics System Error: %s\n", grapherrormsg(ErrorCode));
exit(1);
}
getpalette(&palette); /* Read the palette from board */
MaxColors = getmaxcolor() + 1; /* Read maximum number of colors*/
MaxX = getmaxx();
MaxY = getmaxy(); /* Read size of screen */
getaspectratio(&xasp, &yasp); /* read the hardware aspect */
AspectRatio = (double)xasp / (double)yasp; /* Get correction factor */
}
/* Fills the current viewport with a 10x15 grid of colored bars, then
   randomly remaps palette entries until a key is pressed, and finally
   restores the palette saved by Initialize(). */
void PaletteDemo(void)
{
int i, j, x, y, color;
struct viewporttype vp;
int height, width;
//MainWindow("Palette Demonstration");
//StatusLine("Press any key to continue, ESC to Abort");
getviewsettings(&vp);
width = (vp.right - vp.left) / 15; /* get width of the box */
height = (vp.bottom - vp.top) / 10; /* Get the height of the box */
x = y = 0; /* Start in upper corner */
color = 1; /* Begin at 1st color */
for (j = 0; j<10; ++j){ /* For 10 rows of boxes */
for (i = 0; i<15; ++i){ /* For 15 columns of boxes */
setfillstyle(SOLID_FILL, color++); /* Set the color of box */
bar(x, y, x + width, y + height); /* Draw the box */
x += width + 1; /* Advance to next col */
color = 1 + (color % (MaxColors - 2)); /* Set new color */
} /* End of COLUMN loop */
x = 0; /* Goto 1st column */
y += height + 1; /* Goto next row */
} /* End of ROW loop */
while (!kbhit()){ /* Until user enters a key... */
/* Animate by remapping a random palette slot to a random color value. */
setpalette(1 + random(MaxColors - 2), random(65));
}
setallpalette(&palette); /* Restore the saved palette */
//Pause(); /* Wait for user's response */
}
int main()
{
MyGaze gazeReceiver;
Initialize();
while (true)
{
}
return 0;
} | {
"content_hash": "b16cf1076b51bbd274160f287a3738f2",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 151,
"avg_line_length": 30.11855670103093,
"alnum_prop": 0.5646072223173028,
"repo_name": "vijayrajanna/Live_Tracking_Eye_Tribe",
"id": "b9ea4ff63ae14d28a9c05a37f2ae9b82b203e1d2",
"size": "5843",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Live_Tracking_Eye_Tribe/src/EyeTracker.cpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "7339"
},
{
"name": "C++",
"bytes": "110323"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace DDay.iCal
{
/// <summary>
/// A time period annotated with a free/busy status, as used by
/// VFREEBUSY components.
/// </summary>
public class FreeBusyEntry :
    Period,
    IFreeBusyEntry
{
    // Backing store for the Status property.
    FreeBusyStatus _status;

    /// <summary>Creates an entry whose status defaults to Busy.</summary>
    public FreeBusyEntry() : base() { Initialize(); }

    /// <summary>
    /// Creates an entry covering the given period with the given status.
    /// </summary>
    public FreeBusyEntry(IPeriod period, FreeBusyStatus status) : base()
    {
        Initialize();
        CopyFrom(period);
        Status = status;
    }

    // Establishes the default state shared by all constructors.
    void Initialize()
    {
        Status = FreeBusyStatus.Busy;
    }

    /// <summary>
    /// Copies period data from the given object and, when it also carries
    /// a free/busy status, copies that status too.
    /// </summary>
    public override void CopyFrom(ICopyable obj)
    {
        base.CopyFrom(obj);
        var entry = obj as IFreeBusyEntry;
        if (entry != null)
            Status = entry.Status;
    }

    /// <summary>The free/busy status associated with this period.</summary>
    public virtual FreeBusyStatus Status
    {
        get { return _status; }
        set { _status = value; }
    }
}
}
| {
"content_hash": "27a518df9d9175da22ba2a49f2493193",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 76,
"avg_line_length": 20.316666666666666,
"alnum_prop": 0.49302707136997537,
"repo_name": "nachocove/DDay-iCal-Xamarin",
"id": "a0bc683e0235ad6bd837453dba236e95274e3fb5",
"size": "1219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DDay.iCal/DataTypes/FreeBusyEntry.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C#",
"bytes": "1564724"
},
{
"name": "GAP",
"bytes": "10292"
},
{
"name": "Makefile",
"bytes": "11335"
}
],
"symlink_target": ""
} |
# Standard library dependencies.
require 'logger'
require 'ostruct'
require 'socket'
require 'pp'
require 'tempfile'
require 'uri'
# Third-party gems.
require 'uuid'
require 'diffy'
require 'hashie'
# Roadworker internals. NOTE(review): these appear order-sensitive — core
# extensions and logging load before the DSL and the Route53 wrappers that
# build on them — so do not alphabetize.
require 'roadworker/string-ext'
require 'roadworker/struct-ext'
require 'roadworker/route53-ext'
require 'roadworker/version'
require 'roadworker/log'
require 'roadworker/utils'
require 'roadworker/template-helper'
require 'roadworker/batch'
require 'roadworker/client'
require 'roadworker/collection'
require 'roadworker/dsl'
require 'roadworker/dsl-converter'
require 'roadworker/dsl-tester'
require 'roadworker/route53-exporter'
require 'roadworker/route53-health-check'
require 'roadworker/route53-wrapper'
| {
"content_hash": "4367a491af28e6f5815491c5d62e95b9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 41,
"avg_line_length": 23.571428571428573,
"alnum_prop": 0.8045454545454546,
"repo_name": "winebarrel/roadworker",
"id": "e9d7fc4d07e69f066279513e6cb1b39abf6fa69e",
"size": "660",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/roadworker.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "265583"
}
],
"symlink_target": ""
} |
package com.spun.util;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
public class DualOutputStream extends OutputStream
{
private FileOutputStream o;
private PrintStream display = System.out;
/************************************************************************/
public DualOutputStream()
{
super();
}
/************************************************************************/
public void setOutputStream(String outfile) throws SecurityException, Exception
{
try
{
o = new FileOutputStream(outfile);
display.println("output being redirected to: " + outfile);
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
}
/************************************************************************/
public void write(int b) throws IOException
{
try
{
o.write(b);
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
display.write(b);
}
/************************************************************************/
public void write(byte b[]) throws IOException
{
try
{
o.write(b);
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
display.println(b);
flush();
}
/************************************************************************/
public void write(byte b[], int off, int len) throws IOException
{
try
{
o.write(b, off, len);
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
display.write(b, off, len);
}
/************************************************************************/
public void flush() throws IOException
{
try
{
o.flush();
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
display.flush();
}
/************************************************************************/
public void close() throws IOException
{
try
{
o.close();
}
catch (Exception e)
{
throw ObjectUtils.throwAsError(e);
}
display.close();
}
/************************************************************************/
/************************************************************************/
} | {
"content_hash": "e9eb612d22cb9b99a8c76a70732396cf",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 81,
"avg_line_length": 23.581632653061224,
"alnum_prop": 0.42016443098225875,
"repo_name": "bhagatsingh/ApprovalTests.Java.Maven",
"id": "f254e91c179626625a7a84bb3933fdc0f9ff111a",
"size": "2311",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/spun/util/DualOutputStream.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6755"
},
{
"name": "Java",
"bytes": "1057835"
}
],
"symlink_target": ""
} |
namespace swift {
class SILDebugScope;
class SILBuilder {
friend class SILBuilderWithScope;
SILFunction &F;
/// If this is non-null, the instruction is inserted in the specified
/// basic block, at the specified InsertPt. If null, created instructions
/// are not auto-inserted.
SILBasicBlock *BB;
SILBasicBlock::iterator InsertPt;
const SILDebugScope *CurDebugScope = nullptr;
/// If this pointer is non-null, then any inserted instruction is
/// recorded in this list.
SmallVectorImpl<SILInstruction *> *InsertedInstrs = nullptr;
public:
SILBuilder(SILFunction &F) : F(F), BB(0) {}
SILBuilder(SILFunction &F, SmallVectorImpl<SILInstruction *> *InsertedInstrs)
: F(F), BB(0), InsertedInstrs(InsertedInstrs) {}
explicit SILBuilder(SILInstruction *I,
SmallVectorImpl<SILInstruction *> *InsertedInstrs = 0)
: F(*I->getFunction()), InsertedInstrs(InsertedInstrs) {
setInsertionPoint(I);
}
explicit SILBuilder(SILBasicBlock::iterator I,
SmallVectorImpl<SILInstruction *> *InsertedInstrs = 0)
: SILBuilder(&*I, InsertedInstrs) {}
explicit SILBuilder(SILBasicBlock *BB,
SmallVectorImpl<SILInstruction *> *InsertedInstrs = 0)
: F(*BB->getParent()), InsertedInstrs(InsertedInstrs) {
setInsertionPoint(BB);
}
SILBuilder(SILBasicBlock *BB, SILBasicBlock::iterator InsertPt,
SmallVectorImpl<SILInstruction *> *InsertedInstrs = 0)
: F(*BB->getParent()), InsertedInstrs(InsertedInstrs) {
setInsertionPoint(BB, InsertPt);
}
SILFunction &getFunction() const { return F; }
SILModule &getModule() const { return F.getModule(); }
ASTContext &getASTContext() const { return F.getASTContext(); }
const Lowering::TypeLowering &getTypeLowering(SILType T) const {
return F.getModule().getTypeLowering(T);
}
void setCurrentDebugScope(const SILDebugScope *DS) { CurDebugScope = DS; }
const SILDebugScope *getCurrentDebugScope() const { return CurDebugScope; }
/// Convenience function for building a SILDebugLocation.
SILDebugLocation getSILDebugLocation(SILLocation Loc) {
// FIXME: Audit all uses and enable this assertion.
// assert(getCurrentDebugScope() && "no debug scope");
auto Scope = getCurrentDebugScope();
return SILDebugLocation(Loc, Scope ? Scope : getFunction().getDebugScope());
}
//===--------------------------------------------------------------------===//
// Insertion Point Management
//===--------------------------------------------------------------------===//
bool hasValidInsertionPoint() const { return BB != nullptr; }
SILBasicBlock *getInsertionBB() { return BB; }
SILBasicBlock::iterator getInsertionPoint() { return InsertPt; }
/// insertingAtEndOfBlock - Return true if the insertion point is at the end
/// of the current basic block. False if we're inserting before an existing
/// instruction.
bool insertingAtEndOfBlock() const {
assert(hasValidInsertionPoint() &&
"Must have insertion point to ask about it");
return InsertPt == BB->end();
}
/// clearInsertionPoint - Clear the insertion point: created instructions will
/// not be inserted into a block.
void clearInsertionPoint() { BB = nullptr; }
/// setInsertionPoint - Set the insertion point.
void setInsertionPoint(SILBasicBlock *BB, SILBasicBlock::iterator InsertPt) {
this->BB = BB;
this->InsertPt = InsertPt;
}
/// setInsertionPoint - Set the insertion point to insert before the specified
/// instruction.
void setInsertionPoint(SILInstruction *I) {
setInsertionPoint(I->getParent(), I->getIterator());
}
/// setInsertionPoint - Set the insertion point to insert before the specified
/// instruction.
void setInsertionPoint(SILBasicBlock::iterator IIIter) {
setInsertionPoint(IIIter->getParent(), IIIter);
}
/// setInsertionPoint - Set the insertion point to insert at the end of the
/// specified block.
void setInsertionPoint(SILBasicBlock *BB) {
setInsertionPoint(BB, BB->end());
}
/// setInsertionPoint - Set the insertion point to insert at the end of the
/// specified block.
void setInsertionPoint(SILFunction::iterator BBIter) {
setInsertionPoint(&*BBIter);
}
SILBasicBlock *getInsertionPoint() const { return BB; }
//===--------------------------------------------------------------------===//
// Instruction Tracking
//===--------------------------------------------------------------------===//
/// Clients of SILBuilder who want to know about any newly created
/// instructions can install a SmallVector into the builder to collect them.
void setTrackingList(SmallVectorImpl<SILInstruction *> *II) {
InsertedInstrs = II;
}
SmallVectorImpl<SILInstruction *> *getTrackingList() {
return InsertedInstrs;
}
//===--------------------------------------------------------------------===//
// Type remapping
//===--------------------------------------------------------------------===//
static SILType getPartialApplyResultType(SILType Ty, unsigned ArgCount,
SILModule &M,
ArrayRef<Substitution> subs);
//===--------------------------------------------------------------------===//
// CFG Manipulation
//===--------------------------------------------------------------------===//
/// moveBlockTo - Move a block to immediately before the given iterator.
void moveBlockTo(SILBasicBlock *BB, SILFunction::iterator IP) {
assert(SILFunction::iterator(BB) != IP && "moving block before itself?");
SILFunction *F = BB->getParent();
auto &Blocks = F->getBlocks();
Blocks.remove(BB);
Blocks.insert(IP, BB);
}
/// moveBlockTo - Move \p BB to immediately before \p Before.
void moveBlockTo(SILBasicBlock *BB, SILBasicBlock *Before) {
moveBlockTo(BB, Before->getIterator());
}
/// moveBlockToEnd - Reorder a block to the end of its containing function.
void moveBlockToEnd(SILBasicBlock *BB) {
moveBlockTo(BB, BB->getParent()->end());
}
/// \brief Move the insertion point to the end of the given block.
///
/// Assumes that no insertion point is currently active.
void emitBlock(SILBasicBlock *BB) {
assert(!hasValidInsertionPoint());
setInsertionPoint(BB);
}
/// \brief Branch to the given block if there's an active insertion point,
/// then move the insertion point to the end of that block.
void emitBlock(SILBasicBlock *BB, SILLocation BranchLoc);
/// splitBlockForFallthrough - Prepare for the insertion of a terminator. If
/// the builder's insertion point is at the end of the current block (as when
/// SILGen is creating the initial code for a function), just create and
/// return a new basic block that will be later used for the continue point.
///
/// If the insertion point is valid (i.e., pointing to an existing
/// instruction) then split the block at that instruction and return the
/// continuation block.
SILBasicBlock *splitBlockForFallthrough();
//===--------------------------------------------------------------------===//
// SILInstruction Creation Methods
//===--------------------------------------------------------------------===//
AllocStackInst *createAllocStack(SILLocation Loc, SILType elementType,
SILDebugVariable Var = SILDebugVariable()) {
Loc.markAsPrologue();
return insert(AllocStackInst::create(getSILDebugLocation(Loc),
elementType, F, Var));
}
AllocRefInst *createAllocRef(SILLocation Loc, SILType elementType, bool objc,
bool canAllocOnStack) {
// AllocRefInsts expand to function calls and can therefore not be
// counted towards the function prologue.
assert(!Loc.isInPrologue());
return insert(new (F.getModule()) AllocRefInst(
getSILDebugLocation(Loc), elementType, F, objc, canAllocOnStack));
}
AllocRefDynamicInst *createAllocRefDynamic(SILLocation Loc, SILValue operand,
SILType type, bool objc) {
// AllocRefDynamicInsts expand to function calls and can therefore
// not be counted towards the function prologue.
assert(!Loc.isInPrologue());
return insert(new (F.getModule()) AllocRefDynamicInst(
getSILDebugLocation(Loc), operand, type, objc));
}
AllocValueBufferInst *
createAllocValueBuffer(SILLocation Loc, SILType valueType, SILValue operand) {
return insert(new (F.getModule()) AllocValueBufferInst(
getSILDebugLocation(Loc), valueType, operand));
}
AllocBoxInst *createAllocBox(SILLocation Loc, SILType ElementType,
SILDebugVariable Var = SILDebugVariable()) {
Loc.markAsPrologue();
return insert(
AllocBoxInst::create(getSILDebugLocation(Loc), ElementType, F, Var));
}
AllocExistentialBoxInst *
createAllocExistentialBox(SILLocation Loc, SILType ExistentialType,
CanType ConcreteType,
ArrayRef<ProtocolConformanceRef> Conformances) {
return insert(AllocExistentialBoxInst::create(
getSILDebugLocation(Loc), ExistentialType, ConcreteType,
Conformances, &F));
}
ApplyInst *createApply(SILLocation Loc, SILValue Fn, SILType SubstFnTy,
SILType Result, ArrayRef<Substitution> Subs,
ArrayRef<SILValue> Args, bool isNonThrowing) {
return insert(ApplyInst::create(getSILDebugLocation(Loc), Fn, SubstFnTy,
Result, Subs, Args, isNonThrowing, F));
}
ApplyInst *createApply(SILLocation Loc, SILValue Fn, ArrayRef<SILValue> Args,
bool isNonThrowing) {
auto FnTy = Fn->getType();
return createApply(Loc, Fn, FnTy,
FnTy.castTo<SILFunctionType>()->getSILResult(),
ArrayRef<Substitution>(), Args, isNonThrowing);
}
TryApplyInst *createTryApply(SILLocation Loc, SILValue fn, SILType substFnTy,
ArrayRef<Substitution> subs,
ArrayRef<SILValue> args, SILBasicBlock *normalBB,
SILBasicBlock *errorBB) {
return insertTerminator(TryApplyInst::create(getSILDebugLocation(Loc),
fn, substFnTy, subs, args,
normalBB, errorBB, F));
}
PartialApplyInst *createPartialApply(SILLocation Loc, SILValue Fn,
SILType SubstFnTy,
ArrayRef<Substitution> Subs,
ArrayRef<SILValue> Args,
SILType ClosureTy) {
return insert(PartialApplyInst::create(
getSILDebugLocation(Loc), Fn, SubstFnTy, Subs, Args, ClosureTy, F));
}
BuiltinInst *createBuiltin(SILLocation Loc, Identifier Name, SILType ResultTy,
ArrayRef<Substitution> Subs,
ArrayRef<SILValue> Args) {
return insert(BuiltinInst::create(getSILDebugLocation(Loc), Name,
ResultTy, Subs, Args, F));
}
/// Create a binary function with the signature: OpdTy, OpdTy -> ResultTy.
///
/// The builtin's name is mangled from \p Name by appending a suffix that
/// encodes the operand type: "_Word" for the target's word-size builtin
/// integer, "_IntN" for a fixed-width builtin integer of N bits, or
/// "_RawPointer" when the operand is Builtin.RawPointer (the only other
/// operand type accepted, enforced by the assert below).
BuiltinInst *createBuiltinBinaryFunction(SILLocation Loc, StringRef Name,
SILType OpdTy, SILType ResultTy,
ArrayRef<SILValue> Args) {
auto &C = getASTContext();
llvm::SmallString<16> NameStr = Name;
if (auto BuiltinIntTy =
dyn_cast<BuiltinIntegerType>(OpdTy.getSwiftRValueType())) {
// Word-sized integers get a symbolic suffix; fixed-width ones encode
// their bit count.
if (BuiltinIntTy == BuiltinIntegerType::getWordType(getASTContext())) {
NameStr += "_Word";
} else {
unsigned NumBits = BuiltinIntTy->getWidth().getFixedWidth();
NameStr += "_Int" + llvm::utostr(NumBits);
}
} else {
assert(OpdTy.getSwiftRValueType() == C.TheRawPointerType);
NameStr += "_RawPointer";
}
auto Ident = C.getIdentifier(NameStr);
return insert(BuiltinInst::create(getSILDebugLocation(Loc), Ident,
ResultTy, {}, Args, F));
}
/// Create a binary function with the signature:
/// OpdTy, OpdTy, Int1 -> (OpdTy, Int1)
///
/// The two leading operands must share a type and the trailing operand must
/// be a Builtin.Int1 (both enforced by the asserts below). The result pairs
/// the operand type with an Int1 — presumably the overflow flag, matching
/// the "...WithOverflow" naming; confirm against the builtin definitions.
BuiltinInst *
createBuiltinBinaryFunctionWithOverflow(SILLocation Loc, StringRef Name,
ArrayRef<SILValue> Args) {
assert(Args.size() == 3 && "Need three arguments");
assert(Args[0]->getType() == Args[1]->getType() &&
"Binary operands must match");
assert(Args[2]->getType().is<BuiltinIntegerType>() &&
Args[2]->getType().getSwiftRValueType()->isBuiltinIntegerType(1) &&
"Must have a third Int1 operand");
SILType OpdTy = Args[0]->getType();
SILType Int1Ty = Args[2]->getType();
// Build the (OpdTy, Int1) tuple result type in the AST, then wrap it as a
// primitive object SILType before delegating to the plain binary factory.
TupleTypeElt ResultElts[] = {OpdTy.getSwiftRValueType(),
Int1Ty.getSwiftRValueType()};
Type ResultTy = TupleType::get(ResultElts, getASTContext());
SILType SILResultTy =
SILType::getPrimitiveObjectType(ResultTy->getCanonicalType());
return createBuiltinBinaryFunction(Loc, Name, OpdTy, SILResultTy, Args);
}
  /// Create and insert a function_ref instruction referencing \p f.
  FunctionRefInst *createFunctionRef(SILLocation Loc, SILFunction *f) {
    return insert(new (F.getModule())
                      FunctionRefInst(getSILDebugLocation(Loc), f));
  }
  /// Create and insert an alloc_global instruction for the global \p g.
  AllocGlobalInst *createAllocGlobal(SILLocation Loc, SILGlobalVariable *g) {
    return insert(new (F.getModule())
                      AllocGlobalInst(getSILDebugLocation(Loc), g));
  }
  /// Create and insert a global_addr instruction for the global \p g.
  GlobalAddrInst *createGlobalAddr(SILLocation Loc, SILGlobalVariable *g) {
    return insert(new (F.getModule())
                      GlobalAddrInst(getSILDebugLocation(Loc), g));
  }
  /// Create an integer_literal from the AST literal expression \p E,
  /// which also supplies the debug location.
  IntegerLiteralInst *createIntegerLiteral(IntegerLiteralExpr *E) {
    return insert(IntegerLiteralInst::create(E, getSILDebugLocation(E), F));
  }
  /// Create an integer_literal of type \p Ty holding \p Value.
  IntegerLiteralInst *createIntegerLiteral(SILLocation Loc, SILType Ty,
                                           intmax_t Value) {
    return insert(
        IntegerLiteralInst::create(getSILDebugLocation(Loc), Ty, Value, F));
  }
  /// Create an integer_literal of type \p Ty holding the
  /// arbitrary-precision \p Value.
  IntegerLiteralInst *createIntegerLiteral(SILLocation Loc, SILType Ty,
                                           const APInt &Value) {
    return insert(
        IntegerLiteralInst::create(getSILDebugLocation(Loc), Ty, Value, F));
  }
  /// Create a float_literal from the AST literal expression \p E.
  FloatLiteralInst *createFloatLiteral(FloatLiteralExpr *E) {
    return insert(FloatLiteralInst::create(E, getSILDebugLocation(E), F));
  }
  /// Create a float_literal of type \p Ty holding \p Value.
  FloatLiteralInst *createFloatLiteral(SILLocation Loc, SILType Ty,
                                       const APFloat &Value) {
    return insert(
        FloatLiteralInst::create(getSILDebugLocation(Loc), Ty, Value, F));
  }
  /// Create a string_literal with the given \p text and \p encoding.
  StringLiteralInst *createStringLiteral(SILLocation Loc, StringRef text,
                                         StringLiteralInst::Encoding encoding) {
    return insert(StringLiteralInst::create(getSILDebugLocation(Loc), text,
                                            encoding, F));
  }
  /// Create a string_literal from a Twine; the text is first materialized
  /// into a stack buffer.
  StringLiteralInst *createStringLiteral(SILLocation Loc, const Twine &text,
                                         StringLiteralInst::Encoding encoding) {
    SmallVector<char, 256> Out;
    return insert(StringLiteralInst::create(
        getSILDebugLocation(Loc), text.toStringRef(Out), encoding, F));
  }
  /// Create a load instruction reading from the address \p LV.
  LoadInst *createLoad(SILLocation Loc, SILValue LV) {
    return insert(new (F.getModule())
                      LoadInst(getSILDebugLocation(Loc), LV));
  }
  /// Create a store instruction writing \p Src to the address \p DestAddr.
  StoreInst *createStore(SILLocation Loc, SILValue Src, SILValue DestAddr) {
    return insert(new (F.getModule())
                      StoreInst(getSILDebugLocation(Loc), Src, DestAddr));
  }
  /// Create an assign instruction writing \p Src to the address \p DestAddr.
  AssignInst *createAssign(SILLocation Loc, SILValue Src, SILValue DestAddr) {
    return insert(new (F.getModule())
                      AssignInst(getSILDebugLocation(Loc), Src, DestAddr));
  }
  /// Create a mark_uninitialized instruction of kind \p k on \p src.
  MarkUninitializedInst *
  createMarkUninitialized(SILLocation Loc, SILValue src,
                          MarkUninitializedInst::Kind k) {
    return insert(new (F.getModule()) MarkUninitializedInst(
        getSILDebugLocation(Loc), src, k));
  }
  /// Convenience wrapper using the Var kind.
  MarkUninitializedInst *createMarkUninitializedVar(SILLocation Loc,
                                                    SILValue src) {
    return createMarkUninitialized(Loc, src, MarkUninitializedInst::Var);
  }
  /// Convenience wrapper using the RootSelf kind.
  MarkUninitializedInst *createMarkUninitializedRootSelf(SILLocation Loc,
                                                         SILValue src) {
    return createMarkUninitialized(Loc, src, MarkUninitializedInst::RootSelf);
  }
  /// Create a mark_uninitialized_behavior instruction from the given
  /// init-storage and setter functions with their substitutions.
  MarkUninitializedBehaviorInst *
  createMarkUninitializedBehavior(SILLocation Loc,
                                  SILValue initStorageFunc,
                                  ArrayRef<Substitution> initStorageSubs,
                                  SILValue storage,
                                  SILValue setterFunc,
                                  ArrayRef<Substitution> setterSubs,
                                  SILValue self,
                                  SILType ty) {
    return insert(MarkUninitializedBehaviorInst::create(F.getModule(),
                                                        getSILDebugLocation(Loc),
                                                        initStorageFunc, initStorageSubs, storage,
                                                        setterFunc, setterSubs, self, ty));
  }
  /// Create a mark_function_escape instruction over the values \p vars.
  MarkFunctionEscapeInst *createMarkFunctionEscape(SILLocation Loc,
                                                   ArrayRef<SILValue> vars) {
    return insert(
        MarkFunctionEscapeInst::create(getSILDebugLocation(Loc), vars, F));
  }
  /// Create a debug_value instruction describing \p src with the debug
  /// variable info \p Var.
  DebugValueInst *createDebugValue(SILLocation Loc, SILValue src,
                                   SILDebugVariable Var = SILDebugVariable()) {
    return insert(DebugValueInst::create(getSILDebugLocation(Loc), src,
                                         F.getModule(), Var));
  }
  /// Create a debug_value_addr instruction describing the address \p src.
  DebugValueAddrInst *
  createDebugValueAddr(SILLocation Loc, SILValue src,
                       SILDebugVariable Var = SILDebugVariable()) {
    return insert(DebugValueAddrInst::create(getSILDebugLocation(Loc), src,
                                             F.getModule(), Var));
  }
  /// Create a load_weak instruction reading from \p src; \p isTake
  /// selects the [take] form.
  LoadWeakInst *createLoadWeak(SILLocation Loc, SILValue src, IsTake_t isTake) {
    return insert(new (F.getModule())
                      LoadWeakInst(getSILDebugLocation(Loc), src, isTake));
  }
  /// Create a store_weak instruction writing \p value to \p dest; \p isInit
  /// selects the [initialization] form.
  StoreWeakInst *createStoreWeak(SILLocation Loc, SILValue value, SILValue dest,
                                 IsInitialization_t isInit) {
    return insert(new (F.getModule()) StoreWeakInst(getSILDebugLocation(Loc),
                                                    value, dest, isInit));
  }
  /// Create a load_unowned instruction reading from \p src.
  LoadUnownedInst *createLoadUnowned(SILLocation loc, SILValue src,
                                     IsTake_t isTake) {
    return insert(new (F.getModule())
                      LoadUnownedInst(getSILDebugLocation(loc), src, isTake));
  }
  /// Create a store_unowned instruction writing \p value to \p dest.
  StoreUnownedInst *createStoreUnowned(SILLocation loc, SILValue value,
                                       SILValue dest,
                                       IsInitialization_t isInit) {
    return insert(new (F.getModule())
                      StoreUnownedInst(getSILDebugLocation(loc),
                                       value, dest, isInit));
  }
  /// Create a copy_addr instruction from \p srcAddr to \p destAddr.
  /// Source and destination addresses must have the same type.
  CopyAddrInst *createCopyAddr(SILLocation Loc, SILValue srcAddr,
                               SILValue destAddr, IsTake_t isTake,
                               IsInitialization_t isInitialize) {
    assert(srcAddr->getType() == destAddr->getType());
    return insert(new (F.getModule()) CopyAddrInst(
        getSILDebugLocation(Loc), srcAddr, destAddr, isTake, isInitialize));
  }
  /// Create a convert_function instruction converting \p Op to type \p Ty.
  ConvertFunctionInst *createConvertFunction(SILLocation Loc, SILValue Op,
                                             SILType Ty) {
    return insert(new (F.getModule())
                      ConvertFunctionInst(getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a thin_function_to_pointer instruction.
  ThinFunctionToPointerInst *
  createThinFunctionToPointer(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) ThinFunctionToPointerInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a pointer_to_thin_function instruction.
  PointerToThinFunctionInst *
  createPointerToThinFunction(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) PointerToThinFunctionInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an upcast instruction converting \p Op to type \p Ty.
  UpcastInst *createUpcast(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule())
                      UpcastInst(getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an address_to_pointer instruction.
  AddressToPointerInst *createAddressToPointer(SILLocation Loc, SILValue Op,
                                               SILType Ty) {
    return insert(new (F.getModule()) AddressToPointerInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a pointer_to_address instruction.
  PointerToAddressInst *createPointerToAddress(SILLocation Loc, SILValue Op,
                                               SILType Ty) {
    return insert(new (F.getModule()) PointerToAddressInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an unchecked_ref_cast instruction.
  UncheckedRefCastInst *createUncheckedRefCast(SILLocation Loc, SILValue Op,
                                               SILType Ty) {
    return insert(new (F.getModule()) UncheckedRefCastInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an unchecked_ref_cast_addr instruction between the given
  /// source and target addresses and formal types.
  UncheckedRefCastAddrInst *
  createUncheckedRefCastAddr(SILLocation Loc, SILValue src, CanType sourceType,
                             SILValue dest, CanType targetType) {
    return insert(new (F.getModule()) UncheckedRefCastAddrInst(
        getSILDebugLocation(Loc), src, sourceType, dest, targetType));
  }
  /// Create an unchecked_addr_cast instruction.
  UncheckedAddrCastInst *createUncheckedAddrCast(SILLocation Loc, SILValue Op,
                                                 SILType Ty) {
    return insert(new (F.getModule()) UncheckedAddrCastInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an unchecked_trivial_bit_cast instruction.
  UncheckedTrivialBitCastInst *
  createUncheckedTrivialBitCast(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) UncheckedTrivialBitCastInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an unchecked_bitwise_cast instruction.
  UncheckedBitwiseCastInst *
  createUncheckedBitwiseCast(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) UncheckedBitwiseCastInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
RefToBridgeObjectInst *createRefToBridgeObject(SILLocation Loc, SILValue Ref,
SILValue Bits) {
auto Ty = SILType::getBridgeObjectType(getASTContext());
return insert(new (F.getModule()) RefToBridgeObjectInst(
getSILDebugLocation(Loc), Ref, Bits, Ty));
}
  /// Create a bridge_object_to_ref instruction producing type \p Ty.
  BridgeObjectToRefInst *createBridgeObjectToRef(SILLocation Loc, SILValue Op,
                                                 SILType Ty) {
    return insert(new (F.getModule()) BridgeObjectToRefInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Convenience overload producing the Builtin.Word type.
  BridgeObjectToWordInst *createBridgeObjectToWord(SILLocation Loc,
                                                   SILValue Op) {
    auto Ty = SILType::getBuiltinWordType(getASTContext());
    return createBridgeObjectToWord(Loc, Op, Ty);
  }
  /// Create a bridge_object_to_word instruction producing type \p Ty.
  BridgeObjectToWordInst *createBridgeObjectToWord(SILLocation Loc, SILValue Op,
                                                   SILType Ty) {
    return insert(new (F.getModule()) BridgeObjectToWordInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a ref_to_raw_pointer instruction.
  RefToRawPointerInst *createRefToRawPointer(SILLocation Loc, SILValue Op,
                                             SILType Ty) {
    return insert(new (F.getModule())
                      RefToRawPointerInst(getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a raw_pointer_to_ref instruction.
  RawPointerToRefInst *createRawPointerToRef(SILLocation Loc, SILValue Op,
                                             SILType Ty) {
    return insert(new (F.getModule())
                      RawPointerToRefInst(getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a thin_to_thick_function instruction.
  ThinToThickFunctionInst *createThinToThickFunction(SILLocation Loc,
                                                     SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) ThinToThickFunctionInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a thick_to_objc_metatype instruction.
  ThickToObjCMetatypeInst *createThickToObjCMetatype(SILLocation Loc,
                                                     SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) ThickToObjCMetatypeInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an objc_to_thick_metatype instruction.
  ObjCToThickMetatypeInst *createObjCToThickMetatype(SILLocation Loc,
                                                     SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) ObjCToThickMetatypeInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an objc_protocol instruction referencing protocol \p P.
  ObjCProtocolInst *createObjCProtocol(SILLocation Loc, ProtocolDecl *P,
                                       SILType Ty) {
    return insert(new (F.getModule())
                      ObjCProtocolInst(getSILDebugLocation(Loc), P, Ty));
  }
  /// Create an unowned_to_ref instruction.
  UnownedToRefInst *createUnownedToRef(SILLocation Loc, SILValue op,
                                       SILType ty) {
    return insert(new (F.getModule())
                      UnownedToRefInst(getSILDebugLocation(Loc), op, ty));
  }
  /// Create a ref_to_unowned instruction.
  RefToUnownedInst *createRefToUnowned(SILLocation Loc, SILValue op,
                                       SILType ty) {
    return insert(new (F.getModule())
                      RefToUnownedInst(getSILDebugLocation(Loc), op, ty));
  }
  /// Create an unmanaged_to_ref instruction.
  UnmanagedToRefInst *createUnmanagedToRef(SILLocation Loc, SILValue op,
                                           SILType ty) {
    return insert(new (F.getModule())
                      UnmanagedToRefInst(getSILDebugLocation(Loc), op, ty));
  }
  /// Create a ref_to_unmanaged instruction.
  RefToUnmanagedInst *createRefToUnmanaged(SILLocation Loc, SILValue op,
                                           SILType ty) {
    return insert(new (F.getModule())
                      RefToUnmanagedInst(getSILDebugLocation(Loc), op, ty));
  }
  /// Create an is_nonnull instruction on \p operand; the result is a
  /// Builtin.Int1.
  IsNonnullInst *createIsNonnull(SILLocation Loc, SILValue operand) {
    return insert(new (F.getModule()) IsNonnullInst(
        getSILDebugLocation(Loc), operand,
        SILType::getBuiltinIntegerType(1, getASTContext())));
  }
  /// Create an unconditional_checked_cast of \p op to \p destTy.
  UnconditionalCheckedCastInst *
  createUnconditionalCheckedCast(SILLocation Loc, SILValue op, SILType destTy) {
    return insert(new (F.getModule()) UnconditionalCheckedCastInst(
        getSILDebugLocation(Loc), op, destTy));
  }
  /// Create an unconditional_checked_cast_addr between the given addresses,
  /// consuming the source per \p consumption.
  UnconditionalCheckedCastAddrInst *createUnconditionalCheckedCastAddr(
      SILLocation Loc, CastConsumptionKind consumption, SILValue src,
      CanType sourceType, SILValue dest, CanType targetType) {
    return insert(new (F.getModule()) UnconditionalCheckedCastAddrInst(
        getSILDebugLocation(Loc), consumption, src, sourceType, dest,
        targetType));
  }
  /// Create a retain_value instruction on \p operand.
  RetainValueInst *createRetainValue(SILLocation Loc, SILValue operand) {
    return insert(new (F.getModule())
                      RetainValueInst(getSILDebugLocation(Loc), operand));
  }
  /// Create a release_value instruction on \p operand.
  ReleaseValueInst *createReleaseValue(SILLocation Loc, SILValue operand) {
    return insert(new (F.getModule())
                      ReleaseValueInst(getSILDebugLocation(Loc), operand));
  }
  /// Create an autorelease_value instruction on \p operand.
  AutoreleaseValueInst *createAutoreleaseValue(SILLocation Loc,
                                               SILValue operand) {
    return insert(new (F.getModule()) AutoreleaseValueInst(
        getSILDebugLocation(Loc), operand));
  }
  /// Create a set_deallocating instruction on \p operand.
  SetDeallocatingInst *createSetDeallocating(SILLocation Loc,
                                             SILValue operand) {
    return insert(new (F.getModule()) SetDeallocatingInst(
        getSILDebugLocation(Loc), operand));
  }
  /// Create a struct instruction of type \p Ty from \p Elements.
  StructInst *createStruct(SILLocation Loc, SILType Ty,
                           ArrayRef<SILValue> Elements) {
    return insert(
        StructInst::create(getSILDebugLocation(Loc), Ty, Elements, F));
  }
  /// Create a tuple instruction of type \p Ty from \p Elements.
  TupleInst *createTuple(SILLocation Loc, SILType Ty,
                         ArrayRef<SILValue> Elements) {
    return insert(
        TupleInst::create(getSILDebugLocation(Loc), Ty, Elements, F));
  }
  /// Overload that derives the tuple type from the elements; defined
  /// out of line.
  TupleInst *createTuple(SILLocation loc, ArrayRef<SILValue> elts);
  /// Create an enum instruction injecting \p Operand into case \p Element
  /// of the enum type \p Ty. Operand may be null for no-payload cases.
  EnumInst *createEnum(SILLocation Loc, SILValue Operand,
                       EnumElementDecl *Element, SILType Ty) {
    return insert(new (F.getModule()) EnumInst(getSILDebugLocation(Loc),
                                               Operand, Element, Ty));
  }
  /// Inject a loadable value into the corresponding optional type.
  EnumInst *createOptionalSome(SILLocation Loc, SILValue operand, SILType ty) {
    return createOptionalSome(Loc, operand, ty.getOptionalTypeKind(), ty);
  }
  /// Inject a loadable value into the corresponding optional type.
  /// \p optKind must match the optional kind of \p ty.
  EnumInst *createOptionalSome(SILLocation Loc, SILValue operand,
                               OptionalTypeKind optKind, SILType ty) {
    assert(ty.getOptionalTypeKind() == optKind);
    auto someDecl = F.getModule().getASTContext().getOptionalSomeDecl(optKind);
    return createEnum(Loc, operand, someDecl, ty);
  }
  /// Create the nil value of a loadable optional type.
  EnumInst *createOptionalNone(SILLocation Loc, SILType ty) {
    return createOptionalNone(Loc, ty.getOptionalTypeKind(), ty);
  }
  /// Create the nil value of a loadable optional type.
  /// \p optKind must match the optional kind of \p ty.
  EnumInst *createOptionalNone(SILLocation Loc, OptionalTypeKind optKind,
                               SILType ty) {
    assert(ty.getOptionalTypeKind() == optKind);
    auto noneDecl = F.getModule().getASTContext().getOptionalNoneDecl(optKind);
    return createEnum(Loc, nullptr, noneDecl, ty);
  }
  /// Create an init_enum_data_addr instruction projecting the payload
  /// address for case \p Element out of the enum address \p Operand.
  InitEnumDataAddrInst *createInitEnumDataAddr(SILLocation Loc,
                                               SILValue Operand,
                                               EnumElementDecl *Element,
                                               SILType Ty) {
    return insert(new (F.getModule()) InitEnumDataAddrInst(
        getSILDebugLocation(Loc), Operand, Element, Ty));
  }
  /// Create an unchecked_enum_data instruction extracting the payload of
  /// case \p Element from \p Operand.
  UncheckedEnumDataInst *createUncheckedEnumData(SILLocation Loc,
                                                 SILValue Operand,
                                                 EnumElementDecl *Element,
                                                 SILType Ty) {
    return insert(new (F.getModule()) UncheckedEnumDataInst(
        getSILDebugLocation(Loc), Operand, Element, Ty));
  }
  /// Convenience overload deriving the payload type from the operand's
  /// enum type.
  UncheckedEnumDataInst *createUncheckedEnumData(SILLocation Loc,
                                                 SILValue Operand,
                                                 EnumElementDecl *Element) {
    SILType EltType =
        Operand->getType().getEnumElementType(Element, getModule());
    return createUncheckedEnumData(Loc, Operand, Element, EltType);
  }
  /// Create an unchecked_take_enum_data_addr instruction projecting the
  /// payload address for case \p Element out of \p Operand.
  UncheckedTakeEnumDataAddrInst *
  createUncheckedTakeEnumDataAddr(SILLocation Loc, SILValue Operand,
                                  EnumElementDecl *Element, SILType Ty) {
    return insert(new (F.getModule()) UncheckedTakeEnumDataAddrInst(
        getSILDebugLocation(Loc), Operand, Element, Ty));
  }
  /// Convenience overload deriving the payload type from the operand's
  /// enum type.
  UncheckedTakeEnumDataAddrInst *
  createUncheckedTakeEnumDataAddr(SILLocation Loc, SILValue Operand,
                                  EnumElementDecl *Element) {
    SILType EltType =
        Operand->getType().getEnumElementType(Element, getModule());
    return createUncheckedTakeEnumDataAddr(Loc, Operand, Element, EltType);
  }
  /// Create an inject_enum_addr instruction tagging the enum at address
  /// \p Operand with case \p Element.
  InjectEnumAddrInst *createInjectEnumAddr(SILLocation Loc, SILValue Operand,
                                           EnumElementDecl *Element) {
    return insert(new (F.getModule()) InjectEnumAddrInst(
        getSILDebugLocation(Loc), Operand, Element));
  }
  /// Create a select_enum instruction of result type \p Ty choosing among
  /// \p CaseValues with default \p DefaultValue.
  SelectEnumInst *createSelectEnum(
      SILLocation Loc, SILValue Operand, SILType Ty, SILValue DefaultValue,
      ArrayRef<std::pair<EnumElementDecl *, SILValue>> CaseValues) {
    return insert(SelectEnumInst::create(getSILDebugLocation(Loc), Operand,
                                         Ty, DefaultValue, CaseValues, F));
  }
  /// Create a select_enum_addr instruction; like select_enum but the
  /// operand is an enum address.
  SelectEnumAddrInst *createSelectEnumAddr(
      SILLocation Loc, SILValue Operand, SILType Ty, SILValue DefaultValue,
      ArrayRef<std::pair<EnumElementDecl *, SILValue>> CaseValues) {
    return insert(SelectEnumAddrInst::create(
        getSILDebugLocation(Loc), Operand, Ty, DefaultValue, CaseValues, F));
  }
  /// Create a select_value instruction choosing among value/result pairs
  /// with default \p DefaultResult.
  SelectValueInst *createSelectValue(
      SILLocation Loc, SILValue Operand, SILType Ty, SILValue DefaultResult,
      ArrayRef<std::pair<SILValue, SILValue>> CaseValuesAndResults) {
    return insert(SelectValueInst::create(getSILDebugLocation(Loc), Operand,
                                          Ty, DefaultResult,
                                          CaseValuesAndResults, F));
  }
  /// Create a tuple_extract instruction extracting field \p FieldNo of
  /// tuple \p Operand with result type \p ResultTy.
  TupleExtractInst *createTupleExtract(SILLocation Loc, SILValue Operand,
                                       unsigned FieldNo, SILType ResultTy) {
    return insert(new (F.getModule()) TupleExtractInst(
        getSILDebugLocation(Loc), Operand, FieldNo, ResultTy));
  }
  /// Convenience overload deriving the result type from the operand's
  /// tuple type.
  TupleExtractInst *createTupleExtract(SILLocation Loc, SILValue Operand,
                                       unsigned FieldNo) {
    auto type = Operand->getType().getTupleElementType(FieldNo);
    return createTupleExtract(Loc, Operand, FieldNo, type);
  }
TupleElementAddrInst *createTupleElementAddr(SILLocation Loc,
SILValue Operand,
unsigned FieldNo,
SILType ResultTy) {
return insert(new (F.getModule()) TupleElementAddrInst(
getSILDebugLocation(Loc), Operand, FieldNo, ResultTy));
}
TupleElementAddrInst *
createTupleElementAddr(SILLocation Loc, SILValue Operand, unsigned FieldNo) {
return insert(new (F.getModule()) TupleElementAddrInst(
getSILDebugLocation(Loc), Operand, FieldNo,
Operand->getType().getTupleElementType(FieldNo)));
}
  /// Create a struct_extract instruction extracting \p Field from
  /// \p Operand with result type \p ResultTy.
  StructExtractInst *createStructExtract(SILLocation Loc, SILValue Operand,
                                         VarDecl *Field, SILType ResultTy) {
    return insert(new (F.getModule()) StructExtractInst(
        getSILDebugLocation(Loc), Operand, Field, ResultTy));
  }
  /// Convenience overload deriving the result type from the field.
  StructExtractInst *createStructExtract(SILLocation Loc, SILValue Operand,
                                         VarDecl *Field) {
    auto type = Operand->getType().getFieldType(Field, F.getModule());
    return createStructExtract(Loc, Operand, Field, type);
  }
  /// Create a struct_element_addr instruction projecting the address of
  /// \p Field out of the struct address \p Operand.
  StructElementAddrInst *createStructElementAddr(SILLocation Loc,
                                                 SILValue Operand,
                                                 VarDecl *Field,
                                                 SILType ResultTy) {
    return insert(new (F.getModule()) StructElementAddrInst(
        getSILDebugLocation(Loc), Operand, Field, ResultTy));
  }
  /// Convenience overload deriving the result type from the field.
  StructElementAddrInst *
  createStructElementAddr(SILLocation Loc, SILValue Operand, VarDecl *Field) {
    auto ResultTy = Operand->getType().getFieldType(Field, F.getModule());
    return createStructElementAddr(Loc, Operand, Field, ResultTy);
  }
  /// Create a ref_element_addr instruction projecting the address of
  /// \p Field out of the class reference \p Operand.
  RefElementAddrInst *createRefElementAddr(SILLocation Loc, SILValue Operand,
                                           VarDecl *Field, SILType ResultTy) {
    return insert(new (F.getModule()) RefElementAddrInst(
        getSILDebugLocation(Loc), Operand, Field, ResultTy));
  }
  /// Convenience overload deriving the result type from the field.
  RefElementAddrInst *createRefElementAddr(SILLocation Loc, SILValue Operand,
                                           VarDecl *Field) {
    auto ResultTy = Operand->getType().getFieldType(Field, F.getModule());
    return createRefElementAddr(Loc, Operand, Field, ResultTy);
  }
  /// Create a class_method instruction looking up \p Member on \p Operand
  /// with method type \p MethodTy.
  ClassMethodInst *createClassMethod(SILLocation Loc, SILValue Operand,
                                     SILDeclRef Member, SILType MethodTy,
                                     bool Volatile = false) {
    return insert(new (F.getModule()) ClassMethodInst(
        getSILDebugLocation(Loc), Operand, Member, MethodTy, Volatile));
  }
  /// Convenience overload deriving the method type from the member's
  /// constant override type.
  ClassMethodInst *createClassMethod(SILLocation Loc, SILValue Operand,
                                     SILDeclRef Member, bool Volatile = false) {
    auto MethodTy = getModule().Types.getConstantOverrideType(Member);
    return createClassMethod(Loc, Operand, Member,
                             SILType::getPrimitiveObjectType(MethodTy),
                             Volatile);
  }
  /// Emit a class_method reference to the least derived overridden decl for
  /// the given method, and upcast the "self" pointer to the matching superclass
  /// type.
  std::pair<ClassMethodInst *, SILValue> emitClassMethod(SILLocation Loc,
                                                         SILValue Self,
                                                         SILDeclRef Member,
                                                         bool Volatile = false);
  /// Create a super_method instruction looking up \p Member on \p Operand.
  SuperMethodInst *createSuperMethod(SILLocation Loc, SILValue Operand,
                                     SILDeclRef Member, SILType MethodTy,
                                     bool Volatile = false) {
    return insert(new (F.getModule()) SuperMethodInst(
        getSILDebugLocation(Loc), Operand, Member, MethodTy, Volatile));
  }
  /// Create a witness_method instruction looking up \p Member in
  /// \p Conformance for \p LookupTy.
  WitnessMethodInst *createWitnessMethod(SILLocation Loc, CanType LookupTy,
                                         ProtocolConformanceRef Conformance,
                                         SILDeclRef Member, SILType MethodTy,
                                         SILValue OptionalOpenedExistential,
                                         bool Volatile = false) {
    return insert(WitnessMethodInst::create(
        getSILDebugLocation(Loc), LookupTy, Conformance, Member, MethodTy,
        &F, OptionalOpenedExistential, Volatile));
  }
  /// Create a dynamic_method instruction looking up \p Member on \p Operand.
  DynamicMethodInst *createDynamicMethod(SILLocation Loc, SILValue Operand,
                                         SILDeclRef Member, SILType MethodTy,
                                         bool Volatile = false) {
    return insert(new (F.getModule()) DynamicMethodInst(
        getSILDebugLocation(Loc), Operand, Member, MethodTy, Volatile));
  }
  /// Create an open_existential_addr instruction opening the existential
  /// address \p Operand as \p SelfTy.
  OpenExistentialAddrInst *
  createOpenExistentialAddr(SILLocation Loc, SILValue Operand, SILType SelfTy) {
    return insert(new (F.getModule()) OpenExistentialAddrInst(
        getSILDebugLocation(Loc), Operand, SelfTy));
  }
  /// Create an open_existential_metatype instruction.
  OpenExistentialMetatypeInst *createOpenExistentialMetatype(SILLocation Loc,
                                                             SILValue operand,
                                                             SILType selfTy) {
    return insert(new (F.getModule()) OpenExistentialMetatypeInst(
        getSILDebugLocation(Loc), operand, selfTy));
  }
  /// Create an open_existential_ref instruction.
  OpenExistentialRefInst *
  createOpenExistentialRef(SILLocation Loc, SILValue Operand, SILType Ty) {
    return insert(new (F.getModule()) OpenExistentialRefInst(
        getSILDebugLocation(Loc), Operand, Ty));
  }
  /// Create an open_existential_box instruction.
  OpenExistentialBoxInst *
  createOpenExistentialBox(SILLocation Loc, SILValue Operand, SILType Ty) {
    return insert(new (F.getModule()) OpenExistentialBoxInst(
        getSILDebugLocation(Loc), Operand, Ty));
  }
  /// Create an init_existential_addr instruction initializing the
  /// existential at \p Existential with the given concrete type and
  /// conformances.
  InitExistentialAddrInst *
  createInitExistentialAddr(SILLocation Loc, SILValue Existential,
                            CanType FormalConcreteType,
                            SILType LoweredConcreteType,
                            ArrayRef<ProtocolConformanceRef> Conformances) {
    return insert(InitExistentialAddrInst::create(
        getSILDebugLocation(Loc), Existential, FormalConcreteType,
        LoweredConcreteType, Conformances, &F));
  }
  /// Create an init_existential_metatype instruction.
  InitExistentialMetatypeInst *
  createInitExistentialMetatype(SILLocation Loc, SILValue metatype,
                                SILType existentialType,
                                ArrayRef<ProtocolConformanceRef> conformances) {
    return insert(InitExistentialMetatypeInst::create(
        getSILDebugLocation(Loc), existentialType, metatype, conformances,
        &F));
  }
  /// Create an init_existential_ref instruction wrapping \p Concrete in
  /// the existential type \p ExistentialType.
  InitExistentialRefInst *
  createInitExistentialRef(SILLocation Loc, SILType ExistentialType,
                           CanType FormalConcreteType, SILValue Concrete,
                           ArrayRef<ProtocolConformanceRef> Conformances) {
    return insert(InitExistentialRefInst::create(
        getSILDebugLocation(Loc), ExistentialType, FormalConcreteType,
        Concrete, Conformances, &F));
  }
  /// Create a deinit_existential_addr instruction on \p Existential.
  DeinitExistentialAddrInst *createDeinitExistentialAddr(SILLocation Loc,
                                                         SILValue Existential) {
    return insert(new (F.getModule()) DeinitExistentialAddrInst(
        getSILDebugLocation(Loc), Existential));
  }
  /// Convenience overload deriving the capture address type from the
  /// storage operand's SILBlockStorageType.
  ProjectBlockStorageInst *createProjectBlockStorage(SILLocation Loc,
                                                     SILValue Storage) {
    auto CaptureTy = Storage->getType()
                         .castTo<SILBlockStorageType>()
                         ->getCaptureAddressType();
    return createProjectBlockStorage(Loc, Storage, CaptureTy);
  }
  /// Create a project_block_storage instruction with explicit capture
  /// address type.
  ProjectBlockStorageInst *createProjectBlockStorage(SILLocation Loc,
                                                     SILValue Storage,
                                                     SILType CaptureTy) {
    return insert(new (F.getModule()) ProjectBlockStorageInst(
        getSILDebugLocation(Loc), Storage, CaptureTy));
  }
  /// Create an init_block_storage_header instruction combining
  /// \p BlockStorage and \p InvokeFunction into a block of \p BlockType.
  InitBlockStorageHeaderInst *
  createInitBlockStorageHeader(SILLocation Loc, SILValue BlockStorage,
                               SILValue InvokeFunction, SILType BlockType) {
    return insert(new (F.getModule()) InitBlockStorageHeaderInst(
        getSILDebugLocation(Loc), BlockStorage, InvokeFunction, BlockType));
  }
  /// Create a metatype instruction producing the metatype value of type
  /// \p Metatype.
  MetatypeInst *createMetatype(SILLocation Loc, SILType Metatype) {
    return insert(new (F.getModule())
                      MetatypeInst(getSILDebugLocation(Loc), Metatype));
  }
  /// Create an objc_metatype_to_object instruction.
  ObjCMetatypeToObjectInst *
  createObjCMetatypeToObject(SILLocation Loc, SILValue Op, SILType Ty) {
    return insert(new (F.getModule()) ObjCMetatypeToObjectInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create an objc_existential_metatype_to_object instruction.
  ObjCExistentialMetatypeToObjectInst *
  createObjCExistentialMetatypeToObject(SILLocation Loc, SILValue Op,
                                        SILType Ty) {
    return insert(new (F.getModule()) ObjCExistentialMetatypeToObjectInst(
        getSILDebugLocation(Loc), Op, Ty));
  }
  /// Create a value_metatype instruction reading the dynamic metatype of
  /// \p Base.
  ValueMetatypeInst *createValueMetatype(SILLocation Loc, SILType Metatype,
                                         SILValue Base) {
    return insert(new (F.getModule()) ValueMetatypeInst(
        getSILDebugLocation(Loc), Metatype, Base));
  }
  /// Create an existential_metatype instruction on \p Base.
  ExistentialMetatypeInst *
  createExistentialMetatype(SILLocation Loc, SILType Metatype, SILValue Base) {
    return insert(new (F.getModule()) ExistentialMetatypeInst(
        getSILDebugLocation(Loc), Metatype, Base));
  }
  /// Create a copy_block instruction on \p Operand.
  CopyBlockInst *createCopyBlock(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      CopyBlockInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a strong_retain instruction on \p Operand.
  StrongRetainInst *createStrongRetain(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      StrongRetainInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a strong_release instruction on \p Operand.
  StrongReleaseInst *createStrongRelease(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      StrongReleaseInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a strong_pin instruction on \p Operand.
  StrongPinInst *createStrongPin(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      StrongPinInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a strong_unpin instruction on \p Operand.
  StrongUnpinInst *createStrongUnpin(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      StrongUnpinInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a strong_retain_unowned instruction on \p Operand.
  StrongRetainUnownedInst *createStrongRetainUnowned(SILLocation Loc,
                                                     SILValue Operand) {
    return insert(new (F.getModule()) StrongRetainUnownedInst(
        getSILDebugLocation(Loc), Operand));
  }
  /// Create an unowned_retain instruction on \p Operand.
  UnownedRetainInst *createUnownedRetain(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      UnownedRetainInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create an unowned_release instruction on \p Operand.
  UnownedReleaseInst *createUnownedRelease(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      UnownedReleaseInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a fix_lifetime instruction on \p Operand.
  FixLifetimeInst *createFixLifetime(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      FixLifetimeInst(getSILDebugLocation(Loc), Operand));
  }
void emitFixLifetime(SILLocation Loc, SILValue Operand) {
if (getTypeLowering(Operand->getType()).isTrivial())
return;
createFixLifetime(Loc, Operand);
}
  /// Create a mark_dependence instruction marking \p value as dependent
  /// on \p base.
  MarkDependenceInst *createMarkDependence(SILLocation Loc, SILValue value,
                                           SILValue base) {
    return insert(new (F.getModule()) MarkDependenceInst(
        getSILDebugLocation(Loc), value, base));
  }
  /// Create an is_unique instruction on \p operand; the result is a
  /// Builtin.Int1.
  IsUniqueInst *createIsUnique(SILLocation Loc, SILValue operand) {
    auto Int1Ty = SILType::getBuiltinIntegerType(1, getASTContext());
    return insert(new (F.getModule()) IsUniqueInst(getSILDebugLocation(Loc),
                                                   operand, Int1Ty));
  }
  /// Create an is_unique_or_pinned instruction on \p value; the result is
  /// a Builtin.Int1.
  IsUniqueOrPinnedInst *createIsUniqueOrPinned(SILLocation Loc,
                                               SILValue value) {
    auto Int1Ty = SILType::getBuiltinIntegerType(1, getASTContext());
    return insert(new (F.getModule()) IsUniqueOrPinnedInst(
        getSILDebugLocation(Loc), value, Int1Ty));
  }
  /// Create a dealloc_stack instruction on \p operand.
  DeallocStackInst *createDeallocStack(SILLocation Loc, SILValue operand) {
    return insert(new (F.getModule())
                      DeallocStackInst(getSILDebugLocation(Loc), operand));
  }
  /// Create a dealloc_ref instruction on \p operand; \p canBeOnStack
  /// selects the [stack] form.
  DeallocRefInst *createDeallocRef(SILLocation Loc, SILValue operand,
                                   bool canBeOnStack) {
    return insert(new (F.getModule()) DeallocRefInst(
        getSILDebugLocation(Loc), operand, canBeOnStack));
  }
  /// Create a dealloc_partial_ref instruction for the partially
  /// initialized object \p operand with \p metatype.
  DeallocPartialRefInst *createDeallocPartialRef(SILLocation Loc,
                                                 SILValue operand,
                                                 SILValue metatype) {
    return insert(new (F.getModule()) DeallocPartialRefInst(
        getSILDebugLocation(Loc), operand, metatype));
  }
  /// Create a dealloc_box instruction with explicit element type.
  DeallocBoxInst *createDeallocBox(SILLocation Loc, SILType eltType,
                                   SILValue operand) {
    return insert(new (F.getModule()) DeallocBoxInst(
        getSILDebugLocation(Loc), eltType, operand));
  }
DeallocBoxInst *createDeallocBox(SILLocation Loc, SILValue operand) {
auto eltType =
operand->getType().castTo<SILBoxType>()->getBoxedAddressType();
return insert(new (F.getModule()) DeallocBoxInst(
getSILDebugLocation(Loc), eltType, operand));
}
  /// Create a dealloc_existential_box instruction for the box \p operand
  /// holding \p concreteType.
  DeallocExistentialBoxInst *createDeallocExistentialBox(SILLocation Loc,
                                                         CanType concreteType,
                                                         SILValue operand) {
    return insert(new (F.getModule()) DeallocExistentialBoxInst(
        getSILDebugLocation(Loc), concreteType, operand));
  }
  /// Create a dealloc_value_buffer instruction for the buffer \p operand
  /// holding \p valueType.
  DeallocValueBufferInst *createDeallocValueBuffer(SILLocation Loc,
                                                   SILType valueType,
                                                   SILValue operand) {
    return insert(new (F.getModule()) DeallocValueBufferInst(
        getSILDebugLocation(Loc), valueType, operand));
  }
  /// Create a destroy_addr instruction on the address \p Operand.
  DestroyAddrInst *createDestroyAddr(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      DestroyAddrInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a project_value_buffer instruction for the buffer \p operand
  /// holding \p valueType.
  ProjectValueBufferInst *createProjectValueBuffer(SILLocation Loc,
                                                   SILType valueType,
                                                   SILValue operand) {
    return insert(new (F.getModule()) ProjectValueBufferInst(
        getSILDebugLocation(Loc), valueType, operand));
  }
ProjectBoxInst *createProjectBox(SILLocation Loc, SILValue boxOperand) {
auto valueTy =
boxOperand->getType().castTo<SILBoxType>()->getBoxedAddressType();
return insert(new (F.getModule()) ProjectBoxInst(
getSILDebugLocation(Loc), valueTy, boxOperand));
}
  /// Create a project_box instruction with explicit boxed value type.
  ProjectBoxInst *createProjectBox(SILLocation Loc, SILType valueTy,
                                   SILValue boxOperand) {
    return insert(new (F.getModule()) ProjectBoxInst(
        getSILDebugLocation(Loc), valueTy, boxOperand));
  }
  /// Create a project_existential_box instruction for the box
  /// \p boxOperand with value type \p valueTy.
  ProjectExistentialBoxInst *createProjectExistentialBox(SILLocation Loc,
                                                         SILType valueTy,
                                                         SILValue boxOperand) {
    return insert(new (F.getModule()) ProjectExistentialBoxInst(
        getSILDebugLocation(Loc), valueTy, boxOperand));
  }
//===--------------------------------------------------------------------===//
// Unchecked cast helpers
//===--------------------------------------------------------------------===//
// Create an UncheckedRefCast if the source and dest types are legal,
// otherwise return null.
// Unwrap or wrap optional types as needed.
SILInstruction *tryCreateUncheckedRefCast(SILLocation Loc, SILValue Op,
SILType ResultTy);
// Create the appropriate cast instruction based on result type.
SILInstruction *createUncheckedBitCast(SILLocation Loc, SILValue Op,
SILType Ty);
//===--------------------------------------------------------------------===//
// Runtime failure
//===--------------------------------------------------------------------===//
  /// Create a cond_fail instruction failing when \p Operand is true.
  CondFailInst *createCondFail(SILLocation Loc, SILValue Operand) {
    return insert(new (F.getModule())
                      CondFailInst(getSILDebugLocation(Loc), Operand));
  }
  /// Create a builtin "int_trap" call with no arguments and empty tuple
  /// result.
  BuiltinInst *createBuiltinTrap(SILLocation Loc) {
    ASTContext &AST = F.getModule().getASTContext();
    auto Id_trap = AST.getIdentifier("int_trap");
    return createBuiltin(Loc, Id_trap, F.getModule().Types.getEmptyTupleType(),
                         {}, {});
  }
//===--------------------------------------------------------------------===//
// Array indexing instructions
//===--------------------------------------------------------------------===//
  /// Create an index_addr instruction offsetting the address \p Operand
  /// by \p Index elements.
  IndexAddrInst *createIndexAddr(SILLocation Loc, SILValue Operand,
                                 SILValue Index) {
    return insert(new (F.getModule()) IndexAddrInst(getSILDebugLocation(Loc),
                                                    Operand, Index));
  }
  /// Create an index_raw_pointer instruction offsetting \p Operand by
  /// \p Index bytes.
  IndexRawPointerInst *createIndexRawPointer(SILLocation Loc, SILValue Operand,
                                             SILValue Index) {
    return insert(new (F.getModule()) IndexRawPointerInst(
        getSILDebugLocation(Loc), Operand, Index));
  }
//===--------------------------------------------------------------------===//
// Terminator SILInstruction Creation Methods
//===--------------------------------------------------------------------===//
  /// Create an unreachable terminator.
  UnreachableInst *createUnreachable(SILLocation Loc) {
    return insertTerminator(new (F.getModule())
                                UnreachableInst(getSILDebugLocation(Loc)));
  }
  /// Create a return terminator yielding \p ReturnValue.
  ReturnInst *createReturn(SILLocation Loc, SILValue ReturnValue) {
    return insertTerminator(new (F.getModule()) ReturnInst(
        getSILDebugLocation(Loc), ReturnValue));
  }
  /// Create a throw terminator propagating \p errorValue.
  ThrowInst *createThrow(SILLocation Loc, SILValue errorValue) {
    return insertTerminator(
        new (F.getModule()) ThrowInst(getSILDebugLocation(Loc), errorValue));
  }
  /// Create a cond_br terminator with no branch arguments.
  CondBranchInst *createCondBranch(SILLocation Loc, SILValue Cond,
                                   SILBasicBlock *Target1,
                                   SILBasicBlock *Target2) {
    return insertTerminator(CondBranchInst::create(getSILDebugLocation(Loc),
                                                   Cond, Target1, Target2, F));
  }
  /// Create a cond_br terminator passing \p Args1 / \p Args2 to the
  /// respective destination blocks.
  CondBranchInst *createCondBranch(SILLocation Loc, SILValue Cond,
                                   SILBasicBlock *Target1,
                                   ArrayRef<SILValue> Args1,
                                   SILBasicBlock *Target2,
                                   ArrayRef<SILValue> Args2) {
    return insertTerminator(CondBranchInst::create(
        getSILDebugLocation(Loc), Cond, Target1, Args1, Target2, Args2, F));
  }
CondBranchInst *createCondBranch(SILLocation Loc, SILValue Cond,
SILBasicBlock *Target1,
OperandValueArrayRef Args1,
SILBasicBlock *Target2,
OperandValueArrayRef Args2) {
SmallVector<SILValue, 6> ArgsCopy1;
SmallVector<SILValue, 6> ArgsCopy2;
ArgsCopy1.reserve(Args1.size());
ArgsCopy2.reserve(Args2.size());
for (auto I = Args1.begin(), E = Args1.end(); I != E; ++I)
ArgsCopy1.push_back(*I);
for (auto I = Args2.begin(), E = Args2.end(); I != E; ++I)
ArgsCopy2.push_back(*I);
return insertTerminator(CondBranchInst::create(getSILDebugLocation(Loc),
Cond, Target1, ArgsCopy1,
Target2, ArgsCopy2, F));
}
  /// Create a br terminator to \p TargetBlock with no arguments.
  BranchInst *createBranch(SILLocation Loc, SILBasicBlock *TargetBlock) {
    return insertTerminator(
        BranchInst::create(getSILDebugLocation(Loc), TargetBlock, F));
  }
  /// Create a br terminator to \p TargetBlock passing \p Args.
  BranchInst *createBranch(SILLocation Loc, SILBasicBlock *TargetBlock,
                           ArrayRef<SILValue> Args) {
    return insertTerminator(
        BranchInst::create(getSILDebugLocation(Loc), TargetBlock, Args, F));
  }
  /// Variant taking an OperandValueArrayRef; defined out of line.
  BranchInst *createBranch(SILLocation Loc, SILBasicBlock *TargetBlock,
                           OperandValueArrayRef Args);
  /// Create a switch_value terminator dispatching on \p Operand over
  /// \p CaseBBs with default \p DefaultBB.
  SwitchValueInst *
  createSwitchValue(SILLocation Loc, SILValue Operand, SILBasicBlock *DefaultBB,
                    ArrayRef<std::pair<SILValue, SILBasicBlock *>> CaseBBs) {
    return insertTerminator(SwitchValueInst::create(
        getSILDebugLocation(Loc), Operand, DefaultBB, CaseBBs, F));
  }
  /// Create a switch_enum terminator dispatching on the enum value
  /// \p Operand over \p CaseBBs with default \p DefaultBB.
  SwitchEnumInst *createSwitchEnum(
      SILLocation Loc, SILValue Operand, SILBasicBlock *DefaultBB,
      ArrayRef<std::pair<EnumElementDecl *, SILBasicBlock *>> CaseBBs) {
    return insertTerminator(SwitchEnumInst::create(
        getSILDebugLocation(Loc), Operand, DefaultBB, CaseBBs, F));
  }
  /// Create a switch_enum_addr terminator; like switch_enum but the
  /// operand is an enum address.
  SwitchEnumAddrInst *createSwitchEnumAddr(
      SILLocation Loc, SILValue Operand, SILBasicBlock *DefaultBB,
      ArrayRef<std::pair<EnumElementDecl *, SILBasicBlock *>> CaseBBs) {
    return insertTerminator(SwitchEnumAddrInst::create(
        getSILDebugLocation(Loc), Operand, DefaultBB, CaseBBs, F));
  }
  /// Create a dynamic_method_br terminator branching on whether
  /// \p Operand responds to \p Member.
  DynamicMethodBranchInst *
  createDynamicMethodBranch(SILLocation Loc, SILValue Operand,
                            SILDeclRef Member, SILBasicBlock *HasMethodBB,
                            SILBasicBlock *NoMethodBB) {
    return insertTerminator(
        DynamicMethodBranchInst::create(getSILDebugLocation(Loc), Operand,
                                        Member, HasMethodBB, NoMethodBB, F));
  }
  /// Create a checked_cast_br terminator casting \p op to \p destTy.
  CheckedCastBranchInst *createCheckedCastBranch(SILLocation Loc, bool isExact,
                                                 SILValue op, SILType destTy,
                                                 SILBasicBlock *successBB,
                                                 SILBasicBlock *failureBB) {
    return insertTerminator(new (F.getModule()) CheckedCastBranchInst(
        getSILDebugLocation(Loc), isExact, op, destTy, successBB,
        failureBB));
  }
  /// Create a checked_cast_addr_br terminator casting between the given
  /// addresses, consuming the source per \p consumption.
  CheckedCastAddrBranchInst *
  createCheckedCastAddrBranch(SILLocation Loc, CastConsumptionKind consumption,
                              SILValue src, CanType sourceType, SILValue dest,
                              CanType targetType, SILBasicBlock *successBB,
                              SILBasicBlock *failureBB) {
    return insertTerminator(new (F.getModule()) CheckedCastAddrBranchInst(
        getSILDebugLocation(Loc), consumption, src, sourceType, dest,
        targetType, successBB, failureBB));
  }
//===--------------------------------------------------------------------===//
// Memory management helpers
//===--------------------------------------------------------------------===//

/// Try to fold a destroy_addr operation into the previous instructions, or
/// generate an explicit one if that fails. If this inserts a new
/// instruction, it returns it, otherwise it returns null.
DestroyAddrInst *emitDestroyAddrAndFold(SILLocation Loc, SILValue Operand) {
  auto U = emitDestroyAddr(Loc, Operand);
  // A null union, or a union holding the folded copy_addr, means no new
  // destroy_addr instruction was created.
  if (U.isNull() || !U.is<DestroyAddrInst *>())
    return nullptr;
  return U.get<DestroyAddrInst *>();
}

/// Perform a strong_release instruction at the current location, attempting
/// to fold it locally into nearby retain instructions or emitting an explicit
/// strong release if necessary. If this inserts a new instruction, it
/// returns it, otherwise it returns null.
StrongReleaseInst *emitStrongReleaseAndFold(SILLocation Loc,
                                            SILValue Operand) {
  auto U = emitStrongRelease(Loc, Operand);
  if (U.isNull())
    return nullptr;
  if (auto *SRI = U.dyn_cast<StrongReleaseInst *>())
    return SRI;
  // The release cancelled a prior strong_retain: erase that retain and
  // report that nothing new was inserted.
  U.get<StrongRetainInst *>()->eraseFromParent();
  return nullptr;
}

/// Emit a release_value instruction at the current location, attempting to
/// fold it locally into another nearby retain_value instruction. This
/// returns the new instruction if it inserts one, otherwise it returns null.
///
/// This instruction doesn't handle strength reduction of release_value into
/// a noop / strong_release / unowned_release. For that, use the
/// emitReleaseValueOperation method below or use the TypeLowering API.
ReleaseValueInst *emitReleaseValueAndFold(SILLocation Loc, SILValue Operand) {
  auto U = emitReleaseValue(Loc, Operand);
  if (U.isNull())
    return nullptr;
  if (auto *RVI = U.dyn_cast<ReleaseValueInst *>())
    return RVI;
  // The release cancelled a prior retain_value: erase that retain.
  U.get<RetainValueInst *>()->eraseFromParent();
  return nullptr;
}
/// Emit a release_value instruction at the current location, attempting to
/// fold it locally into another nearby retain_value instruction. Returns a
/// pointer union initialized with a release value inst if it inserts one,
/// otherwise returns the retain. It is expected that the caller will remove
/// the retain_value. This allows for the caller to update any state before
/// the retain_value is destroyed.
PointerUnion<RetainValueInst *, ReleaseValueInst *>
emitReleaseValue(SILLocation Loc, SILValue Operand);

/// Emit a strong_release instruction at the current location, attempting to
/// fold it locally into another nearby strong_retain instruction. Returns a
/// pointer union initialized with a strong_release inst if it inserts one,
/// otherwise returns the pointer union initialized with the strong_retain. It
/// is expected that the caller will remove the returned strong_retain. This
/// allows for the caller to update any state before the release value is
/// destroyed.
PointerUnion<StrongRetainInst *, StrongReleaseInst *>
emitStrongRelease(SILLocation Loc, SILValue Operand);

/// Emit a destroy_addr instruction at \p Loc attempting to fold the
/// destroy_addr locally into a copy_addr instruction. Returns a pointer union
/// initialized with the folded copy_addr if the destroy_addr was folded into
/// a copy_addr. Otherwise, returns the newly inserted destroy_addr.
PointerUnion<CopyAddrInst *, DestroyAddrInst *>
emitDestroyAddr(SILLocation Loc, SILValue Operand);
/// Convenience function for calling emitRetain on the type lowering
/// for the non-address value.
void emitRetainValueOperation(SILLocation Loc, SILValue v) {
  assert(!v->getType().isAddress());
  auto &lowering = getTypeLowering(v->getType());
  // No `return` on the void call, for consistency with
  // emitReleaseValueOperation below.
  lowering.emitRetainValue(*this, Loc, v);
}
/// Convenience function for calling TypeLowering.emitRelease on the type
/// lowering for the non-address value.
void emitReleaseValueOperation(SILLocation Loc, SILValue v) {
  assert(!v->getType().isAddress());
  auto &lowering = getTypeLowering(v->getType());
  lowering.emitReleaseValue(*this, Loc, v);
}
/// Return element \p FieldNo of \p Operand, folding
/// tuple_extract(tuple(x,y,z), i) to the original element value instead of
/// emitting a new instruction when possible.
SILValue emitTupleExtract(SILLocation Loc, SILValue Operand, unsigned FieldNo,
                          SILType ResultTy) {
  // Fold tuple_extract(tuple(x,y,z),2)
  if (auto *TI = dyn_cast<TupleInst>(Operand))
    return TI->getOperand(FieldNo);
  return createTupleExtract(Loc, Operand, FieldNo, ResultTy);
}

/// Overload deriving the result type from the operand's tuple type.
SILValue emitTupleExtract(SILLocation Loc, SILValue Operand,
                          unsigned FieldNo) {
  return emitTupleExtract(Loc, Operand, FieldNo,
                          Operand->getType().getTupleElementType(FieldNo));
}

/// Return field \p Field of \p Operand, folding
/// struct_extract(struct(...)) to the original field value when possible.
SILValue emitStructExtract(SILLocation Loc, SILValue Operand, VarDecl *Field,
                           SILType ResultTy) {
  if (auto *SI = dyn_cast<StructInst>(Operand))
    return SI->getFieldValue(Field);
  return createStructExtract(Loc, Operand, Field, ResultTy);
}

/// Overload deriving the result type from the declared field type.
SILValue emitStructExtract(SILLocation Loc, SILValue Operand,
                           VarDecl *Field) {
  auto type = Operand->getType().getFieldType(Field, F.getModule());
  return emitStructExtract(Loc, Operand, Field, type);
}
/// Convert a thick metatype value \p Op to an Objective-C metatype of type
/// \p Ty (defined out of line).
SILValue emitThickToObjCMetatype(SILLocation Loc, SILValue Op, SILType Ty);
/// Inverse conversion: Objective-C metatype to thick metatype.
SILValue emitObjCToThickMetatype(SILLocation Loc, SILValue Op, SILType Ty);
//===--------------------------------------------------------------------===//
// Private Helper Methods
//===--------------------------------------------------------------------===//
private:
/// insert - This is a template to avoid losing type info on the result.
/// Inserts \p TheInst at the current insertion point and returns it.
template <typename T> T *insert(T *TheInst) {
  insertImpl(TheInst);
  return TheInst;
}

/// insertTerminator - This is the same as insert, but clears the insertion
/// point after doing the insertion. This is used by terminators, since it
/// isn't valid to insert something after a terminator.
template <typename T> T *insertTerminator(T *TheInst) {
  insertImpl(TheInst);
  clearInsertionPoint();
  return TheInst;
}
/// Insert \p TheInst before the current insertion point and notify any
/// registered listener.  If no insertion block is set, no insertion is
/// performed.
void insertImpl(SILInstruction *TheInst) {
  // Use nullptr rather than the literal 0 for the pointer check, matching
  // the rest of this header.
  if (BB == nullptr)
    return;

  // If the SILBuilder client wants to know about new instructions, record
  // this.
  if (InsertedInstrs)
    InsertedInstrs->push_back(TheInst);

  BB->insert(InsertPt, TheInst);
}
};
/// A wrapper on top of SILBuilder's constructor that automatically sets the
/// current SILDebugScope based on the specified insertion point. This is
/// useful for situations where a single SIL instruction is lowered into a
/// sequence of SIL instructions.
class SILBuilderWithScope : public SILBuilder {
  /// Adopt the debug scope of \p I.  \p I must carry a debug scope.
  void inheritScopeFrom(SILInstruction *I) {
    assert(I->getDebugScope() && "instruction has no debug scope");
    setCurrentDebugScope(I->getDebugScope());
  }

public:
  /// Build at \p I, adopting its debug scope.
  explicit SILBuilderWithScope(
      SILInstruction *I,
      SmallVectorImpl<SILInstruction *> *InsertedInstrs = nullptr)
      : SILBuilder(I, InsertedInstrs) {
    // Reuse the helper instead of duplicating the assert + scope copy.
    inheritScopeFrom(I);
  }

  explicit SILBuilderWithScope(SILBasicBlock::iterator I)
      : SILBuilderWithScope(&*I) {}

  /// Build at \p I but take the debug scope from \p InheritScopeFrom.
  explicit SILBuilderWithScope(SILInstruction *I,
                               SILInstruction *InheritScopeFrom)
      : SILBuilderWithScope(I) {
    inheritScopeFrom(InheritScopeFrom);
  }

  explicit SILBuilderWithScope(SILBasicBlock::iterator I,
                               SILInstruction *InheritScopeFrom)
      : SILBuilderWithScope(&*I) {
    inheritScopeFrom(InheritScopeFrom);
  }

  /// Build at the end of \p BB with the debug scope of \p InheritScopeFrom.
  explicit SILBuilderWithScope(SILBasicBlock *BB,
                               SILInstruction *InheritScopeFrom)
      : SILBuilder(BB) {
    inheritScopeFrom(InheritScopeFrom);
  }
};
} // end swift namespace
#endif
| {
"content_hash": "86d6b05889bc4f6fecd8f6a16a37432b",
"timestamp": "",
"source": "github",
"line_count": 1484,
"max_line_length": 80,
"avg_line_length": 43.44137466307278,
"alnum_prop": 0.6185180014581103,
"repo_name": "dduan/swift",
"id": "58c5e6e5f5942624756fd5b5eebc390afa271391",
"size": "65224",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/swift/SIL/SILBuilder.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2024"
},
{
"name": "C",
"bytes": "36550"
},
{
"name": "C++",
"bytes": "18618871"
},
{
"name": "CMake",
"bytes": "239759"
},
{
"name": "D",
"bytes": "1686"
},
{
"name": "DTrace",
"bytes": "1857"
},
{
"name": "Emacs Lisp",
"bytes": "33909"
},
{
"name": "LLVM",
"bytes": "48760"
},
{
"name": "Makefile",
"bytes": "1841"
},
{
"name": "Objective-C",
"bytes": "173088"
},
{
"name": "Objective-C++",
"bytes": "155175"
},
{
"name": "Perl",
"bytes": "2219"
},
{
"name": "Python",
"bytes": "371222"
},
{
"name": "Ruby",
"bytes": "2087"
},
{
"name": "Shell",
"bytes": "124677"
},
{
"name": "Swift",
"bytes": "12467726"
},
{
"name": "Vim script",
"bytes": "11829"
}
],
"symlink_target": ""
} |
"use strict";
(function (root) {
  // Global configuration, mutable via benchmark.options().
  var verbose = false;
  var maxIterations = 0x3FFFFFFF;  // hard cap on iterations per chunk
  var testTime = 3000;             // target total measurement time, ms
  var preTestIterations = 1000;    // warm-up/calibration iteration count
  var checkOptimization = true;

  // Allow Native Syntax
  // When node runs with --allow-natives-syntax we can query V8 about
  // optimization status of the benchmarked function; otherwise report null.
  var isOptimized;
  if (typeof process !== 'undefined' &&
      typeof process.execArgv !== 'undefined' &&
      process.execArgv.indexOf('--allow-natives-syntax') >= 0)
  {
    isOptimized = require('./isOptimizedNative');
  } else {
    isOptimized = function () {
      return null;
    };
  }

  // Benchmark function: times `fn`, records the result in benchmark.results.
  var benchmark = function benchmark(name, fn) {
    // Prepare innerloop function.
    // eval gives each benchmarked fn a distinctly-named loop driver;
    // NOTE(review): assumes fn.name is a valid identifier suffix — confirm
    // for anonymous or oddly-named functions.
    var innerLoop = eval(
      "(function(f) {" +
      "  return function innerLoop4" + fn.name + "(n) {" +
      "    for (var i = 0; i < n; i++) f()" +
      "  };" +
      "})")(fn);

    // Pre test: warm-up run used to size the first measured chunk.
    var timeExcess = testTime * 1.1;
    var init = Date.now();
    // NOTE(review): the second argument is ignored by innerLoop(n).
    innerLoop(preTestIterations, fn);
    var elapsed = Date.now() - init + 1; // +1 guards the division below
    var iterations = 0 | Math.min(maxIterations, timeExcess / elapsed * 1000);

    // Test: run chunks, re-estimating chunk size, until testTime elapses.
    var checks = 0;
    var totalIterations = 0;
    init = Date.now();
    do {
      innerLoop(iterations);
      totalIterations += iterations;
      checks++;
      elapsed = Date.now() - init;
      if (elapsed >= testTime) break;
      iterations = 0 | Math.min(maxIterations, (timeExcess - elapsed) / (elapsed + 1) * totalIterations);
    } while (elapsed < testTime);

    // Stats
    var secs = elapsed / 1000;
    var perSecondIterations = 0 | totalIterations / secs;
    if (verbose) {
      console.log('Function "%s" running for %d seconds: %s checks, %s total iterations, %s iterations per second',
        name,
        Math.round(secs * 100) / 100,
        checks,
        totalIterations.toLocaleString(),
        perSecondIterations.toLocaleString()
      );
    }

    // Global stats: record this run, keep results sorted fastest-first,
    // and recompute each entry's percentage gap to the fastest.
    benchmark.results.push({
      name: name,
      elapsed: elapsed,
      checks: checks,
      totalIterations: totalIterations,
      perSecondIterations: perSecondIterations,
      isOptimized: checkOptimization ? isOptimized(fn) : null
    });
    benchmark.results.sort(function (a, b) {
      return b.perSecondIterations - a.perSecondIterations;}
    );
    var max = benchmark.results[0].perSecondIterations;
    benchmark.results.forEach(function (a) {
      return a.diff = Math.round((max - a.perSecondIterations) / max * 10000) / 100 + "%";
    });
  };

  // Results property: accumulated across benchmark() calls.
  benchmark.results = [];

  // Options method: merge recognised settings, return a snapshot.
  // NOTE(review): the returned snapshot omits checkOptimization even though
  // it is settable above — confirm whether that is intentional.
  benchmark.options = function (opt) {
    if (typeof opt.verbose === 'boolean') verbose = opt.verbose;
    if (typeof opt.checkOptimization === 'boolean') checkOptimization = opt.checkOptimization;
    if (typeof opt.maxIterations === 'number') maxIterations = opt.maxIterations;
    if (typeof opt.testTime === 'number') testTime = opt.testTime;
    if (typeof opt.preTestIterations === 'number') preTestIterations = opt.preTestIterations;
    return {
      verbose: verbose,
      maxIterations: maxIterations,
      testTime: testTime,
      preTestIterations: preTestIterations
    };
  };

  // Export for node and browser
  if (typeof exports !== 'undefined') {
    if (typeof module !== 'undefined' && module.exports) {
      module.exports = benchmark;
    }
    exports.benchmarkjs = benchmark;
  } else {
    root.benchmarkjs = benchmark;
  }
})(this);
| {
"content_hash": "86253edbe182a16ec9b72e3fa1ee02c7",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 121,
"avg_line_length": 33.10434782608696,
"alnum_prop": 0.552140793275545,
"repo_name": "todojs/BenchmarkJS",
"id": "96fbbb295c0a21d4257510d5896a01f3e7f8e093",
"size": "3807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmarkjs.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "953"
},
{
"name": "JavaScript",
"bytes": "4720"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Data.SqlClient;
namespace TransactionalNodeService
{
/// <summary>
/// Partial piece of <c>MapTransaction</c> declaring the internal SQL query
/// builder abstraction (other members of this partial class live elsewhere).
/// </summary>
public partial class MapTransaction
{
    /// <summary>
    /// Incrementally builds a parameterised SQL query.
    /// </summary>
    private interface IQueryBuilder
    {
        /// <summary>Registers a named parameter value to bind into the query.</summary>
        void AddParameter(string parameterName, object parameterValue);

        /// <summary>Produces the SQL text for the accumulated query.</summary>
        string GenerateSqlQuery();

        /// <summary>Produces the SqlParameter array matching the generated query text.</summary>
        SqlParameter[] GenerateSqlParameters();
    }
}
} | {
"content_hash": "a633f2cd6e809a1736386a58a36f5a6c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 75,
"avg_line_length": 24.055555555555557,
"alnum_prop": 0.6859122401847575,
"repo_name": "chris-tomich/Glyma",
"id": "2e6a28f9c2c629ad362d4e3ef5c16aefad548651",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Glyma.NodeService/TransactionalNodeService/NodeServiceOperations/MapTransactionSupportClasses/IQueryBuilder.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "55793"
},
{
"name": "C#",
"bytes": "7119388"
},
{
"name": "CSS",
"bytes": "54246"
},
{
"name": "Cucumber",
"bytes": "12623"
},
{
"name": "HTML",
"bytes": "45304"
},
{
"name": "JavaScript",
"bytes": "291703"
},
{
"name": "PLpgSQL",
"bytes": "28266"
},
{
"name": "PowerShell",
"bytes": "789"
},
{
"name": "TypeScript",
"bytes": "414928"
},
{
"name": "XSLT",
"bytes": "47044"
}
],
"symlink_target": ""
} |
<?php
namespace AppBundle\Repository;
/**
 * PcRepository
 *
 * Doctrine repository for the "Pc" entity (AppBundle).
 *
 * This class was generated by the Doctrine ORM. Add your own custom
 * repository methods below.
 */
class PcRepository extends \Doctrine\ORM\EntityRepository
{
}
| {
"content_hash": "ea06ba2fecc0fe08a60544e049a0dd61",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 68,
"avg_line_length": 17.46153846153846,
"alnum_prop": 0.7444933920704846,
"repo_name": "zneel/gestion-materiel",
"id": "4753a5c824731b5a4b4347e81427bd03f46c38e1",
"size": "227",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/AppBundle/Repository/PcRepository.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "3605"
},
{
"name": "HTML",
"bytes": "9715"
},
{
"name": "PHP",
"bytes": "84235"
}
],
"symlink_target": ""
} |
import os
import sys
_SCRIPT_DIR = os.path.realpath(os.path.dirname(__file__))
_CHROME_SOURCE = os.path.realpath(
os.path.join(_SCRIPT_DIR, *[os.path.pardir] * 6))
sys.path.append(os.path.join(_CHROME_SOURCE, 'build/android/gyp'))
import argparse
import json
from util import build_utils
def process_emoticon_data(metadata):
  """Produce the emoticon data to be consumed by the emoji picker.

  Args:
    metadata (list(dict)): list of emoticon group data.

  Returns:
    list(dict): list of readily used emoticon groups.
  """
  processed_groups = []
  for group in metadata:
    emoji_entries = []
    for emoticon in group["emoticon"]:
      emoji_entries.append({
          "base": {
              "string": emoticon["value"],
              "name": emoticon["description"],
              "keywords": []
          },
          "alternates": []
      })
    processed_groups.append({"group": group["group"], "emoji": emoji_entries})
  return processed_groups
def main(args):
  """Read emoticon metadata JSON and write the picker-ready JSON file.

  Args:
    args: command-line arguments (excluding the program name).
  """
  parser = argparse.ArgumentParser()
  parser.add_argument('--metadata',
                      required=True,
                      help='emoji metadata ordering file as JSON')
  parser.add_argument('--output',
                      required=True,
                      help='output JSON file path')
  options = parser.parse_args(args)

  metadata_file = options.metadata
  output_file = options.output

  # Parse emoticon ordering data.
  metadata = []
  with open(metadata_file, 'r') as file:
    metadata = json.load(file)

  emoticon_data = process_emoticon_data(metadata)

  # Write output file atomically in utf-8 format.
  # NOTE(review): AtomicOutput presumably stages to a temp file and renames
  # on success — confirm against build_utils.
  with build_utils.AtomicOutput(output_file) as tmp_file:
    tmp_file.write(
        json.dumps(emoticon_data,
                   separators=(',', ':'),
                   ensure_ascii=False).encode('utf-8'))


if __name__ == '__main__':
  main(sys.argv[1:])
| {
"content_hash": "1b1e891ceef575c4e9ca881fe67f0f8d",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 68,
"avg_line_length": 28.49230769230769,
"alnum_prop": 0.5680345572354212,
"repo_name": "scheib/chromium",
"id": "0848a76f02557185d5c72dabe64f26b8f9badfac",
"size": "2015",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "chrome/browser/resources/chromeos/emoji_picker/tools/emoticon_data.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package org.apache.hadoop.yarn.server.resourcemanager.scheduler;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainerState;
import org.apache.hadoop.yarn.util.resource.Resources;
/**
* This class keeps track of all the consumption of an application. This also
* keeps track of current running/completed containers for the application.
*/
@Private
@Unstable
public class AppSchedulingInfo {
private static final Log LOG = LogFactory.getLog(AppSchedulingInfo.class);

private final ApplicationAttemptId applicationAttemptId;
final ApplicationId applicationId;
private String queueName;
Queue queue;
final String user;

// TODO making containerIdCounter long
// Source of new container ids; the RM epoch is folded into the high bits
// (see the constructor) so ids remain distinct across epochs.
private final AtomicLong containerIdCounter;
private final int EPOCH_BIT_SHIFT = 40;

// Priorities with recorded asks, ordered by the scheduler's comparator.
final Set<Priority> priorities = new TreeSet<Priority>(
    new org.apache.hadoop.yarn.server.resourcemanager.resource.Priority.Comparator());
// priority -> (resourceName -> outstanding request)
final Map<Priority, Map<String, ResourceRequest>> requests =
    new ConcurrentHashMap<Priority, Map<String, ResourceRequest>>();
// Resource names (nodes/racks) the AM has blacklisted.
private Set<String> blacklist = new HashSet<String>();

//private final ApplicationStore store;
private ActiveUsersManager activeUsersManager;

/* Allocated by scheduler */
boolean pending = true; // for app metrics

/**
 * @param appAttemptId attempt whose scheduling state is tracked
 * @param user submitting user
 * @param queue queue the application was submitted to
 * @param activeUsersManager tracker used to (de)activate the user
 * @param epoch RM epoch, shifted into the high bits of new container ids
 */
public AppSchedulingInfo(ApplicationAttemptId appAttemptId,
    String user, Queue queue, ActiveUsersManager activeUsersManager,
    long epoch) {
  this.applicationAttemptId = appAttemptId;
  this.applicationId = appAttemptId.getApplicationId();
  this.queue = queue;
  this.queueName = queue.getQueueName();
  this.user = user;
  this.activeUsersManager = activeUsersManager;
  this.containerIdCounter = new AtomicLong(epoch << EPOCH_BIT_SHIFT);
}
/** @return the application this attempt belongs to. */
public ApplicationId getApplicationId() {
  return applicationId;
}

/** @return the attempt id being tracked. */
public ApplicationAttemptId getApplicationAttemptId() {
  return applicationAttemptId;
}

/** @return the name of the queue the application currently lives in. */
public String getQueueName() {
  return queueName;
}

/** @return the submitting user. */
public String getUser() {
  return user;
}

/** @return true until the first allocation/recovery marks the app running. */
public synchronized boolean isPending() {
  return pending;
}
/**
 * Clear any pending requests from this application.
 */
private synchronized void clearRequests() {
  priorities.clear();
  requests.clear();
  LOG.info("Application " + applicationId + " requests cleared");
}

/** @return a new unique container id (epoch bits pre-seeded in the counter). */
public long getNewContainerId() {
  return this.containerIdCounter.incrementAndGet();
}
/**
 * The ApplicationMaster is updating resource requirements for the
 * application, by asking for more resources and releasing resources acquired
 * by the application.
 *
 * @param requests resources to be acquired
 * @param recoverPreemptedRequest recover Resource Request on preemption
 */
synchronized public void updateResourceRequests(
    List<ResourceRequest> requests, boolean recoverPreemptedRequest) {
  QueueMetrics metrics = queue.getMetrics();

  // Update resource requests
  for (ResourceRequest request : requests) {
    Priority priority = request.getPriority();
    String resourceName = request.getResourceName();
    boolean updatePendingResources = false;
    ResourceRequest lastRequest = null;

    // Only the ANY (off-switch) ask drives pending-resource metrics and
    // user activation, since it carries the aggregate container count.
    if (resourceName.equals(ResourceRequest.ANY)) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("update:" + " application=" + applicationId + " request="
            + request);
      }
      updatePendingResources = true;

      // Premature optimization?
      // Assumes that we won't see more than one priority request updated
      // in one call, reasonable assumption... however, it's totally safe
      // to activate same application more than once.
      // Thus we don't need another loop ala the one in decrementOutstanding()
      // which is needed during deactivate.
      if (request.getNumContainers() > 0) {
        activeUsersManager.activateApplication(user, applicationId);
      }
    }

    Map<String, ResourceRequest> asks = this.requests.get(priority);

    if (asks == null) {
      asks = new ConcurrentHashMap<String, ResourceRequest>();
      this.requests.put(priority, asks);
      this.priorities.add(priority);
    }
    lastRequest = asks.get(resourceName);

    if (recoverPreemptedRequest && lastRequest != null) {
      // Increment the number of containers to 1, as it is recovering a
      // single container.
      request.setNumContainers(lastRequest.getNumContainers() + 1);
    }

    asks.put(resourceName, request);
    if (updatePendingResources) {

      // Similarly, deactivate application?
      if (request.getNumContainers() <= 0) {
        LOG.info("checking for deactivate of application :"
            + this.applicationId);
        checkForDeactivation();
      }

      // Adjust queue metrics by the delta between the previous ANY ask
      // and the new one.
      int lastRequestContainers = lastRequest != null ? lastRequest
          .getNumContainers() : 0;
      Resource lastRequestCapability = lastRequest != null ? lastRequest
          .getCapability() : Resources.none();
      metrics.incrPendingResources(user, request.getNumContainers(),
          request.getCapability());
      metrics.decrPendingResources(user, lastRequestContainers,
          lastRequestCapability);
    }
  }
}
/**
 * The ApplicationMaster is updating the blacklist
 *
 * @param blacklistAdditions resources to be added to the blacklist
 * @param blacklistRemovals resources to be removed from the blacklist
 */
synchronized public void updateBlacklist(
    List<String> blacklistAdditions, List<String> blacklistRemovals) {
  // Add to blacklist
  if (blacklistAdditions != null) {
    blacklist.addAll(blacklistAdditions);
  }

  // Remove from blacklist
  if (blacklistRemovals != null) {
    blacklist.removeAll(blacklistRemovals);
  }
}

/** @return priorities that have (or have had) recorded asks. */
synchronized public Collection<Priority> getPriorities() {
  return priorities;
}

/**
 * @return the live resourceName-to-request map for the given priority, or
 *         null if nothing has been recorded at that priority.
 */
synchronized public Map<String, ResourceRequest> getResourceRequests(
    Priority priority) {
  return requests.get(priority);
}
/**
 * Collect every outstanding request across all priorities and resource
 * names into a single list (a fresh list; entries are the live requests).
 */
public List<ResourceRequest> getAllResourceRequests() {
  List<ResourceRequest> allRequests = new ArrayList<ResourceRequest>();
  for (Map<String, ResourceRequest> perResourceAsks : requests.values()) {
    for (ResourceRequest ask : perResourceAsks.values()) {
      allRequests.add(ask);
    }
  }
  return allRequests;
}
/**
 * Look up the request recorded for a given priority and resource name.
 *
 * @return the request, or null if none is recorded.
 */
synchronized public ResourceRequest getResourceRequest(Priority priority,
    String resourceName) {
  Map<String, ResourceRequest> asksForPriority = requests.get(priority);
  if (asksForPriority == null) {
    return null;
  }
  return asksForPriority.get(resourceName);
}

/**
 * @return the capability of the ANY ask at this priority, or null if there
 *         is no ANY ask recorded.
 */
public synchronized Resource getResource(Priority priority) {
  ResourceRequest anyRequest = getResourceRequest(priority, ResourceRequest.ANY);
  if (anyRequest == null) {
    return null;
  }
  return anyRequest.getCapability();
}
/** @return whether the AM has blacklisted the given resource name. */
public synchronized boolean isBlacklisted(String resourceName) {
  return blacklist.contains(resourceName);
}

/**
 * Resources have been allocated to this application by the resource
 * scheduler. Track them.
 *
 * @param type locality type of the allocation (node/rack/off-switch)
 * @param node the node the container was placed on
 * @param priority the priority of the request
 * @param request the request being satisfied
 * @param container the container allocated
 * @return clones of the requests that were decremented, for recovery
 */
synchronized public List<ResourceRequest> allocate(NodeType type,
    SchedulerNode node, Priority priority, ResourceRequest request,
    Container container) {
  List<ResourceRequest> resourceRequests = new ArrayList<ResourceRequest>();
  if (type == NodeType.NODE_LOCAL) {
    allocateNodeLocal(node, priority, request, container, resourceRequests);
  } else if (type == NodeType.RACK_LOCAL) {
    allocateRackLocal(node, priority, request, container, resourceRequests);
  } else {
    allocateOffSwitch(node, priority, request, container, resourceRequests);
  }
  QueueMetrics metrics = queue.getMetrics();
  if (pending) {
    // once an allocation is done we assume the application is
    // running from scheduler's POV.
    pending = false;
    metrics.runAppAttempt(applicationId, user);
  }

  if (LOG.isDebugEnabled()) {
    LOG.debug("allocate: applicationId=" + applicationId
        + " container=" + container.getId()
        + " host=" + container.getNodeId().toString()
        + " user=" + user
        + " resource=" + request.getCapability());
  }
  metrics.allocateResources(user, 1, request.getCapability(), true);
  return resourceRequests;
}
/**
 * Account for a node-local allocation: decrement the node, rack and ANY
 * asks at this priority.
 *
 * @param resourceRequests out-parameter collecting clones of the
 *        decremented requests, used for recovery
 */
synchronized private void allocateNodeLocal(SchedulerNode node,
    Priority priority, ResourceRequest nodeLocalRequest, Container container,
    List<ResourceRequest> resourceRequests) {
  // Update future requirements
  decResourceRequest(node.getNodeName(), priority, nodeLocalRequest);

  ResourceRequest rackLocalRequest = requests.get(priority).get(
      node.getRackName());
  decResourceRequest(node.getRackName(), priority, rackLocalRequest);

  ResourceRequest offRackRequest = requests.get(priority).get(
      ResourceRequest.ANY);
  decrementOutstanding(offRackRequest);

  // Update cloned NodeLocal, RackLocal and OffRack requests for recovery
  resourceRequests.add(cloneResourceRequest(nodeLocalRequest));
  resourceRequests.add(cloneResourceRequest(rackLocalRequest));
  resourceRequests.add(cloneResourceRequest(offRackRequest));
}

/**
 * Decrement the container count of a non-ANY ask, dropping the entry once
 * it reaches zero (unlike the ANY ask, which is kept — see
 * decrementOutstanding).
 */
private void decResourceRequest(String resourceName, Priority priority,
    ResourceRequest request) {
  request.setNumContainers(request.getNumContainers() - 1);
  if (request.getNumContainers() == 0) {
    requests.get(priority).remove(resourceName);
  }
}

/**
 * Account for a rack-local allocation: decrement the rack and ANY asks at
 * this priority.
 *
 * @param resourceRequests out-parameter collecting clones of the
 *        decremented requests, used for recovery
 */
synchronized private void allocateRackLocal(SchedulerNode node,
    Priority priority, ResourceRequest rackLocalRequest, Container container,
    List<ResourceRequest> resourceRequests) {
  // Update future requirements
  decResourceRequest(node.getRackName(), priority, rackLocalRequest);

  ResourceRequest offRackRequest = requests.get(priority).get(
      ResourceRequest.ANY);
  decrementOutstanding(offRackRequest);

  // Update cloned RackLocal and OffRack requests for recovery
  resourceRequests.add(cloneResourceRequest(rackLocalRequest));
  resourceRequests.add(cloneResourceRequest(offRackRequest));
}

/**
 * Account for an off-switch allocation: decrement only the ANY ask at this
 * priority.
 *
 * @param resourceRequests out-parameter collecting a clone of the
 *        decremented request, used for recovery
 */
synchronized private void allocateOffSwitch(SchedulerNode node,
    Priority priority, ResourceRequest offSwitchRequest, Container container,
    List<ResourceRequest> resourceRequests) {
  // Update future requirements
  decrementOutstanding(offSwitchRequest);
  // Update cloned OffRack requests for recovery
  resourceRequests.add(cloneResourceRequest(offSwitchRequest));
}

/**
 * Decrement the ANY ask and, if nothing remains outstanding anywhere,
 * deactivate the application for its user.
 */
synchronized private void decrementOutstanding(
    ResourceRequest offSwitchRequest) {
  int numOffSwitchContainers = offSwitchRequest.getNumContainers() - 1;

  // Do not remove ANY
  offSwitchRequest.setNumContainers(numOffSwitchContainers);

  // Do we have any outstanding requests?
  // If there is nothing, we need to deactivate this application
  if (numOffSwitchContainers == 0) {
    checkForDeactivation();
  }
}
/**
 * Deactivate the application's user if no priority still has a positive
 * ANY ask outstanding.
 */
synchronized private void checkForDeactivation() {
  for (Priority priority : getPriorities()) {
    ResourceRequest anyAsk = getResourceRequest(priority, ResourceRequest.ANY);
    if (anyAsk != null && anyAsk.getNumContainers() > 0) {
      // Still outstanding work at this priority; keep the app active.
      return;
    }
  }
  activeUsersManager.deactivateApplication(user, applicationId);
}
/**
 * Move the application to another queue: shift pending-resource metrics and
 * user activity tracking from the old queue to the new one.
 */
synchronized public void move(Queue newQueue) {
  QueueMetrics oldMetrics = queue.getMetrics();
  QueueMetrics newMetrics = newQueue.getMetrics();
  for (Map<String, ResourceRequest> asks : requests.values()) {
    ResourceRequest request = asks.get(ResourceRequest.ANY);
    if (request != null) {
      oldMetrics.decrPendingResources(user, request.getNumContainers(),
          request.getCapability());
      newMetrics.incrPendingResources(user, request.getNumContainers(),
          request.getCapability());
    }
  }
  oldMetrics.moveAppFrom(this);
  newMetrics.moveAppTo(this);
  activeUsersManager.deactivateApplication(user, applicationId);
  activeUsersManager = newQueue.getActiveUsersManager();
  activeUsersManager.activateApplication(user, applicationId);
  this.queue = newQueue;
  this.queueName = newQueue.getQueueName();
}

/**
 * Stop tracking this attempt: drop its pending-resource metrics and clear
 * all recorded requests.
 */
synchronized public void stop(RMAppAttemptState rmAppAttemptFinalState) {
  // clear pending resources metrics for the application
  QueueMetrics metrics = queue.getMetrics();
  for (Map<String, ResourceRequest> asks : requests.values()) {
    ResourceRequest request = asks.get(ResourceRequest.ANY);
    if (request != null) {
      metrics.decrPendingResources(user, request.getNumContainers(),
          request.getCapability());
    }
  }
  metrics.finishAppAttempt(applicationId, pending, user);

  // Clear requests themselves
  clearRequests();
}

/** Re-point this bookkeeping at a different queue (no metrics transfer). */
public synchronized void setQueue(Queue queue) {
  this.queue = queue;
}

/**
 * @return the LIVE internal blacklist set (mutations are shared; see
 *         getBlackListCopy for a defensive copy).
 */
public synchronized Set<String> getBlackList() {
  return this.blacklist;
}

/** @return a defensive copy of the blacklist. */
public synchronized Set<String> getBlackListCopy() {
  return new HashSet<>(this.blacklist);
}

/**
 * Adopt state carried over from a previous attempt.  Note this shares the
 * previous attempt's blacklist set by reference (via getBlackList).
 */
public synchronized void transferStateFromPreviousAppSchedulingInfo(
    AppSchedulingInfo appInfo) {
  //    this.priorities = appInfo.getPriorities();
  //    this.requests = appInfo.getRequests();
  this.blacklist = appInfo.getBlackList();
}

/**
 * Account for a container recovered on RM restart: mark the app running and
 * (unless the container already completed) re-register its resources.
 */
public synchronized void recoverContainer(RMContainer rmContainer) {
  QueueMetrics metrics = queue.getMetrics();
  if (pending) {
    // If there was any container to recover, the application was
    // running from scheduler's POV.
    pending = false;
    metrics.runAppAttempt(applicationId, user);
  }

  // Container is completed. Skip recovering resources.
  if (rmContainer.getState().equals(RMContainerState.COMPLETED)) {
    return;
  }

  metrics.allocateResources(user, 1, rmContainer.getAllocatedResource(),
      false);
}

/**
 * Clone a request with the container count pinned to 1 — presumably one
 * clone per allocated container for recovery (see the allocate* methods);
 * confirm against callers.
 */
public ResourceRequest cloneResourceRequest(ResourceRequest request) {
  ResourceRequest newRequest = ResourceRequest.newInstance(
      request.getPriority(), request.getResourceName(),
      request.getCapability(), 1, request.getRelaxLocality());
  return newRequest;
}
}
| {
"content_hash": "68033de23759038f761cb634c2680cb1",
"timestamp": "",
"source": "github",
"line_count": 461,
"max_line_length": 88,
"avg_line_length": 35.049891540130155,
"alnum_prop": 0.7095556380740191,
"repo_name": "mix/hadoop",
"id": "22476d8d97ac242b399d28ad7c542f890c1fc782",
"size": "16964",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/AppSchedulingInfo.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "31146"
},
{
"name": "Batchfile",
"bytes": "64064"
},
{
"name": "C",
"bytes": "1390236"
},
{
"name": "C++",
"bytes": "93543"
},
{
"name": "CMake",
"bytes": "40065"
},
{
"name": "CSS",
"bytes": "50572"
},
{
"name": "HTML",
"bytes": "195034"
},
{
"name": "Java",
"bytes": "50475517"
},
{
"name": "JavaScript",
"bytes": "26275"
},
{
"name": "Perl",
"bytes": "18992"
},
{
"name": "Protocol Buffer",
"bytes": "234739"
},
{
"name": "Python",
"bytes": "18556"
},
{
"name": "Shell",
"bytes": "188919"
},
{
"name": "TLA",
"bytes": "14993"
},
{
"name": "TeX",
"bytes": "19322"
},
{
"name": "XSLT",
"bytes": "20949"
}
],
"symlink_target": ""
} |
EXPORT_SYMBOL_GPL(shmem_truncate_range);
EXPORT_SYMBOL_GPL(shmem_truncate_range);
EXPORT_SYMBOL_GPL(shmem_file_setup);
EXPORT_SYMBOL_GPL(shmem_file_setup_with_mnt);
EXPORT_SYMBOL_GPL(shmem_read_mapping_page_gfp);
\n
static unsigned long shmem_default_max_blocks(void)
static unsigned long shmem_default_max_inodes(void)
int shmem_getpage(struct inode *inode, pgoff_t index, struct page **pagep, enum sgp_type sgp)
static inline struct shmem_sb_info *SHMEM_SB(struct super_block *sb)
static inline int shmem_acct_size(unsigned long flags, loff_t size)
static inline void shmem_unacct_size(unsigned long flags, loff_t size)
static inline int shmem_reacct_size(unsigned long flags, loff_t oldsize, loff_t newsize)
static inline int shmem_acct_block(unsigned long flags, long pages)
static inline void shmem_unacct_blocks(unsigned long flags, long pages)
static inline bool shmem_inode_acct_block(struct inode *inode, long pages)
static inline void shmem_inode_unacct_blocks(struct inode *inode, long pages)
bool vma_is_shmem(struct vm_area_struct *vma)
static int shmem_reserve_inode(struct super_block *sb)
static void shmem_free_inode(struct super_block *sb)
static void shmem_recalc_inode(struct inode *inode)
bool shmem_charge(struct inode *inode, long pages)
void shmem_uncharge(struct inode *inode, long pages)
static int shmem_replace_entry(struct address_space *mapping, pgoff_t index, void *expected, void *replacement)
static bool shmem_confirm_swap(struct address_space *mapping, pgoff_t index, swp_entry_t swap)
static int shmem_parse_huge(const char *str)
static const char *shmem_format_huge(int huge)
static unsigned long shmem_unused_huge_shrink(struct shmem_sb_info *sbinfo, struct shrink_control *sc, unsigned long nr_to_split)
static long shmem_unused_huge_scan(struct super_block *sb, struct shrink_control *sc)
static long shmem_unused_huge_count(struct super_block *sb, struct shrink_control *sc)
static unsigned long shmem_unused_huge_shrink(struct shmem_sb_info *sbinfo, struct shrink_control *sc, unsigned long nr_to_split)
static inline bool is_huge_enabled(struct shmem_sb_info *sbinfo)
static int shmem_add_to_page_cache(struct page *page, struct address_space *mapping, pgoff_t index, void *expected, gfp_t gfp)
static void shmem_delete_from_page_cache(struct page *page, void *radswap)
static int shmem_free_swap(struct address_space *mapping, pgoff_t index, void *radswap)
unsigned long shmem_partial_swap_usage(struct address_space *mapping, pgoff_t start, pgoff_t end)
unsigned long shmem_swap_usage(struct vm_area_struct *vma)
void shmem_unlock_mapping(struct address_space *mapping)
static void shmem_undo_range(struct inode *inode, loff_t lstart, loff_t lend, bool unfalloc)
void shmem_truncate_range(struct inode *inode, loff_t lstart, loff_t lend)
static int shmem_getattr(const struct path *path, struct kstat *stat, u32 request_mask, unsigned int query_flags)
static int shmem_setattr(struct dentry *dentry, struct iattr *attr)
static void shmem_evict_inode(struct inode *inode)
static unsigned long find_swap_entry(struct xarray *xa, void *item)
static int shmem_unuse_inode(struct shmem_inode_info *info, swp_entry_t swap, struct page **pagep)
int shmem_unuse(swp_entry_t swap, struct page *page)
static int shmem_writepage(struct page *page, struct writeback_control *wbc)
static void shmem_show_mpol(struct seq_file *seq, struct mempolicy *mpol)
static struct mempolicy *shmem_get_sbmpol(struct shmem_sb_info *sbinfo)
static struct mempolicy *shmem_get_sbmpol(struct shmem_sb_info *sbinfo)
struct mempolicy *mpol = NULL; spin_lock(&sbinfo->stat_lock)
;mpol = sbinfo->mpol; mpol_get(mpol)
; spin_unlock(&sbinfo->stat_lock)
;}return mpol; } static inline void shmem_show_mpol(struct seq_file *seq, struct mempolicy *mpol)
static struct mempolicy *shmem_get_sbmpol(struct shmem_sb_info *sbinfo)
struct mempolicy *mpol = NULL; spin_lock(&sbinfo->stat_lock)
;mpol = sbinfo->mpol; mpol_get(mpol)
; spin_unlock(&sbinfo->stat_lock)
;}return mpol; } static inline void shmem_show_mpol(struct seq_file *seq, struct mempolicy *mpol)
} static inline struct mempolicy *shmem_get_sbmpol(struct shmem_sb_info *sbinfo)
static void shmem_pseudo_vma_init(struct vm_area_struct *vma, struct shmem_inode_info *info, pgoff_t index)
static void shmem_pseudo_vma_destroy(struct vm_area_struct *vma)
static struct page *shmem_swapin(swp_entry_t swap, gfp_t gfp, struct shmem_inode_info *info, pgoff_t index)
static struct page *shmem_alloc_hugepage(gfp_t gfp, struct shmem_inode_info *info, pgoff_t index)
static struct page *shmem_alloc_page(gfp_t gfp, struct shmem_inode_info *info, pgoff_t index)
static struct page *shmem_alloc_and_acct_page(gfp_t gfp, struct inode *inode, pgoff_t index, bool huge)
static bool shmem_should_replace_page(struct page *page, gfp_t gfp)
static int shmem_replace_page(struct page **pagep, gfp_t gfp, struct shmem_inode_info *info, pgoff_t index)
static int shmem_getpage_gfp(struct inode *inode, pgoff_t index, struct page **pagep, enum sgp_type sgp, gfp_t gfp, struct vm_area_struct *vma, struct vm_fault *vmf, vm_fault_t *fault_type)
static int synchronous_wake_function(wait_queue_entry_t *wait, unsigned mode, int sync, void *key)
static vm_fault_t shmem_fault(struct vm_fault *vmf)
unsigned long shmem_get_unmapped_area(struct file *file, unsigned long uaddr, unsigned long len, unsigned long pgoff, unsigned long flags)
static int shmem_set_policy(struct vm_area_struct *vma, struct mempolicy *mpol)
static struct mempolicy *shmem_get_policy(struct vm_area_struct *vma, unsigned long addr)
int shmem_lock(struct file *file, int lock, struct user_struct *user)
static int shmem_mmap(struct file *file, struct vm_area_struct *vma)
static struct inode *shmem_get_inode(struct super_block *sb, const struct inode *dir, umode_t mode, dev_t dev, unsigned long flags)
bool shmem_mapping(struct address_space *mapping)
static int shmem_mfill_atomic_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, bool zeropage, struct page **pagep)
int shmem_mcopy_atomic_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr, unsigned long src_addr, struct page **pagep)
int shmem_mfill_zeropage_pte(struct mm_struct *dst_mm, pmd_t *dst_pmd, struct vm_area_struct *dst_vma, unsigned long dst_addr)
static int shmem_write_begin(struct file *file, struct address_space *mapping, loff_t pos, unsigned len, unsigned flags, struct page **pagep, void **fsdata)
static int shmem_write_end(struct file *file, struct address_space *mapping, loff_t pos, unsigned len, unsigned copied, struct page *page, void *fsdata)
static ssize_t shmem_file_read_iter(struct kiocb *iocb, struct iov_iter *to)
static pgoff_t shmem_seek_hole_data(struct address_space *mapping, pgoff_t index, pgoff_t end, int whence)
static loff_t shmem_file_llseek(struct file *file, loff_t offset, int whence)
static long shmem_fallocate(struct file *file, int mode, loff_t offset, loff_t len)
static int shmem_statfs(struct dentry *dentry, struct kstatfs *buf)
static int shmem_mknod(struct inode *dir, struct dentry *dentry, umode_t mode, dev_t dev)
static int shmem_tmpfile(struct inode *dir, struct dentry *dentry, umode_t mode)
static int shmem_mkdir(struct inode *dir, struct dentry *dentry, umode_t mode)
static int shmem_create(struct inode *dir, struct dentry *dentry, umode_t mode, bool excl)
static int shmem_link(struct dentry *old_dentry, struct inode *dir, struct dentry *dentry)
static int shmem_unlink(struct inode *dir, struct dentry *dentry)
static int shmem_rmdir(struct inode *dir, struct dentry *dentry)
static int shmem_exchange(struct inode *old_dir, struct dentry *old_dentry, struct inode *new_dir, struct dentry *new_dentry)
static int shmem_whiteout(struct inode *old_dir, struct dentry *old_dentry)
static int shmem_rename2(struct inode *old_dir, struct dentry *old_dentry, struct inode *new_dir, struct dentry *new_dentry, unsigned int flags)
static int shmem_symlink(struct inode *dir, struct dentry *dentry, const char *symname)
static void shmem_put_link(void *arg)
static const char *shmem_get_link(struct dentry *dentry, struct inode *inode, struct delayed_call *done)
static int shmem_initxattrs(struct inode *inode, const struct xattr *xattr_array, void *fs_info)
static int shmem_xattr_handler_get(const struct xattr_handler *handler, struct dentry *unused, struct inode *inode, const char *name, void *buffer, size_t size)
static int shmem_xattr_handler_set(const struct xattr_handler *handler, struct dentry *unused, struct inode *inode, const char *name, const void *value, size_t size, int flags)
static ssize_t shmem_listxattr(struct dentry *dentry, char *buffer, size_t size)
static struct dentry *shmem_get_parent(struct dentry *child)
static int shmem_match(struct inode *ino, void *vfh)
static struct dentry *shmem_find_alias(struct inode *inode)
static struct dentry *shmem_fh_to_dentry(struct super_block *sb, struct fid *fid, int fh_len, int fh_type)
static int shmem_encode_fh(struct inode *inode, __u32 *fh, int *len, struct inode *parent)
static int shmem_parse_options(char *options, struct shmem_sb_info *sbinfo, bool remount)
static int shmem_remount_fs(struct super_block *sb, int *flags, char *data)
static int shmem_show_options(struct seq_file *seq, struct dentry *root)
static void shmem_put_super(struct super_block *sb)
int shmem_fill_super(struct super_block *sb, void *data, int silent)
static struct inode *shmem_alloc_inode(struct super_block *sb)
static void shmem_destroy_callback(struct rcu_head *head)
static void shmem_destroy_inode(struct inode *inode)
static void shmem_init_inode(void *foo)
static void shmem_init_inodecache(void)
static void shmem_destroy_inodecache(void)
static struct dentry *shmem_mount(struct file_system_type *fs_type, int flags, const char *dev_name, void *data)
int __init shmem_init(void)
static ssize_t shmem_enabled_show(struct kobject *kobj, struct kobj_attribute *attr, char *buf)
static ssize_t shmem_enabled_store(struct kobject *kobj, struct kobj_attribute *attr, const char *buf, size_t count)
bool shmem_huge_enabled(struct vm_area_struct *vma)
int __init shmem_init(void)
int shmem_unuse(swp_entry_t swap, struct page *page)
int shmem_lock(struct file *file, int lock, struct user_struct *user)
void shmem_unlock_mapping(struct address_space *mapping)
unsigned long shmem_get_unmapped_area(struct file *file, unsigned long addr, unsigned long len, unsigned long pgoff, unsigned long flags)
void shmem_truncate_range(struct inode *inode, loff_t lstart, loff_t lend)
static struct file *__shmem_file_setup(struct vfsmount *mnt, const char *name, loff_t size, unsigned long flags, unsigned int i_flags)
struct file *shmem_kernel_file_setup(const char *name, loff_t size, unsigned long flags)
struct file *shmem_file_setup(const char *name, loff_t size, unsigned long flags)
struct file *shmem_file_setup_with_mnt(struct vfsmount *mnt, const char *name, loff_t size, unsigned long flags)
int shmem_zero_setup(struct vm_area_struct *vma)
struct page *shmem_read_mapping_page_gfp(struct address_space *mapping, pgoff_t index, gfp_t gfp)
\n
19 struct inode *inode
14 pgoff_t index
12 unsigned long flags
12 struct dentry *dentry
12 struct address_space *mapping
11 struct super_block *sb
10 struct vm_area_struct *vma
9 struct file *file
9 gfp_t gfp
8 struct shmem_sb_info *sbinfo
8 struct inode *dir
7 struct page **pagep
7 struct page *page
6 void
6 struct shmem_inode_info *info
6 long pages
6 loff_t size
6 const char *name
5 umode_t mode
5 swp_entry_t swap
4 struct shrink_control *sc
4 struct seq_file *seq
4 struct mempolicy *mpol
4 struct dentry *old_dentry
4 &sbinfo->stat_lock
3 unsigned long dst_addr
3 struct vm_area_struct *dst_vma
3 struct mm_struct *dst_mm
3 struct inode *old_dir
3 size_t size
3 pmd_t *dst_pmd
3 loff_t lstart
3 loff_t lend
2 void *radswap
2 void *expected
2 void *data
2 unsigned long src_addr
2 unsigned long pgoff
2 unsigned long nr_to_split
2 unsigned long len
2 unsigned long addr
2 unsigned len
2 struct vm_fault *vmf
2 struct vfsmount *mnt
2 struct user_struct *user
2 struct kobject *kobj
2 struct kobj_attribute *attr
2 struct inode *new_dir
2 struct dentry *unused
2 struct dentry *new_dentry
2 shmem_truncate_range
2 pgoff_t end
2 mpol
2 loff_t pos
2 loff_t offset
2 int whence
2 int lock
2 int flags
2 enum sgp_type sgp
2 dev_t dev
2 const struct xattr_handler *handler
1 wait_queue_entry_t *wait
1 void *vfh
1 void *replacement
1 void *key
1 void *item
1 void *fs_info
1 void **fsdata
1 void *fsdata
1 void *foo
1 void *buffer
1 void *arg
1 vm_fault_t *fault_type
1 unsigned mode
1 unsigned long uaddr
1 unsigned int query_flags
1 unsigned int i_flags
1 unsigned int flags
1 unsigned flags
1 unsigned copied
1 u32 request_mask
1 __u32 *fh
1 struct xarray *xa
1 struct writeback_control *wbc
1 struct rcu_head *head
1 struct kstat *stat
1 struct kstatfs *buf
1 struct kiocb *iocb
1 struct iov_iter *to
1 struct inode *parent
1 struct inode *ino
1 struct iattr *attr
1 struct file_system_type *fs_type
1 struct fid *fid
1 struct dentry *root
1 struct dentry *child
1 struct delayed_call *done
1 size_t count
1 shmem_read_mapping_page_gfp
1 shmem_file_setup_with_mnt
1 shmem_file_setup
1 pgoff_t start
1 loff_t oldsize
1 loff_t newsize
1 loff_t len
1 int sync
1 int silent
1 int mode
1 int *len
1 int huge
1 int *flags
1 int fh_type
1 int fh_len
1 const void *value
1 const struct xattr *xattr_array
1 const struct path *path
1 const struct inode *dir
1 const char *symname
1 const char *str
1 const char *dev_name
1 const char *buf
1 char *options
1 char *data
1 char *buffer
1 char *buf
1 bool zeropage
1 bool unfalloc
1 bool remount
1 bool huge
1 bool excl
| {
"content_hash": "799b611299983a3310209fe1feac8515",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 191,
"avg_line_length": 53.89591078066915,
"alnum_prop": 0.7332735549730998,
"repo_name": "liker12134/my-kernel-dev-environment",
"id": "a5dd71b4a6b4f257ea6775a2e4839d3e41a276bb",
"size": "14499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "code-readpad/kernel-schema/mm_shmem.c.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1970671"
},
{
"name": "Shell",
"bytes": "8111"
}
],
"symlink_target": ""
} |
package com.tests;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.testng.annotations.AfterSuite;
import org.testng.annotations.BeforeTest;
import com.tests.logic.ApplicationManager;
import com.tests.logic1.ApplicationManager1;
import com.tests.model.Ride;
import com.tests.model.User;
public class TestBase {

  /** Shared fixture: the application manager owning the browser session. */
  static protected ApplicationManager app;

  // NOTE(review): the annotation is commented out in the original source, so
  // TestNG never runs this hook automatically; left disabled to preserve behavior.
  //@BeforeTest
  public void setUp() throws Exception {
    Properties properties = new Properties();
    // The config file can be overridden with -DconfigFile=<path>;
    // defaults to the Chrome profile.
    String configFile = System.getProperty("configFile",
        "applicationChrome.properties");
    // Close the reader explicitly: Properties.load(Reader) does not close its
    // argument, so the original code leaked the file handle.
    FileReader reader = new FileReader(new File(configFile));
    try {
      properties.load(reader);
    } finally {
      reader.close();
    }
    app = new ApplicationManager1(properties);
    app.getNavigationHelper().openLoginPage(properties.getProperty("baseUrl"));
  }

  // NOTE(review): also disabled in the original source.
  //@AfterSuite
  public void tearDown() throws Exception {
    app.stop();
  }

  /**
   * Wraps each user into a single-element Object[] row, the row shape
   * TestNG data providers expect.
   *
   * @param users users to wrap; must not be null
   * @return one Object[] row per user, in input order
   */
  public static List<Object[]> wrapUsersForDataProvider(List<User> users) {
    List<Object[]> list = new ArrayList<Object[]>(users.size());
    for (User user : users) {
      list.add(new Object[] {user});
    }
    return list;
  }

  /**
   * Wraps each ride into a single-element Object[] row for TestNG
   * data providers.
   *
   * @param rides rides to wrap; must not be null
   * @return one Object[] row per ride, in input order
   */
  public static List<Object[]> wrapRidesForDataProvider(List<Ride> rides) {
    List<Object[]> list = new ArrayList<Object[]>(rides.size());
    for (Ride ride : rides) {
      list.add(new Object[] {ride});
    }
    return list;
  }
} | {
"content_hash": "26cf6ab75c9d04dab8250977880c4a2c",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 80,
"avg_line_length": 25.51851851851852,
"alnum_prop": 0.7104499274310595,
"repo_name": "makarkina/JavaForTesters",
"id": "cdb4844859fca383f916478c991c739f56ebb054",
"size": "1378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ride-exchange-tests-logic/src/com/tests/TestBase.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "462"
},
{
"name": "CSS",
"bytes": "5640"
},
{
"name": "HTML",
"bytes": "25683"
},
{
"name": "Java",
"bytes": "165303"
},
{
"name": "JavaScript",
"bytes": "3555"
}
],
"symlink_target": ""
} |
package com.shejiaomao.weibo.db;
import java.util.Date;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import com.cattong.commons.ServiceProvider;
import com.cattong.commons.util.StringUtil;
import com.cattong.entity.Comment;
import com.cattong.entity.Status;
import com.cattong.entity.User;
import com.shejiaomao.weibo.common.StatusCatalog;
/**
 * DAO for persisting {@link Comment} rows in the local SQLite database.
 * Comments are stored per account; comments that only exist as reply-to
 * targets of other comments are stored with the sentinel Account_ID = -1.
 */
public class CommentDao extends BaseDao<Comment> {
  private static final String TABLE = "Comment";

  private UserDao userDao;
  private StatusDao statusDao;

  public CommentDao(Context context) {
    super(context);
    userDao = new UserDao(context);
    statusDao = new StatusDao(context);
  }

  /**
   * Persists a single comment for the given account inside a transaction.
   * No-op when the comment is null.
   */
  public void save(Comment comment, LocalAccount account) {
    if (isNull(comment)) {
      return;
    }
    SQLiteDatabase sqLiteDatabase = dbHelper.getWritableDatabase();
    sqLiteDatabase.beginTransaction();
    try {
      save(sqLiteDatabase, comment, account);
      sqLiteDatabase.setTransactionSuccessful();
    } finally {
      sqLiteDatabase.endTransaction();
    }
  }

  /**
   * Persists a batch of comments in a single transaction, so either all
   * rows are written or none. No-op on null arguments.
   */
  public void batchSave(List<Comment> comments, LocalAccount account) {
    if (isNull(comments) || isNull(account)) {
      return;
    }
    SQLiteDatabase sqLiteDatabase = dbHelper.getWritableDatabase();
    sqLiteDatabase.beginTransaction();
    try {
      for (Comment comment : comments) {
        save(sqLiteDatabase, comment, account);
      }
      sqLiteDatabase.setTransactionSuccessful();
    } finally {
      sqLiteDatabase.endTransaction();
    }
  }

  /**
   * Writes one comment row (REPLACE semantics) and recursively persists its
   * reply-to comment, reply-to status and author. Reply-to comments are
   * saved with a null account, i.e. Account_ID = -1 (see findById).
   */
  void save(SQLiteDatabase sqLiteDatabase, Comment comment, LocalAccount account) {
    if (isNull(comment)) {
      return;
    }
    ContentValues values = new ContentValues();
    values.put("Comment_ID", comment.getCommentId());
    // Stored as epoch millis; 0 stands for "no creation time".
    values.put("Created_At", comment.getCreatedAt() == null ? 0 : comment.getCreatedAt().getTime());
    values.put("Text", comment.getText());
    values.put("Source", comment.getSource());
    values.put("Is_Truncated", comment.isTruncated() ? 1 : 0);
    values.put("Is_Favorated", comment.isFavorited() ? 1 : 0);
    values.put("Service_Provider", comment.getServiceProvider().getSpNo());
    values.put("Account_ID", account == null ? -1 : account.getAccountId());
    if (comment instanceof LocalComment) {
      values.put("Is_Divider", ((LocalComment) comment).isDivider() ? 1 : 0);
    } else {
      values.put("Is_Divider", 0);
    }
    if (comment.getReplyToComment() != null) {
      save(sqLiteDatabase, comment.getReplyToComment(), null);
      values.put("In_Reply_To_Comment_ID", comment.getReplyToComment().getCommentId());
    }
    if (comment.getReplyToStatus() != null) {
      statusDao.save(sqLiteDatabase, comment.getReplyToStatus(), StatusCatalog.Others, null);
      values.put("In_Reply_To_Status_ID", comment.getReplyToStatus().getStatusId());
    }
    if (comment.getUser() != null) {
      userDao.save(sqLiteDatabase, comment.getUser());
      values.put("User_ID", comment.getUser().getUserId());
    }
    sqLiteDatabase.replace(TABLE, null, values);
  }

  /**
   * Deletes one comment row of the given account.
   *
   * @return number of rows deleted, or -1 on null arguments
   */
  public int delete(Comment comment, LocalAccount account) {
    if (isNull(comment) || isNull(account)) {
      return -1;
    }
    SQLiteDatabase sqLiteDatabase = dbHelper.getWritableDatabase();
    // Bind the id via whereArgs instead of concatenating it into the WHERE
    // clause: a comment id containing a quote would otherwise break the
    // statement (SQL injection / quoting bug in the original).
    int rowsAffected = sqLiteDatabase.delete(TABLE,
        "Comment_ID = ? and Account_ID = ?",
        new String[] {comment.getCommentId(), String.valueOf(account.getAccountId())});
    return rowsAffected;
  }

  /**
   * Deletes all comment rows of the given account.
   *
   * @return number of rows deleted, or -1 when account is null
   */
  public int delete(LocalAccount account) {
    if (isNull(account)) {
      return -1;
    }
    SQLiteDatabase sqLiteDatabase = dbHelper.getWritableDatabase();
    int rowsAffected = sqLiteDatabase.delete(TABLE, "Account_ID = ?",
        new String[] {String.valueOf(account.getAccountId())});
    return rowsAffected;
  }

  /**
   * Looks up a comment by id and service provider.
   *
   * @param isReplyTo when true, restricts the lookup to rows saved as
   *                  reply-to targets (sentinel Account_ID = -1)
   * @return the comment, or null on null arguments or no match
   */
  public Comment findById(String commentId, ServiceProvider sp, boolean isReplyTo) {
    if (isNull(commentId) || isNull(sp)) {
      return null;
    }
    SQLiteDatabase sqLiteDatabase = dbHelper.getWritableDatabase();
    return findById(sqLiteDatabase, commentId, sp, isReplyTo);
  }

  Comment findById(SQLiteDatabase sqLiteDatabase, String commentId, ServiceProvider sp, boolean isReplyTo) {
    // BaseDao.query() takes a raw SQL string, so escape the id by doubling
    // single quotes before inlining it into the string literal (standard
    // SQLite quoting); the original inlined it unescaped.
    String escapedId = commentId.replace("'", "''");
    String sql = "select * from Comment where Comment_ID = '" + escapedId + "' and Service_Provider = "
        + sp.getSpNo();
    if (isReplyTo) {
      // Reply-to comments are stored with the sentinel Account_ID = -1.
      sql += " and Account_ID = -1";
    }
    return this.query(sqLiteDatabase, sql);
  }

  /**
   * Maps the current cursor row to a {@link LocalComment}, resolving the
   * author, reply-to status and reply-to comment through their DAOs.
   */
  @Override
  public Comment extractData(SQLiteDatabase sqLiteDatabase, Cursor cursor) {
    LocalComment comment = new LocalComment();
    comment.setCommentId(cursor.getString(cursor.getColumnIndex("Comment_ID")));
    long time = cursor.getLong(cursor.getColumnIndex("Created_At"));
    // 0 means "no creation time was stored" (see save()).
    if (time > 0) {
      comment.setCreatedAt(new Date(time));
    }
    comment.setText(cursor.getString(cursor.getColumnIndex("Text")));
    comment.setSource(cursor.getString(cursor.getColumnIndex("Source")));
    comment.setFavorited(1 == cursor.getInt(cursor.getColumnIndex("Is_Favorated")));
    comment.setTruncated(1 == cursor.getInt(cursor.getColumnIndex("Is_Truncated")));
    comment.setDivider(1 == cursor.getInt(cursor.getColumnIndex("Is_Divider")));
    int sp = cursor.getInt(cursor.getColumnIndex("Service_Provider"));
    comment.setServiceProvider(ServiceProvider.getServiceProvider(sp));
    String userId = cursor.getString(cursor.getColumnIndex("User_ID"));
    if (StringUtil.isNotEmpty(userId)) {
      User user = (User) userDao.findById(sqLiteDatabase, userId, comment.getServiceProvider());
      comment.setUser(user);
    }
    String statusId = cursor.getString(cursor.getColumnIndex("In_Reply_To_Status_ID"));
    if (statusId != null) {
      Status inReplyToStatus = statusDao.findById(sqLiteDatabase, statusId, comment.getServiceProvider(), true);
      comment.setReplyToStatus(inReplyToStatus);
    }
    String commentId = cursor.getString(cursor.getColumnIndex("In_Reply_To_Comment_ID"));
    if (commentId != null) {
      Comment inReplyToComment = findById(sqLiteDatabase, commentId, comment.getServiceProvider(), true);
      comment.setReplyToComment(inReplyToComment);
    }
    return comment;
  }
}
| {
"content_hash": "06be6d9460bdc29a53be9af3015d7358",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 115,
"avg_line_length": 33.486338797814206,
"alnum_prop": 0.7087140992167101,
"repo_name": "cattong/YiBo",
"id": "d14696bfae8151ed6f76ce4e22c3fbf8ec7c64fe",
"size": "6128",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "YiBo/src/com/shejiaomao/weibo/db/CommentDao.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "2746446"
}
],
"symlink_target": ""
} |
package edu.uncc.parsets.data;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.zip.Deflater;
import java.util.zip.GZIPOutputStream;
import org.json.simple.JSONValue;
import edu.uncc.parsets.data.LocalDB.DBAccess;
import edu.uncc.parsets.util.PSLogging;
/**
* Export {@link DataSet}s to a streaming-friendly JSON format.
*/
public class JSONExport {
/**
 * The version of the JSON data produced by this class. The idea is to
 * treat 1000 as 1.000, 1100 as 1.1, etc. Minor revisions that don't break
 * compatibility in the reader increment the last two digits. A revision
 * that requires an update of the reader adds 100. A change in the first
 * digit would mean a complete revamp of the data model.
 */
public static final int JSONVERSION = 2000;
// Highest data version a reader built against JSONVERSION may still accept.
public static final int JSONMAXCOMPATIBLEVERSION = 2099;
// Top-level JSON key for the metadata object written by both export methods.
public static final String METAKEY = "%meta";
// Top-level JSON key for the dataset index (see exportDBIndex).
public static final String DATASETSKEY = "datasets";
/**
 * Serializes one dataset to a gzipped JSON file: a "%meta" object (version,
 * type, table/index DDL descriptions) plus a "data" map of table name to
 * row lists. Returns the file name actually written, or null on failure.
 */
public static String exportDataSet(LocalDBDataSet ds, String fileName) {
Map<String, Object> dataset = new LinkedHashMap<String, Object>();
Map<String, Object> meta = new LinkedHashMap<String, Object>();
meta.put("version", JSONExport.JSONVERSION);
meta.put("type", "cube");
List<Map<String, Object>> tables = new Vector<Map<String, Object>>();
meta.put("tables", tables);
List<Map<String, Object>> indices = new Vector<Map<String, Object>>();
meta.put("indices", indices);
dataset.put(METAKEY, meta);
Map<String, List<List<Object>>> data = new LinkedHashMap<String, List<List<Object>>>();
dataset.put("data", data);
try {
// Admin tables are filtered down to the rows belonging to this dataset.
exportTable(data, ds.getDB(), "Admin_Datasets", "handle", ds.getHandle());
exportTable(data, ds.getDB(), "Admin_Dimensions", "dataset", ds.getHandle());
exportTable(data, ds.getDB(), "Admin_Categories", "dataset", ds.getHandle());
// Walk sqlite_master for the dataset's own tables/indices (name prefix match).
PreparedStatement stmt = ds.getDB().prepareStatement("select name, sql, type from sqlite_master where name like \""+ds.getHandle()+"%\";", DBAccess.FORREADING);
ResultSet rs = stmt.executeQuery();
while (rs.next()) {
if (rs.getString("type").equals("table")) {
tables.add(create2JSON(rs.getString("sql")));
String tableName = rs.getString("name");
exportTable(data, ds.getDB(), tableName, null, null);
} else if (rs.getString("sql") != null) {
// Auto-created indices have null sql; only explicit ones are exported.
indices.add(index2JSON(rs.getString("sql")));
}
}
// NOTE(review): stmt/rs are never closed here; whether that leaks depends
// on LocalDB's statement caching semantics — confirm before changing.
return writeGZIPFile(fileName, dataset);
} catch (Exception e) {
PSLogging.logger.error("Could not export dataset as JSON file.", e);
} finally {
// NOTE(review): releaseReadLock is also called inside each exportTable
// call above; presumably each prepareStatement(FORREADING) takes the
// lock once — verify the acquire/release pairing in LocalDB.
ds.getDB().releaseReadLock();
}
return null;
}
/**
 * Dumps one table into container under its name. The first emitted row
 * holds the column type names ("INTEGER"/"REAL"/"TEXT"); subsequent rows
 * hold the values. When key is non-null, rows are filtered by key = value.
 */
private static void exportTable(Map<String, List<List<Object>>> container, LocalDB db, String tableName, String key, String value) {
List<List<Object>> data = new ArrayList<List<Object>>();
try {
// NOTE(review): tableName/key/value are concatenated into the SQL; all
// call sites pass internal admin values, so this is not user input here.
String sql = "select * from "+tableName;
if (key != null)
sql += " where "+key+"='"+value+"';";
else
sql += ";";
PreparedStatement stmt = db.prepareStatement(sql, DBAccess.FORREADING);
ResultSet rs = stmt.executeQuery();
boolean firstRow = true;
int numColumns = 0;
while (rs.next()) {
if (firstRow) {
// Header row: one type-name string per column, derived from JDBC metadata.
numColumns = rs.getMetaData().getColumnCount();
List<Object> row = new ArrayList<Object>(numColumns);
for (int column = 1; column <= numColumns; column++) {
switch (rs.getMetaData().getColumnType(column)) {
case Types.INTEGER:
row.add("INTEGER");
break;
case Types.FLOAT:
row.add("REAL");
break;
case Types.NULL: // null can only be an empty text field
case Types.VARCHAR:
row.add("TEXT");
break;
default:
PSLogging.logger.error("Encountered unknown column type: "+rs.getMetaData().getColumnType(column)+" in table "+tableName);
}
}
data.add(row);
firstRow = false;
}
// Value row for the current result-set row, typed per column.
List<Object> row = new ArrayList<Object>(numColumns);
for (int column = 1; column <= numColumns; column++) {
switch (rs.getMetaData().getColumnType(column)) {
case Types.INTEGER:
row.add(rs.getLong(column));
break;
case Types.FLOAT:
row.add(rs.getFloat(column));
break;
case Types.VARCHAR:
row.add(rs.getString(column));
break;
case Types.NULL:
row.add(null);
break;
}
}
data.add(row);
}
container.put(tableName, data);
} catch (SQLException e) {
PSLogging.logger.error("Could not query table "+tableName+".", e);
} finally {
db.releaseReadLock();
}
}
/**
 * Parses a CREATE TABLE statement into {"tableName": ..., "columns":
 * [[name, type], ...]} by naive whitespace splitting of the DDL.
 */
private static Map<String, Object> create2JSON(String sql) {
Map<String, Object> json = new LinkedHashMap<String, Object>();
// Example inputs this parser expects:
// CREATE TABLE Admin_Dimensions (dataSet TEXT, name TEXT, handle TEXT, type TEXT, leftShift INTEGER, bitMask INTEGER)
// CREATE TABLE householdsal_measures (key INTEGER, numpeople REAL, numvehicles REAL, costelectricity REAL, costgas REAL, costwater REAL, costoil REAL, rent REAL, mortgage REAL, mortgage2 REAL, rentaspercentage REAL, employment REAL, experience REAL, totalincome REAL)
String firstSplit[] = sql.split("\\(");
// extract table name from "CREATE TABLE <tablename>"
String create[] = firstSplit[0].split(" ");
json.put("tableName", create[2]);
// fields are "<name>", "<type>," pairs, last one ends in ")" instead of comma
String columnNames[] = firstSplit[1].split(" ");
List<List<String>> columns = new ArrayList<List<String>>(columnNames.length/2);
for (int i = 0; i < columnNames.length; i += 2) {
List<String> pair = new ArrayList<String>(2);
pair.add(columnNames[i]);
// Strip the trailing comma (or closing paren on the last pair).
pair.add(columnNames[i+1].substring(0, columnNames[i+1].length()-1));
columns.add(pair);
}
json.put("columns", columns);
return json;
}
/**
 * Parses a CREATE INDEX statement into {"indexName": ..., "tableName": ...,
 * "columns": [...]} by positional token splitting of the DDL.
 */
private static Map<String, Object> index2JSON(String sql) {
Map<String, Object> json = new LinkedHashMap<String, Object>();
String tokens[] = sql.split(" ");
// Example inputs this parser expects:
// CREATE INDEX Admin_Dimensions_Handle on Admin_Dimensions (dataSet)
// CREATE INDEX Admin_Categories_DSHandle on Admin_Categories (dataSet, dimension)
// skip "CREATE" and "INDEX"
json.put("indexName", tokens[2]);
// skip "on"
json.put("tableName", tokens[4]);
// now remove parentheses and commas from rest
List<String> columns = new ArrayList<String>(tokens.length-5);
for (int i = 5; i < tokens.length; i++) {
String column = tokens[i].substring(0, tokens[i].length()-1);
if (i == 5)
// First column also carries the opening paren.
column = column.substring(1);
columns.add(column);
}
json.put("columns", columns);
return json;
}
/**
 * Writes an index of all datasets in the database to a gzipped JSON file:
 * "%meta" (type/version) plus DATASETSKEY mapping handle to dataset facts.
 * Returns the file name written, or null on failure.
 */
public static String exportDBIndex(LocalDB db, String fileName) {
Map<String, Object> index = new LinkedHashMap<String, Object>();
Map<String, Object> meta = new LinkedHashMap<String, Object>();
meta.put("type", "index");
meta.put("version", JSONExport.JSONVERSION);
index.put(METAKEY, meta);
Map<String, Map<String, Object>> dsList = new LinkedHashMap<String, Map<String,Object>>();
for (DataSet ds : db.getDataSets()) {
Map<String, Object> dataSet = new LinkedHashMap<String, Object>();
dataSet.put("name", ds.getName());
dataSet.put("section", ds.getSection());
dataSet.put("source", ((LocalDBDataSet)ds).getSource());
dataSet.put("srcURL", ((LocalDBDataSet)ds).getSrcURL());
dataSet.put("items", ds.getNumRecords());
dataSet.put("categorical", ds.getNumCategoricalDimensions());
dataSet.put("numerical", ds.getNumNumericDimensions());
dataSet.put("url", ds.getURL());
dsList.put(ds.getHandle(), dataSet);
}
index.put(DATASETSKEY, dsList);
return writeGZIPFile(fileName, index);
}
/**
 * Serializes dataset as JSON into a best-compression gzip file, appending
 * the ".json.gz" suffix if missing. Returns the final file name, or null
 * if writing failed.
 */
private static String writeGZIPFile(String fileName, Map<String, Object> dataset) {
if (!fileName.endsWith(".json.gz"))
fileName += ".json.gz";
try {
// Anonymous subclass so the instance initializer can raise the deflater
// level; GZIPOutputStream itself exposes no compression-level setter.
// found this trick on http://weblogs.java.net/blog/mister__m/archive/2003/12/achieving_bette.html
GZIPOutputStream outStream = new GZIPOutputStream(new FileOutputStream(fileName)) {
{
def.setLevel(Deflater.BEST_COMPRESSION);
}
};
OutputStreamWriter outWriter = new OutputStreamWriter(outStream);
JSONValue.writeJSONString(dataset, outWriter);
outWriter.flush();
outWriter.close();
return fileName;
} catch (Exception e) {
PSLogging.logger.error("Error writing to gzipped file "+fileName+".", e);
}
return null;
}
}
| {
"content_hash": "e463b0d71b1d60ba8ea1bf303b28c15d",
"timestamp": "",
"source": "github",
"line_count": 228,
"max_line_length": 270,
"avg_line_length": 37.280701754385966,
"alnum_prop": 0.6809411764705883,
"repo_name": "dvonpasecky/parsets",
"id": "b3cee7ea756e8b3ab99a56f1b55ce5277c8d3548",
"size": "10284",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "edu/uncc/parsets/data/JSONExport.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "345793"
},
{
"name": "NSIS",
"bytes": "5995"
}
],
"symlink_target": ""
} |
package msgraph
// GiphyRatingType undocumented
type GiphyRatingType string

// Enumeration values for GiphyRatingType.
const (
	// GiphyRatingTypeVStrict undocumented
	GiphyRatingTypeVStrict GiphyRatingType = "strict"
	// GiphyRatingTypeVModerate undocumented
	GiphyRatingTypeVModerate GiphyRatingType = "moderate"
	// GiphyRatingTypeVUnknownFutureValue undocumented
	GiphyRatingTypeVUnknownFutureValue GiphyRatingType = "unknownFutureValue"
)

// Backing variables for the pointer helpers below: Go forbids taking the
// address of a constant, so each value is first copied into a var.
var (
	_GiphyRatingTypePStrict             = GiphyRatingTypeVStrict
	_GiphyRatingTypePModerate           = GiphyRatingTypeVModerate
	_GiphyRatingTypePUnknownFutureValue = GiphyRatingTypeVUnknownFutureValue
)

// Pointer helpers, convenient for populating optional struct fields.
var (
	// GiphyRatingTypePStrict is a pointer to GiphyRatingTypeVStrict
	GiphyRatingTypePStrict = &_GiphyRatingTypePStrict
	// GiphyRatingTypePModerate is a pointer to GiphyRatingTypeVModerate
	GiphyRatingTypePModerate = &_GiphyRatingTypePModerate
	// GiphyRatingTypePUnknownFutureValue is a pointer to GiphyRatingTypeVUnknownFutureValue
	GiphyRatingTypePUnknownFutureValue = &_GiphyRatingTypePUnknownFutureValue
)
| {
"content_hash": "706603bdf39610fa5ab4e71e588d929e",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 89,
"avg_line_length": 36.714285714285715,
"alnum_prop": 0.8424124513618677,
"repo_name": "42wim/matterbridge",
"id": "b1a207360df67adac69693cb178c43e3ea419824",
"size": "1078",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/github.com/yaegashi/msgraph.go/beta/EnumGiphy.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2029"
},
{
"name": "Go",
"bytes": "456275"
},
{
"name": "Shell",
"bytes": "513"
}
],
"symlink_target": ""
} |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.customerinsights.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/** Property definition. */
@Fluent
public final class PropertyDefinition {
    // Per-instance logger used by validate() to raise required-property errors.
    @JsonIgnore private final ClientLogger logger = new ClientLogger(PropertyDefinition.class);

    /*
     * Array value separator for properties with isArray set.
     */
    @JsonProperty(value = "arrayValueSeparator")
    private String arrayValueSeparator;

    /*
     * Describes valid values for an enum property.
     */
    @JsonProperty(value = "enumValidValues")
    private List<ProfileEnumValidValuesFormat> enumValidValues;

    /*
     * Name of the property. Required (see validate()).
     */
    @JsonProperty(value = "fieldName", required = true)
    private String fieldName;

    /*
     * Type of the property. Required (see validate()).
     */
    @JsonProperty(value = "fieldType", required = true)
    private String fieldType;

    /*
     * Indicates if the property is actually an array of the fieldType above on
     * the data api.
     */
    @JsonProperty(value = "isArray")
    private Boolean isArray;

    /*
     * Indicates if the property is an enum.
     */
    @JsonProperty(value = "isEnum")
    private Boolean isEnum;

    /*
     * Indicates if the property is an flag enum.
     */
    @JsonProperty(value = "isFlagEnum")
    private Boolean isFlagEnum;

    /*
     * Whether the property is an Image.
     */
    @JsonProperty(value = "isImage")
    private Boolean isImage;

    /*
     * Whether the property is a localized string.
     */
    @JsonProperty(value = "isLocalizedString")
    private Boolean isLocalizedString;

    /*
     * Whether the property is a name or a part of name.
     */
    @JsonProperty(value = "isName")
    private Boolean isName;

    /*
     * Whether property value is required on instances, IsRequired field only
     * for Interaction. Profile Instance will not check for required field.
     */
    @JsonProperty(value = "isRequired")
    private Boolean isRequired;

    /*
     * The ID associated with the property.
     */
    @JsonProperty(value = "propertyId")
    private String propertyId;

    /*
     * URL encoded schema.org item prop link for the property.
     */
    @JsonProperty(value = "schemaItemPropLink")
    private String schemaItemPropLink;

    /*
     * Max length of string. Used only if type is string.
     */
    @JsonProperty(value = "maxLength")
    private Integer maxLength;

    /*
     * Whether property is available in graph or not.
     */
    @JsonProperty(value = "isAvailableInGraph")
    private Boolean isAvailableInGraph;

    /*
     * This is specific to interactions modeled as activities. Data sources are
     * used to determine where data is stored and also in precedence rules.
     * WRITE_ONLY access: populated from the service response only; no setter
     * is generated for it below.
     */
    @JsonProperty(value = "dataSourcePrecedenceRules", access = JsonProperty.Access.WRITE_ONLY)
    private List<DataSourcePrecedence> dataSourcePrecedenceRules;

    /**
     * Get the arrayValueSeparator property: Array value separator for properties with isArray set.
     *
     * @return the arrayValueSeparator value.
     */
    public String arrayValueSeparator() {
        return this.arrayValueSeparator;
    }

    /**
     * Set the arrayValueSeparator property: Array value separator for properties with isArray set.
     *
     * @param arrayValueSeparator the arrayValueSeparator value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withArrayValueSeparator(String arrayValueSeparator) {
        this.arrayValueSeparator = arrayValueSeparator;
        return this;
    }

    /**
     * Get the enumValidValues property: Describes valid values for an enum property.
     *
     * @return the enumValidValues value.
     */
    public List<ProfileEnumValidValuesFormat> enumValidValues() {
        return this.enumValidValues;
    }

    /**
     * Set the enumValidValues property: Describes valid values for an enum property.
     *
     * @param enumValidValues the enumValidValues value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withEnumValidValues(List<ProfileEnumValidValuesFormat> enumValidValues) {
        this.enumValidValues = enumValidValues;
        return this;
    }

    /**
     * Get the fieldName property: Name of the property.
     *
     * @return the fieldName value.
     */
    public String fieldName() {
        return this.fieldName;
    }

    /**
     * Set the fieldName property: Name of the property.
     *
     * @param fieldName the fieldName value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withFieldName(String fieldName) {
        this.fieldName = fieldName;
        return this;
    }

    /**
     * Get the fieldType property: Type of the property.
     *
     * @return the fieldType value.
     */
    public String fieldType() {
        return this.fieldType;
    }

    /**
     * Set the fieldType property: Type of the property.
     *
     * @param fieldType the fieldType value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withFieldType(String fieldType) {
        this.fieldType = fieldType;
        return this;
    }

    /**
     * Get the isArray property: Indicates if the property is actually an array of the fieldType above on the data api.
     *
     * @return the isArray value.
     */
    public Boolean isArray() {
        return this.isArray;
    }

    /**
     * Set the isArray property: Indicates if the property is actually an array of the fieldType above on the data api.
     *
     * @param isArray the isArray value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsArray(Boolean isArray) {
        this.isArray = isArray;
        return this;
    }

    /**
     * Get the isEnum property: Indicates if the property is an enum.
     *
     * @return the isEnum value.
     */
    public Boolean isEnum() {
        return this.isEnum;
    }

    /**
     * Set the isEnum property: Indicates if the property is an enum.
     *
     * @param isEnum the isEnum value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsEnum(Boolean isEnum) {
        this.isEnum = isEnum;
        return this;
    }

    /**
     * Get the isFlagEnum property: Indicates if the property is an flag enum.
     *
     * @return the isFlagEnum value.
     */
    public Boolean isFlagEnum() {
        return this.isFlagEnum;
    }

    /**
     * Set the isFlagEnum property: Indicates if the property is an flag enum.
     *
     * @param isFlagEnum the isFlagEnum value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsFlagEnum(Boolean isFlagEnum) {
        this.isFlagEnum = isFlagEnum;
        return this;
    }

    /**
     * Get the isImage property: Whether the property is an Image.
     *
     * @return the isImage value.
     */
    public Boolean isImage() {
        return this.isImage;
    }

    /**
     * Set the isImage property: Whether the property is an Image.
     *
     * @param isImage the isImage value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsImage(Boolean isImage) {
        this.isImage = isImage;
        return this;
    }

    /**
     * Get the isLocalizedString property: Whether the property is a localized string.
     *
     * @return the isLocalizedString value.
     */
    public Boolean isLocalizedString() {
        return this.isLocalizedString;
    }

    /**
     * Set the isLocalizedString property: Whether the property is a localized string.
     *
     * @param isLocalizedString the isLocalizedString value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsLocalizedString(Boolean isLocalizedString) {
        this.isLocalizedString = isLocalizedString;
        return this;
    }

    /**
     * Get the isName property: Whether the property is a name or a part of name.
     *
     * @return the isName value.
     */
    public Boolean isName() {
        return this.isName;
    }

    /**
     * Set the isName property: Whether the property is a name or a part of name.
     *
     * @param isName the isName value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsName(Boolean isName) {
        this.isName = isName;
        return this;
    }

    /**
     * Get the isRequired property: Whether property value is required on instances, IsRequired field only for
     * Interaction. Profile Instance will not check for required field.
     *
     * @return the isRequired value.
     */
    public Boolean isRequired() {
        return this.isRequired;
    }

    /**
     * Set the isRequired property: Whether property value is required on instances, IsRequired field only for
     * Interaction. Profile Instance will not check for required field.
     *
     * @param isRequired the isRequired value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsRequired(Boolean isRequired) {
        this.isRequired = isRequired;
        return this;
    }

    /**
     * Get the propertyId property: The ID associated with the property.
     *
     * @return the propertyId value.
     */
    public String propertyId() {
        return this.propertyId;
    }

    /**
     * Set the propertyId property: The ID associated with the property.
     *
     * @param propertyId the propertyId value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withPropertyId(String propertyId) {
        this.propertyId = propertyId;
        return this;
    }

    /**
     * Get the schemaItemPropLink property: URL encoded schema.org item prop link for the property.
     *
     * @return the schemaItemPropLink value.
     */
    public String schemaItemPropLink() {
        return this.schemaItemPropLink;
    }

    /**
     * Set the schemaItemPropLink property: URL encoded schema.org item prop link for the property.
     *
     * @param schemaItemPropLink the schemaItemPropLink value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withSchemaItemPropLink(String schemaItemPropLink) {
        this.schemaItemPropLink = schemaItemPropLink;
        return this;
    }

    /**
     * Get the maxLength property: Max length of string. Used only if type is string.
     *
     * @return the maxLength value.
     */
    public Integer maxLength() {
        return this.maxLength;
    }

    /**
     * Set the maxLength property: Max length of string. Used only if type is string.
     *
     * @param maxLength the maxLength value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withMaxLength(Integer maxLength) {
        this.maxLength = maxLength;
        return this;
    }

    /**
     * Get the isAvailableInGraph property: Whether property is available in graph or not.
     *
     * @return the isAvailableInGraph value.
     */
    public Boolean isAvailableInGraph() {
        return this.isAvailableInGraph;
    }

    /**
     * Set the isAvailableInGraph property: Whether property is available in graph or not.
     *
     * @param isAvailableInGraph the isAvailableInGraph value to set.
     * @return the PropertyDefinition object itself.
     */
    public PropertyDefinition withIsAvailableInGraph(Boolean isAvailableInGraph) {
        this.isAvailableInGraph = isAvailableInGraph;
        return this;
    }

    /**
     * Get the dataSourcePrecedenceRules property: This is specific to interactions modeled as activities. Data sources
     * are used to determine where data is stored and also in precedence rules.
     *
     * @return the dataSourcePrecedenceRules value.
     */
    public List<DataSourcePrecedence> dataSourcePrecedenceRules() {
        return this.dataSourcePrecedenceRules;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // Recursively validate nested models when present.
        if (enumValidValues() != null) {
            enumValidValues().forEach(e -> e.validate());
        }
        // fieldName and fieldType are the only required properties of this model.
        if (fieldName() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException("Missing required property fieldName in model PropertyDefinition"));
        }
        if (fieldType() == null) {
            throw logger
                .logExceptionAsError(
                    new IllegalArgumentException("Missing required property fieldType in model PropertyDefinition"));
        }
        if (dataSourcePrecedenceRules() != null) {
            dataSourcePrecedenceRules().forEach(e -> e.validate());
        }
    }
}
| {
"content_hash": "04e80afd1f5f9824c62511b9c7f2ec9b",
"timestamp": "",
"source": "github",
"line_count": 452,
"max_line_length": 119,
"avg_line_length": 29.52433628318584,
"alnum_prop": 0.6514050206069689,
"repo_name": "Azure/azure-sdk-for-java",
"id": "4a8407a50cce90390dcd82322d62cd5efa605a7a",
"size": "13345",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/customerinsights/azure-resourcemanager-customerinsights/src/main/java/com/azure/resourcemanager/customerinsights/models/PropertyDefinition.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8762"
},
{
"name": "Bicep",
"bytes": "15055"
},
{
"name": "CSS",
"bytes": "7676"
},
{
"name": "Dockerfile",
"bytes": "2028"
},
{
"name": "Groovy",
"bytes": "3237482"
},
{
"name": "HTML",
"bytes": "42090"
},
{
"name": "Java",
"bytes": "432409546"
},
{
"name": "JavaScript",
"bytes": "36557"
},
{
"name": "Jupyter Notebook",
"bytes": "95868"
},
{
"name": "PowerShell",
"bytes": "737517"
},
{
"name": "Python",
"bytes": "240542"
},
{
"name": "Scala",
"bytes": "1143898"
},
{
"name": "Shell",
"bytes": "18488"
},
{
"name": "XSLT",
"bytes": "755"
}
],
"symlink_target": ""
} |
package de.stephanlindauer.criticalmaps.overlays;
import org.osmdroid.ResourceProxy;
import org.osmdroid.bonuspack.overlays.Marker;
import org.osmdroid.views.MapView;
/**
 * Map marker for a location: icon anchored at its center and with the
 * marker's popup info window disabled.
 */
public class LocationMarker extends Marker {

    /**
     * @param mapView       the map view this marker is attached to
     * @param resourceProxy resource proxy forwarded to the {@link Marker} superclass
     */
    public LocationMarker(MapView mapView, ResourceProxy resourceProxy) {
        super(mapView, resourceProxy);
        // Center the marker icon on its geographic position.
        setAnchor(Marker.ANCHOR_CENTER, Marker.ANCHOR_CENTER);
        // No info window should open when this marker is tapped.
        setInfoWindow(null);
    }
}
| {
"content_hash": "74b49d46c95eee0b01ec42a537be3a92",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 73,
"avg_line_length": 30.571428571428573,
"alnum_prop": 0.7663551401869159,
"repo_name": "stephanlindauer/criticalmaps-android",
"id": "0c690b4bbdcc8fb17744767f740edaedb56b9359",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/feature_sternfahrt",
"path": "app/src/main/java/de/stephanlindauer/criticalmaps/overlays/LocationMarker.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "105030"
}
],
"symlink_target": ""
} |
namespace atom {
// Observes platform events for a NativeWindowViews instance and tracks its
// minimized/maximized state across event processing (header guard suggests
// this is the X11 implementation — see ATOM_BROWSER_UI_X_... below).
class WindowStateWatcher : public ui::PlatformEventObserver {
 public:
  explicit WindowStateWatcher(NativeWindowViews* window);
  virtual ~WindowStateWatcher();

 protected:
  // ui::PlatformEventObserver:
  void WillProcessEvent(const ui::PlatformEvent& event) override;
  void DidProcessEvent(const ui::PlatformEvent& event) override;

 private:
  // Whether |event| is one that can change this window's state.
  bool IsWindowStateEvent(const ui::PlatformEvent& event);

  NativeWindowViews* window_;  // raw pointer; assumed to outlive the watcher — TODO confirm
  gfx::AcceleratedWidget widget_;
  // State snapshots taken before an event, compared after it is processed.
  bool was_minimized_;
  bool was_maximized_;

  DISALLOW_COPY_AND_ASSIGN(WindowStateWatcher);
};
} // namespace atom
#endif // ATOM_BROWSER_UI_X_WINDOW_STATE_WATCHER_H_
| {
"content_hash": "01faaefec0d7c19856bc93c331f4a084",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 65,
"avg_line_length": 24.666666666666668,
"alnum_prop": 0.7612612612612613,
"repo_name": "thomsonreuters/electron",
"id": "376e028e1a77ecf76506775bd57d700c54402d22",
"size": "1007",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "atom/browser/ui/x/window_state_watcher.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4499"
},
{
"name": "C++",
"bytes": "2795707"
},
{
"name": "HTML",
"bytes": "17392"
},
{
"name": "JavaScript",
"bytes": "865416"
},
{
"name": "Objective-C",
"bytes": "53373"
},
{
"name": "Objective-C++",
"bytes": "310336"
},
{
"name": "PowerShell",
"bytes": "99"
},
{
"name": "Python",
"bytes": "216426"
},
{
"name": "Shell",
"bytes": "3880"
}
],
"symlink_target": ""
} |
namespace Serenity.Web
{
internal static partial class Texts
{
public static partial class Controls
{
/// <summary>
/// Default local (translatable) texts for the column picker dialog UI.
/// </summary>
public static class ColumnPickerDialog
{
    /// <summary>Header for the list of hidden columns.</summary>
    public static LocalText HiddenColumns = "Hidden Columns";
    /// <summary>Hint shown for the action that hides a column.</summary>
    public static LocalText HideHint = "hide";
    /// <summary>Caption for the button restoring default column layout.</summary>
    public static LocalText RestoreDefaults = "Restore Defaults";
    /// <summary>Hint shown for the action that shows a column.</summary>
    public static LocalText ShowHint = "show";
    /// <summary>Dialog title.</summary>
    public static LocalText Title = "Column Picker";
    /// <summary>Header for the list of visible columns.</summary>
    public static LocalText VisibleColumns = "Visible Columns";
}
}
}
} | {
"content_hash": "9351a48ff80a020cace645f650dac66b",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 77,
"avg_line_length": 36.166666666666664,
"alnum_prop": 0.5668202764976958,
"repo_name": "volkanceylan/Serenity",
"id": "6325876db2b1e05238bb67301d1f75e1af0a13aa",
"size": "653",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/Serenity.Net.Web/Texts/Texts.Controls.ColumnPickerDialog.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1592"
},
{
"name": "C#",
"bytes": "3328213"
},
{
"name": "CSS",
"bytes": "198506"
},
{
"name": "HTML",
"bytes": "2818"
},
{
"name": "JavaScript",
"bytes": "638940"
},
{
"name": "Roff",
"bytes": "11586"
},
{
"name": "Shell",
"bytes": "287"
},
{
"name": "Smalltalk",
"bytes": "290"
},
{
"name": "TSQL",
"bytes": "1592"
},
{
"name": "TypeScript",
"bytes": "804757"
},
{
"name": "XSLT",
"bytes": "17702"
}
],
"symlink_target": ""
} |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.github.nppes4j.entity;
import java.util.Objects;
/**
*
* @author Roberto C. Benitez
*/
public class Taxonomy
{
private String code;
private String license;
private String state;
private boolean primary;
private String description;
public Taxonomy()
{
}
public String getCode()
{
return code;
}
public void setCode(String code)
{
this.code = code;
}
public String getLicense()
{
return license;
}
public void setLicense(String license)
{
this.license = license;
}
public String getState()
{
return state;
}
public void setState(String state)
{
this.state = state;
}
public boolean isPrimary()
{
return primary;
}
public void setPrimary(boolean primary)
{
this.primary = primary;
}
public String getDescription()
{
return description;
}
public void setDescription(String description)
{
this.description = description;
}
@Override
public int hashCode()
{
int hash = 7;
hash = 29 * hash + Objects.hashCode(this.code);
hash = 29 * hash + Objects.hashCode(this.license);
hash = 29 * hash + Objects.hashCode(this.state);
hash = 29 * hash + (this.primary ? 1 : 0);
hash = 29 * hash + Objects.hashCode(this.description);
return hash;
}
@Override
public boolean equals(Object obj)
{
if (this == obj)
{
return true;
}
if (obj == null)
{
return false;
}
if (getClass() != obj.getClass())
{
return false;
}
final Taxonomy other = (Taxonomy) obj;
if (this.primary != other.primary)
{
return false;
}
if (!Objects.equals(this.code, other.code))
{
return false;
}
if (!Objects.equals(this.license, other.license))
{
return false;
}
if (!Objects.equals(this.state, other.state))
{
return false;
}
if (!Objects.equals(this.description, other.description))
{
return false;
}
return true;
}
@Override
public String toString()
{
return "Taxonomy{" + "code=" + code + ", license=" + license + ", state=" + state + ", primary=" + primary + ", description=" + description + '}';
}
}
| {
"content_hash": "e1b50cec88d14a9ca7b50bb268d1a48b",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 154,
"avg_line_length": 21.51127819548872,
"alnum_prop": 0.5106606081789584,
"repo_name": "rbenitez22/nppes4j",
"id": "5a599777e25d258c937522937e9723555480379b",
"size": "2861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nppes4j/src/main/java/com/github/nppes4j/entity/Taxonomy.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "65564"
}
],
"symlink_target": ""
} |
package kvledger
import (
"fmt"
"strconv"
"testing"
"github.com/hyperledger/fabric/common/ledger/testutil"
ledgerpackage "github.com/hyperledger/fabric/core/ledger"
"github.com/hyperledger/fabric/core/ledger/ledgerconfig"
ledgertestutil "github.com/hyperledger/fabric/core/ledger/testutil"
"github.com/hyperledger/fabric/protos/common"
putils "github.com/hyperledger/fabric/protos/utils"
"github.com/stretchr/testify/assert"
)
// TestKVLedgerBlockStorage commits two blocks to a fresh ledger and verifies
// blockchain info, block retrieval by hash/number, and transaction retrieval
// by transaction ID.
func TestKVLedgerBlockStorage(t *testing.T) {
	env := newTestEnv(t)
	defer env.cleanup()
	provider, _ := NewProvider()
	defer provider.Close()
	ledger, _ := provider.Create("testLedger")
	defer ledger.Close()

	// A brand-new ledger must report an empty chain.
	bcInfo, _ := ledger.GetBlockchainInfo()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 0, CurrentBlockHash: nil, PreviousBlockHash: nil})

	// Simulate a transaction writing three keys and commit it as block 0.
	simulator, _ := ledger.NewTxSimulator()
	simulator.SetState("ns1", "key1", []byte("value1"))
	simulator.SetState("ns1", "key2", []byte("value2"))
	simulator.SetState("ns1", "key3", []byte("value3"))
	simulator.Done()
	simRes, _ := simulator.GetTxSimulationResults()
	bg := testutil.NewBlockGenerator(t)
	block0 := bg.NextBlock([][]byte{simRes}, false)
	ledger.Commit(block0)
	bcInfo, _ = ledger.GetBlockchainInfo()
	// NOTE(review): block1Hash is actually block0's hash — the local names
	// block1Hash/block2Hash are off by one relative to the blocks they hash.
	block1Hash := block0.Header.Hash()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 1, CurrentBlockHash: block1Hash, PreviousBlockHash: []byte{}})

	// Second transaction overwriting the same keys, committed as block 1.
	simulator, _ = ledger.NewTxSimulator()
	simulator.SetState("ns1", "key1", []byte("value4"))
	simulator.SetState("ns1", "key2", []byte("value5"))
	simulator.SetState("ns1", "key3", []byte("value6"))
	simulator.Done()
	simRes, _ = simulator.GetTxSimulationResults()
	block1 := bg.NextBlock([][]byte{simRes}, false)
	ledger.Commit(block1)
	bcInfo, _ = ledger.GetBlockchainInfo()
	block2Hash := block1.Header.Hash()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 2, CurrentBlockHash: block2Hash, PreviousBlockHash: block0.Header.Hash()})

	// Blocks must be retrievable both by hash and by number.
	b0, _ := ledger.GetBlockByHash(block1Hash)
	testutil.AssertEquals(t, b0, block0)
	b1, _ := ledger.GetBlockByHash(block2Hash)
	testutil.AssertEquals(t, b1, block1)
	b0, _ = ledger.GetBlockByNumber(0)
	testutil.AssertEquals(t, b0, block0)
	b1, _ = ledger.GetBlockByNumber(1)
	testutil.AssertEquals(t, b1, block1)

	// get the tran id from the 2nd block, then use it to test GetTransactionByID()
	txEnvBytes2 := block1.Data.Data[0]
	txEnv2, err := putils.GetEnvelopeFromBlock(txEnvBytes2)
	testutil.AssertNoError(t, err, "Error upon GetEnvelopeFromBlock")
	payload2, err := putils.GetPayload(txEnv2)
	testutil.AssertNoError(t, err, "Error upon GetPayload")
	chdr, err := putils.UnmarshalChannelHeader(payload2.Header.ChannelHeader)
	testutil.AssertNoError(t, err, "Error upon GetChannelHeaderFromBytes")
	txID2 := chdr.TxId
	processedTran2, err := ledger.GetTransactionByID(txID2)
	testutil.AssertNoError(t, err, "Error upon GetTransactionByID")
	// get the tran envelope from the retrieved ProcessedTransaction
	retrievedTxEnv2 := processedTran2.TransactionEnvelope
	testutil.AssertEquals(t, retrievedTxEnv2, txEnv2)
}
func TestKVLedgerDBRecovery(t *testing.T) {
ledgertestutil.SetupCoreYAMLConfig("./../../../peer")
env := newTestEnv(t)
defer env.cleanup()
provider, _ := NewProvider()
defer provider.Close()
ledger, _ := provider.Create("testLedger")
defer ledger.Close()
bcInfo, err := ledger.GetBlockchainInfo()
testutil.AssertNoError(t, err, "")
testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
Height: 0, CurrentBlockHash: nil, PreviousBlockHash: nil})
//creating and committing the first block
simulator, _ := ledger.NewTxSimulator()
//simulating a transaction
simulator.SetState("ns1", "key1", []byte("value1.1"))
simulator.SetState("ns1", "key2", []byte("value2.1"))
simulator.SetState("ns1", "key3", []byte("value3.1"))
simulator.Done()
simRes, _ := simulator.GetTxSimulationResults()
//generating a block based on the simulation result
bg := testutil.NewBlockGenerator(t)
block1 := bg.NextBlock([][]byte{simRes}, false)
//performing validation of read and write set to find valid transactions
ledger.Commit(block1)
bcInfo, _ = ledger.GetBlockchainInfo()
block1Hash := block1.Header.Hash()
testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
Height: 1, CurrentBlockHash: block1Hash, PreviousBlockHash: []byte{}})
//======================================================================================
//SCENARIO 1: peer fails before committing the second block to state DB
//and history DB (if exist)
//======================================================================================
simulator, _ = ledger.NewTxSimulator()
//simulating transaction
simulator.SetState("ns1", "key1", []byte("value1.2"))
simulator.SetState("ns1", "key2", []byte("value2.2"))
simulator.SetState("ns1", "key3", []byte("value3.2"))
simulator.Done()
simRes, _ = simulator.GetTxSimulationResults()
//generating a block based on the simulation result
block2 := bg.NextBlock([][]byte{simRes}, false)
//performing validation of read and write set to find valid transactions
ledger.(*kvLedger).txtmgmt.ValidateAndPrepare(block2, true)
//writing the validated block to block storage but not committing the transaction
//to state DB and history DB (if exist)
err = ledger.(*kvLedger).blockStore.AddBlock(block2)
//assume that peer fails here before committing the transaction
assert.NoError(t, err)
bcInfo, _ = ledger.GetBlockchainInfo()
block2Hash := block2.Header.Hash()
testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
Height: 2, CurrentBlockHash: block2Hash, PreviousBlockHash: block1.Header.Hash()})
simulator, _ = ledger.NewTxSimulator()
value, _ := simulator.GetState("ns1", "key1")
//value for 'key1' should be 'value1' as the last commit failed
testutil.AssertEquals(t, value, []byte("value1.1"))
value, _ = simulator.GetState("ns1", "key2")
//value for 'key2' should be 'value2' as the last commit failed
testutil.AssertEquals(t, value, []byte("value2.1"))
value, _ = simulator.GetState("ns1", "key3")
//value for 'key3' should be 'value3' as the last commit failed
testutil.AssertEquals(t, value, []byte("value3.1"))
//savepoint in state DB should 0 as the last commit failed
stateDBSavepoint, _ := ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(0))
if ledgerconfig.IsHistoryDBEnabled() == true {
qhistory, _ := ledger.NewHistoryQueryExecutor()
itr, _ := qhistory.GetHistoryForKey("ns1", "key1")
count := 0
for {
kmod, err := itr.Next()
testutil.AssertNoError(t, err, "Error upon Next()")
if kmod == nil {
break
}
retrievedValue := kmod.(*ledgerpackage.KeyModification).Value
count++
expectedValue := []byte("value1." + strconv.Itoa(count))
testutil.AssertEquals(t, retrievedValue, expectedValue)
}
testutil.AssertEquals(t, count, 1)
//savepoint in history DB should 0 as the last commit failed
historyDBSavepoint, _ := ledger.(*kvLedger).historyDB.GetLastSavepoint()
testutil.AssertEquals(t, historyDBSavepoint.BlockNum, uint64(0))
}
simulator.Done()
ledger.Close()
provider.Close()
//we assume here that the peer comes online and calls NewKVLedger to get a handler for the ledger
//State DB should be recovered before returning from NewKVLedger call
provider, _ = NewProvider()
ledger, _ = provider.Open("testLedger")
simulator, _ = ledger.NewTxSimulator()
value, _ = simulator.GetState("ns1", "key1")
//value for 'key1' should be 'value4' after recovery
testutil.AssertEquals(t, value, []byte("value1.2"))
value, _ = simulator.GetState("ns1", "key2")
//value for 'key2' should be 'value5' after recovery
testutil.AssertEquals(t, value, []byte("value2.2"))
value, _ = simulator.GetState("ns1", "key3")
//value for 'key3' should be 'value6' after recovery
testutil.AssertEquals(t, value, []byte("value3.2"))
//savepoint in state DB should 2 after recovery
stateDBSavepoint, _ = ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(1))
if ledgerconfig.IsHistoryDBEnabled() == true {
qhistory, _ := ledger.NewHistoryQueryExecutor()
itr, _ := qhistory.GetHistoryForKey("ns1", "key1")
count := 0
for {
kmod, err := itr.Next()
testutil.AssertNoError(t, err, "Error upon Next()")
if kmod == nil {
break
}
retrievedValue := kmod.(*ledgerpackage.KeyModification).Value
count++
expectedValue := []byte("value1." + strconv.Itoa(count))
testutil.AssertEquals(t, retrievedValue, expectedValue)
}
testutil.AssertEquals(t, count, 2)
//savepoint in history DB should 2 after recovery
historyDBSavepoint, _ := ledger.(*kvLedger).historyDB.GetLastSavepoint()
testutil.AssertEquals(t, historyDBSavepoint.BlockNum, uint64(1))
}
simulator.Done()
//======================================================================================
//SCENARIO 2: peer fails after committing the third block to state DB
//but before committing to history DB (if exist)
//======================================================================================
simulator, _ = ledger.NewTxSimulator()
//simulating transaction
simulator.SetState("ns1", "key1", []byte("value1.3"))
simulator.SetState("ns1", "key2", []byte("value2.3"))
simulator.SetState("ns1", "key3", []byte("value3.3"))
simulator.Done()
simRes, _ = simulator.GetTxSimulationResults()
//generating a block based on the simulation result
block3 := bg.NextBlock([][]byte{simRes}, false)
//performing validation of read and write set to find valid transactions
ledger.(*kvLedger).txtmgmt.ValidateAndPrepare(block3, true)
//writing the validated block to block storage
err = ledger.(*kvLedger).blockStore.AddBlock(block3)
//committing the transaction to state DB
err = ledger.(*kvLedger).txtmgmt.Commit()
//assume that peer fails here after committing the transaction to state DB but before
//history DB
assert.NoError(t, err)
bcInfo, _ = ledger.GetBlockchainInfo()
block3Hash := block3.Header.Hash()
testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
Height: 3, CurrentBlockHash: block3Hash, PreviousBlockHash: block2.Header.Hash()})
simulator, _ = ledger.NewTxSimulator()
value, _ = simulator.GetState("ns1", "key1")
//value for 'key1' should be 'value7'
testutil.AssertEquals(t, value, []byte("value1.3"))
value, _ = simulator.GetState("ns1", "key2")
//value for 'key2' should be 'value8'
testutil.AssertEquals(t, value, []byte("value2.3"))
value, _ = simulator.GetState("ns1", "key3")
//value for 'key3' should be 'value9'
testutil.AssertEquals(t, value, []byte("value3.3"))
//savepoint in state DB should 3
stateDBSavepoint, _ = ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(2))
if ledgerconfig.IsHistoryDBEnabled() == true {
qhistory, _ := ledger.NewHistoryQueryExecutor()
itr, _ := qhistory.GetHistoryForKey("ns1", "key1")
count := 0
for {
kmod, err := itr.Next()
testutil.AssertNoError(t, err, "Error upon Next()")
if kmod == nil {
break
}
retrievedValue := kmod.(*ledgerpackage.KeyModification).Value
count++
expectedValue := []byte("value1." + strconv.Itoa(count))
testutil.AssertEquals(t, retrievedValue, expectedValue)
}
testutil.AssertEquals(t, count, 2)
//savepoint in history DB should 2 as the last commit failed
historyDBSavepoint, _ := ledger.(*kvLedger).historyDB.GetLastSavepoint()
testutil.AssertEquals(t, historyDBSavepoint.BlockNum, uint64(1))
}
simulator.Done()
ledger.Close()
provider.Close()
//we assume here that the peer comes online and calls NewKVLedger to get a handler for the ledger
//history DB should be recovered before returning from NewKVLedger call
provider, _ = NewProvider()
ledger, _ = provider.Open("testLedger")
simulator, _ = ledger.NewTxSimulator()
stateDBSavepoint, _ = ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(2))
if ledgerconfig.IsHistoryDBEnabled() == true {
qhistory, _ := ledger.NewHistoryQueryExecutor()
itr, _ := qhistory.GetHistoryForKey("ns1", "key1")
count := 0
for {
kmod, err := itr.Next()
testutil.AssertNoError(t, err, "Error upon Next()")
if kmod == nil {
break
}
retrievedValue := kmod.(*ledgerpackage.KeyModification).Value
count++
expectedValue := []byte("value1." + strconv.Itoa(count))
testutil.AssertEquals(t, retrievedValue, expectedValue)
}
testutil.AssertEquals(t, count, 3)
//savepoint in history DB should 3 after recovery
historyDBSavepoint, _ := ledger.(*kvLedger).historyDB.GetLastSavepoint()
testutil.AssertEquals(t, historyDBSavepoint.BlockNum, uint64(2))
}
simulator.Done()
//Rare scenario
//======================================================================================
//SCENARIO 3: peer fails after committing the fourth block to history DB (if exist)
//but before committing to state DB
//======================================================================================
simulator, _ = ledger.NewTxSimulator()
//simulating transaction
simulator.SetState("ns1", "key1", []byte("value1.4"))
simulator.SetState("ns1", "key2", []byte("value2.4"))
simulator.SetState("ns1", "key3", []byte("value3.4"))
simulator.Done()
simRes, _ = simulator.GetTxSimulationResults()
//generating a block based on the simulation result
block4 := bg.NextBlock([][]byte{simRes}, false)
//performing validation of read and write set to find valid transactions
ledger.(*kvLedger).txtmgmt.ValidateAndPrepare(block4, true)
//writing the validated block to block storage but fails to commit to state DB but
//successfully commits to history DB (if exists)
err = ledger.(*kvLedger).blockStore.AddBlock(block4)
if ledgerconfig.IsHistoryDBEnabled() == true {
err = ledger.(*kvLedger).historyDB.Commit(block4)
}
assert.NoError(t, err)
bcInfo, _ = ledger.GetBlockchainInfo()
block4Hash := block4.Header.Hash()
testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
Height: 4, CurrentBlockHash: block4Hash, PreviousBlockHash: block3.Header.Hash()})
simulator, _ = ledger.NewTxSimulator()
value, _ = simulator.GetState("ns1", "key1")
//value for 'key1' should be 'value7' as the last commit to State DB failed
testutil.AssertEquals(t, value, []byte("value1.3"))
value, _ = simulator.GetState("ns1", "key2")
//value for 'key2' should be 'value8' as the last commit to State DB failed
testutil.AssertEquals(t, value, []byte("value2.3"))
value, _ = simulator.GetState("ns1", "key3")
//value for 'key3' should be 'value9' as the last commit to State DB failed
testutil.AssertEquals(t, value, []byte("value3.3"))
//savepoint in state DB should 3 as the last commit failed
stateDBSavepoint, _ = ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(2))
if ledgerconfig.IsHistoryDBEnabled() == true {
qhistory, _ := ledger.NewHistoryQueryExecutor()
itr, _ := qhistory.GetHistoryForKey("ns1", "key1")
count := 0
for {
kmod, err := itr.Next()
testutil.AssertNoError(t, err, "Error upon Next()")
if kmod == nil {
break
}
retrievedValue := kmod.(*ledgerpackage.KeyModification).Value
count++
expectedValue := []byte("value1." + strconv.Itoa(count))
testutil.AssertEquals(t, retrievedValue, expectedValue)
}
testutil.AssertEquals(t, count, 4)
//savepoint in history DB should 4
historyDBSavepoint, _ := ledger.(*kvLedger).historyDB.GetLastSavepoint()
testutil.AssertEquals(t, historyDBSavepoint.BlockNum, uint64(3))
}
simulator.Done()
ledger.Close()
provider.Close()
//we assume here that the peer comes online and calls NewKVLedger to get a handler for the ledger
//state DB should be recovered before returning from NewKVLedger call
provider, _ = NewProvider()
ledger, _ = provider.Open("testLedger")
simulator, _ = ledger.NewTxSimulator()
value, _ = simulator.GetState("ns1", "key1")
//value for 'key1' should be 'value10' after state DB recovery
testutil.AssertEquals(t, value, []byte("value1.4"))
value, _ = simulator.GetState("ns1", "key2")
//value for 'key2' should be 'value11' after state DB recovery
testutil.AssertEquals(t, value, []byte("value2.4"))
value, _ = simulator.GetState("ns1", "key3")
//value for 'key3' should be 'value12' after state DB recovery
testutil.AssertEquals(t, value, []byte("value3.4"))
//savepoint in state DB should 4 after the recovery
stateDBSavepoint, _ = ledger.(*kvLedger).txtmgmt.GetLastSavepoint()
testutil.AssertEquals(t, stateDBSavepoint.BlockNum, uint64(3))
simulator.Done()
}
// TestLedgerWithCouchDbEnabledWithBinaryAndJSONData commits two blocks whose
// writes mix opaque binary values with JSON document values, then verifies
// blockchain info, block retrieval by hash and by number, and — when the
// history DB is enabled — the recorded modification history of "key7".
// NOTE(review): despite the name, whether CouchDB is actually used depends on
// the configuration loaded from core.yaml; the test exercises whichever
// state DB is active.
func TestLedgerWithCouchDbEnabledWithBinaryAndJSONData(t *testing.T) {
	//call a helper method to load the core.yaml
	ledgertestutil.SetupCoreYAMLConfig("./../../../peer")
	logger.Debugf("TestLedgerWithCouchDbEnabledWithBinaryAndJSONData IsCouchDBEnabled()value: %v , IsHistoryDBEnabled()value: %v\n",
		ledgerconfig.IsCouchDBEnabled(), ledgerconfig.IsHistoryDBEnabled())
	env := newTestEnv(t)
	defer env.cleanup()
	provider, _ := NewProvider()
	defer provider.Close()
	ledger, _ := provider.Create("testLedger")
	defer ledger.Close()
	// a freshly created ledger starts empty: height 0, no block hashes
	bcInfo, _ := ledger.GetBlockchainInfo()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 0, CurrentBlockHash: nil, PreviousBlockHash: nil})
	// block 1: binary values for key4/key5, JSON documents for key6/key7
	simulator, _ := ledger.NewTxSimulator()
	simulator.SetState("ns1", "key4", []byte("value1"))
	simulator.SetState("ns1", "key5", []byte("value2"))
	simulator.SetState("ns1", "key6", []byte("{\"shipmentID\":\"161003PKC7300\",\"customsInvoice\":{\"methodOfTransport\":\"GROUND\",\"invoiceNumber\":\"00091622\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator.SetState("ns1", "key7", []byte("{\"shipmentID\":\"161003PKC7600\",\"customsInvoice\":{\"methodOfTransport\":\"AIR MAYBE\",\"invoiceNumber\":\"00091624\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator.Done()
	simRes, _ := simulator.GetTxSimulationResults()
	bg := testutil.NewBlockGenerator(t)
	block1 := bg.NextBlock([][]byte{simRes}, false)
	ledger.Commit(block1)
	bcInfo, _ = ledger.GetBlockchainInfo()
	block1Hash := block1.Header.Hash()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 1, CurrentBlockHash: block1Hash, PreviousBlockHash: []byte{}})
	// block 2: two transactions; both touch key7 so its history grows
	simulationResults := [][]byte{}
	simulator, _ = ledger.NewTxSimulator()
	simulator.SetState("ns1", "key4", []byte("value3"))
	simulator.SetState("ns1", "key5", []byte("{\"shipmentID\":\"161003PKC7500\",\"customsInvoice\":{\"methodOfTransport\":\"AIR FREIGHT\",\"invoiceNumber\":\"00091623\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator.SetState("ns1", "key6", []byte("value4"))
	simulator.SetState("ns1", "key7", []byte("{\"shipmentID\":\"161003PKC7600\",\"customsInvoice\":{\"methodOfTransport\":\"GROUND\",\"invoiceNumber\":\"00091624\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator.SetState("ns1", "key8", []byte("{\"shipmentID\":\"161003PKC7700\",\"customsInvoice\":{\"methodOfTransport\":\"SHIP\",\"invoiceNumber\":\"00091625\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator.Done()
	simRes, _ = simulator.GetTxSimulationResults()
	simulationResults = append(simulationResults, simRes)
	//add a 2nd transaction
	simulator2, _ := ledger.NewTxSimulator()
	simulator2.SetState("ns1", "key7", []byte("{\"shipmentID\":\"161003PKC7600\",\"customsInvoice\":{\"methodOfTransport\":\"TRAIN\",\"invoiceNumber\":\"00091624\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator2.SetState("ns1", "key9", []byte("value5"))
	simulator2.SetState("ns1", "key10", []byte("{\"shipmentID\":\"261003PKC8000\",\"customsInvoice\":{\"methodOfTransport\":\"DONKEY\",\"invoiceNumber\":\"00091626\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}"))
	simulator2.Done()
	simRes2, _ := simulator2.GetTxSimulationResults()
	simulationResults = append(simulationResults, simRes2)
	block2 := bg.NextBlock(simulationResults, false)
	ledger.Commit(block2)
	bcInfo, _ = ledger.GetBlockchainInfo()
	block2Hash := block2.Header.Hash()
	testutil.AssertEquals(t, bcInfo, &common.BlockchainInfo{
		Height: 2, CurrentBlockHash: block2Hash, PreviousBlockHash: block1.Header.Hash()})
	// blocks must be retrievable both by hash and by number
	b1, _ := ledger.GetBlockByHash(block1Hash)
	testutil.AssertEquals(t, b1, block1)
	b2, _ := ledger.GetBlockByHash(block2Hash)
	testutil.AssertEquals(t, b2, block2)
	b1, _ = ledger.GetBlockByNumber(0)
	testutil.AssertEquals(t, b1, block1)
	b2, _ = ledger.GetBlockByNumber(1)
	testutil.AssertEquals(t, b2, block2)
	//Similar test has been pushed down to historyleveldb_test.go as well
	if ledgerconfig.IsHistoryDBEnabled() == true {
		logger.Debugf("History is enabled\n")
		qhistory, err := ledger.NewHistoryQueryExecutor()
		testutil.AssertNoError(t, err, fmt.Sprintf("Error when trying to retrieve history database executor"))
		itr, err2 := qhistory.GetHistoryForKey("ns1", "key7")
		testutil.AssertNoError(t, err2, fmt.Sprintf("Error upon GetHistoryForKey"))
		var retrievedValue []byte
		count := 0
		// key7 was written once in block 1 and twice in block 2 → 3 entries
		for {
			kmod, _ := itr.Next()
			if kmod == nil {
				break
			}
			retrievedValue = kmod.(*ledgerpackage.KeyModification).Value
			count++
		}
		testutil.AssertEquals(t, count, 3)
		// test the last value in the history matches the last value set for key7
		expectedValue := []byte("{\"shipmentID\":\"161003PKC7600\",\"customsInvoice\":{\"methodOfTransport\":\"TRAIN\",\"invoiceNumber\":\"00091624\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}")
		testutil.AssertEquals(t, retrievedValue, expectedValue)
	}
}
| {
"content_hash": "930a4fa8308d95b4fb8dbf66038af0c1",
"timestamp": "",
"source": "github",
"line_count": 518,
"max_line_length": 289,
"avg_line_length": 42.7007722007722,
"alnum_prop": 0.7022017270220172,
"repo_name": "king3000/fabric",
"id": "75ba24e92bf69b501556425d4224aa2d30ddd200",
"size": "22694",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "core/ledger/kvledger/kv_ledger_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "845"
},
{
"name": "Gherkin",
"bytes": "28708"
},
{
"name": "Go",
"bytes": "3544857"
},
{
"name": "HTML",
"bytes": "6057"
},
{
"name": "Java",
"bytes": "71703"
},
{
"name": "JavaScript",
"bytes": "35739"
},
{
"name": "Makefile",
"bytes": "15248"
},
{
"name": "Protocol Buffer",
"bytes": "86395"
},
{
"name": "Python",
"bytes": "179099"
},
{
"name": "Ruby",
"bytes": "3441"
},
{
"name": "Shell",
"bytes": "49312"
}
],
"symlink_target": ""
} |
import { toTruncFixed } from './common';
// TODO: Should this module depend on this?
import Torrent from 'stores/torrent';
// Transfer rates use SI (decimal) prefixes: 1 kB/s = 1000 B/s.
const speed_K = 1000;
const speed_K_str = 'kB/s';
const speed_M_str = 'MB/s';
const speed_G_str = 'GB/s';

// File sizes also use SI (decimal) prefixes: 1 kB = 1000 B.
const size_K = 1000;
const size_B_str = 'B';
const size_K_str = 'kB';
const size_M_str = 'MB';
const size_G_str = 'GB';
const size_T_str = 'TB';

// Memory amounts use binary (IEC) prefixes: 1 KiB = 1024 B.
const mem_K = 1024;
const mem_B_str = 'B';
const mem_K_str = 'KiB';
const mem_M_str = 'MiB';
const mem_G_str = 'GiB';
const mem_T_str = 'TiB';
// Render an upload rate (bytes/s) with a leading "↑" arrow.
export function formatUL(bps) {
  const rate = speedBps(bps);
  return `↑ ${rate}`;
}
// Render a download rate (bytes/s) with a leading "↓" arrow.
export function formatDL(bps) {
  const rate = speedBps(bps);
  return `↓ ${rate}`;
}
/**
 * Render a whole number using the browser's locale-aware grouping
 * (e.g. thousands separators).
 *
 * @param {Number} n
 * @return {String}
 */
export function numberWithCommas(n) {
  const whole = parseInt(n, 10);
  return whole.toLocaleString();
}
/*
 * Render a percentage with precision that shrinks as the value grows:
 * two decimals below 10, one decimal below 100, none at or above 100.
 */
export function percentString(x) {
  if (x < 10.0) {
    return toTruncFixed(x, 2);
  }
  if (x < 100.0) {
    return toTruncFixed(x, 1);
  }
  return toTruncFixed(x, 0);
}
// Format a rate given in bytes per second.
export function speedBps(Bps) {
  const kbps = toKBps(Bps);
  return speed(kbps);
}
// Convert bytes/s to whole kilobytes/s (SI), truncating downward.
export function toKBps(Bps) {
  const kilobytesPerSecond = Bps / speed_K;
  return Math.floor(kilobytesPerSecond);
}
/**
 * Format a rate given in kilobytes per second, scaling the unit so the
 * number keeps roughly three significant digits. The .95/.995 thresholds
 * promote values that would otherwise truncate into the next unit's range.
 *
 * @param {Number} KBps
 * @return {String}
 */
export function speed(KBps) {
  // Use a distinctly named `let` local: the original `var speed` shadowed
  // this function's own name inside its body.
  let rate = KBps;

  if (rate <= 999.95) { // 0 kB/s to 999 kB/s
    return `${toTruncFixed(rate, 0)} ${speed_K_str}`;
  }

  rate /= speed_K;
  if (rate <= 99.995) { // 1 MB/s to 99.99 MB/s
    return `${toTruncFixed(rate, 2)} ${speed_M_str}`;
  }
  if (rate <= 999.95) { // 100 MB/s to 999.9 MB/s — one decimal, still MB/s
    return `${toTruncFixed(rate, 1)} ${speed_M_str}`;
  }

  // insane speeds
  rate /= speed_K;
  return `${toTruncFixed(rate, 2)} ${speed_G_str}`;
}
// Pick the singular or plural message for a count.
// TODO(i18n): http://doc.qt.digia.com/4.6/i18n-plural-rules.html
export function pluralString(msgid, msgid_plural, n) {
  if (n === 1) {
    return msgid;
  }
  return msgid_plural;
}
// "1,234 items" — localized count plus the correctly pluralized noun.
export function countString(msgid, msgid_plural, n) {
  const noun = pluralString(msgid, msgid_plural, n);
  return `${numberWithCommas(n)} ${noun}`;
}
// Format a byte count using binary (IEC) units: B, KiB, MiB, GiB, TiB.
export function mem(bytes) {
  if (bytes < mem_K) {
    return `${bytes} ${mem_B_str}`;
  }

  let value;
  let suffix;
  if (bytes < Math.pow(mem_K, 2)) {
    value = bytes / mem_K;
    suffix = mem_K_str;
  } else if (bytes < Math.pow(mem_K, 3)) {
    value = bytes / Math.pow(mem_K, 2);
    suffix = mem_M_str;
  } else if (bytes < Math.pow(mem_K, 4)) {
    value = bytes / Math.pow(mem_K, 3);
    suffix = mem_G_str;
  } else {
    value = bytes / Math.pow(mem_K, 4);
    suffix = mem_T_str;
  }

  // Aim for at least three digits with at least one decimal place.
  const decimals = value <= 9.995 ? 2 : 1;
  return `${toTruncFixed(value, decimals)} ${suffix}`;
}
// Format a byte count using SI (decimal) units: B, kB, MB, GB, TB.
export function size(bytes) {
  if (bytes < size_K) {
    return `${bytes} ${size_B_str}`;
  }

  let value;
  let suffix;
  if (bytes < Math.pow(size_K, 2)) {
    value = bytes / size_K;
    suffix = size_K_str;
  } else if (bytes < Math.pow(size_K, 3)) {
    value = bytes / Math.pow(size_K, 2);
    suffix = size_M_str;
  } else if (bytes < Math.pow(size_K, 4)) {
    value = bytes / Math.pow(size_K, 3);
    suffix = size_G_str;
  } else {
    value = bytes / Math.pow(size_K, 4);
    suffix = size_T_str;
  }

  // Aim for at least three digits with at least one decimal place.
  const decimals = value <= 9.995 ? 2 : 1;
  return `${toTruncFixed(value, decimals)} ${suffix}`;
}
// Render a share ratio; -1 encodes "no ratio" and -2 encodes infinity.
export function ratioString(x) {
  switch (x) {
    case -1:
      return 'None';
    case -2:
      return '∞';
    default:
      return percentString(x);
  }
}
// Human-friendly duration: show the largest unit, plus the next unit down
// unless the largest is >= 4 or the next one is zero.
export function timeInterval(seconds) {
  const label = (count, unit) => `${count} ${unit}${count > 1 ? 's' : ''}`;

  const days = Math.floor(seconds / 86400);
  const hours = Math.floor((seconds % 86400) / 3600);
  const minutes = Math.floor((seconds % 3600) / 60);
  const secs = Math.floor(seconds % 60);

  if (days) {
    if (days >= 4 || !hours) {
      return label(days, 'day');
    }
    return `${label(days, 'day')}, ${label(hours, 'hour')}`;
  }
  if (hours) {
    if (hours >= 4 || !minutes) {
      return label(hours, 'hour');
    }
    return `${label(hours, 'hour')}, ${label(minutes, 'minute')}`;
  }
  if (minutes) {
    if (minutes >= 4 || !secs) {
      return label(minutes, 'minute');
    }
    return `${label(minutes, 'minute')}, ${label(secs, 'second')}`;
  }
  return label(secs, 'second');
}
// Map a torrent's status code to a human-readable label. The comparisons
// run in the same order as the original switch, so behavior is identical.
export function formatStatus(torrent) {
  const status = torrent.status;
  if (status === Torrent.STATUS_STOPPED) {
    return torrent.isFinished ? 'Finished' : 'Paused';
  }
  if (status === Torrent.STATUS_CHECK_WAIT) {
    return 'Queued for verification';
  }
  if (status === Torrent.STATUS_CHECK) {
    return 'Verifying local data';
  }
  if (status === Torrent.STATUS_DOWNLOAD_WAIT) {
    return 'Queued for download';
  }
  if (status === Torrent.STATUS_DOWNLOAD) {
    return 'Downloading';
  }
  if (status === Torrent.STATUS_SEED_WAIT) {
    return 'Queued for seeding';
  }
  if (status === Torrent.STATUS_SEED) {
    return 'Seeding';
  }
  if (status === null || status === undefined) {
    return 'Unknown';
  }
  return 'Error';
}
/**
 * Render a unix timestamp (seconds) as "<date> HH:MM:SS AM|PM" in local
 * time, using "Today"/"Yesterday"/"Tomorrow" for dates within one day.
 *
 * @param {Number} value seconds since the epoch; falsy values yield 'N/A'
 * @return {String}
 */
export function timestamp(value) {
  if (!value) {
    return 'N/A';
  }

  const myDate = new Date(value * 1000);
  const now = new Date();

  let date = '';
  const sameYear = now.getFullYear() === myDate.getFullYear();
  const sameMonth = now.getMonth() === myDate.getMonth();
  const dateDiff = now.getDate() - myDate.getDate();
  if (sameYear && sameMonth && Math.abs(dateDiff) <= 1) {
    if (dateDiff === 0) {
      date = 'Today';
    } else if (dateDiff === 1) {
      date = 'Yesterday';
    } else {
      date = 'Tomorrow';
    }
  } else {
    date = myDate.toDateString();
  }

  // 12-hour clock: noon and later is PM. The previous `hours > 12` test
  // mislabelled the whole 12:00–12:59 hour as AM.
  const rawHours = myDate.getHours();
  const period = rawHours >= 12 ? 'PM' : 'AM';
  let hours = rawHours % 12;
  if (hours === 0) {
    hours = 12; // both midnight and noon display as "12"
  }

  const pad = (x) => (x < 10 ? `0${x}` : String(x));
  const time = [pad(hours), pad(myDate.getMinutes()), pad(myDate.getSeconds())].join(':');

  return [date, time, period].join(' ');
}
| {
"content_hash": "dddd55128577bc154c9ef969b9d4f6fb",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 77,
"avg_line_length": 22.09642857142857,
"alnum_prop": 0.587360594795539,
"repo_name": "fcsonline/react-transmission",
"id": "d4b6e871ed3b7e1c6887a60e296567b0bbb09261",
"size": "6193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/util/formatters.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19230"
},
{
"name": "HTML",
"bytes": "1154"
},
{
"name": "JavaScript",
"bytes": "202366"
}
],
"symlink_target": ""
} |
/// View controller for the search screen.
/// NOTE(review): the class name "Serach" is a typo of "Search"; renaming it
/// would break existing references, so it is documented rather than changed.
@interface SerachViewController : UIViewController
@end
| {
"content_hash": "c0983b4c0259f0b0a6ea1f9f183970d0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 50,
"avg_line_length": 19,
"alnum_prop": 0.8421052631578947,
"repo_name": "starLover/CrazyNews",
"id": "0a91ca8b491cf931b923991a7d3915ef2e2b357f",
"size": "230",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CrazyNews/classes/Main/Controllers/SerachViewController.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "201899"
},
{
"name": "Ruby",
"bytes": "241"
}
],
"symlink_target": ""
} |
require 'gamma_ray/request'
module GammaRay
  # Client for publishing analytics events onto a Kinesis stream.
  class Client
    # @param attrs [Hash] configuration; +:stream_name+ names the target
    #   Kinesis stream.
    def initialize(attrs = {})
      @options = attrs
      @stream_name = attrs[:stream_name]
    end

    # Publishes +event+ with the given +properties+ to the stream.
    #
    # @param event [String] non-empty event name
    # @param properties [Hash] arbitrary event attributes; time-like values
    #   are serialized to UTC ISO8601
    # @raise [ArgumentError] if +event+ is blank (or nil) or +properties+
    #   is not a Hash
    def track(event, properties = {})
      # `to_s` makes a nil event raise ArgumentError instead of NoMethodError.
      raise ArgumentError, 'Must supply an event name as a non-empty string' if event.to_s.empty?
      raise ArgumentError, 'Properties must be a Hash' unless properties.is_a?(Hash)

      serialized = properties.each_with_object({}) do |(key, value), memo|
        memo[key] = serialize_value(value)
      end

      # Put the event onto the Kinesis stream.
      GammaRay::Request.new.post(@stream_name, serialized)
    end

    private

    # Normalizes time-like values (anything responding to #utc/#iso8601) to
    # a UTC ISO8601 string with microsecond precision; everything else
    # passes through unchanged.
    def serialize_value(value)
      value = value.utc if value.respond_to?(:utc)
      value = value.iso8601(6) if value.respond_to?(:iso8601)
      value
    end
  end
end
| {
"content_hash": "f520c21f970d7d20326e10da24bd8c8c",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 105,
"avg_line_length": 26.833333333333332,
"alnum_prop": 0.6285714285714286,
"repo_name": "transfixio/gamma_ray",
"id": "816120b8b2807936c868fd70e1f97e9e89e49ec4",
"size": "805",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/gamma_ray/client.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3049"
},
{
"name": "Ruby",
"bytes": "45387"
},
{
"name": "Shell",
"bytes": "1980"
}
],
"symlink_target": ""
} |
namespace blink {
// Serializes |value| (with its transferables and blob info) into a
// SerializedScriptValue using the modules-aware serializers. Dispatches to
// the V8-based structured clone implementation when that runtime flag is
// enabled; otherwise falls back to the legacy writer-based serializer.
// Serialization failures are reported through |exceptionState|.
PassRefPtr<SerializedScriptValue>
SerializedScriptValueForModulesFactory::create(v8::Isolate* isolate,
                                               v8::Local<v8::Value> value,
                                               Transferables* transferables,
                                               WebBlobInfoArray* blobInfo,
                                               ExceptionState& exceptionState) {
  TRACE_EVENT0("blink", "SerializedScriptValueFactory::create");
  if (RuntimeEnabledFeatures::v8BasedStructuredCloneEnabled()) {
    V8ScriptValueSerializerForModules serializer(ScriptState::current(isolate));
    serializer.setBlobInfoArray(blobInfo);
    return serializer.serialize(value, transferables, exceptionState);
  }
  // Legacy path: writer-based serialization.
  SerializedScriptValueWriterForModules writer;
  ScriptValueSerializerForModules serializer(writer, blobInfo,
                                             ScriptState::current(isolate));
  return serializer.serialize(value, transferables, exceptionState);
}
// Deserializes |value| back into a V8 value, reattaching transferred
// message ports and blob info. Uses the V8-based structured clone
// deserializer when that runtime flag is enabled; otherwise falls back to
// the legacy reader-based deserializer.
v8::Local<v8::Value> SerializedScriptValueForModulesFactory::deserialize(
    SerializedScriptValue* value,
    v8::Isolate* isolate,
    MessagePortArray* messagePorts,
    const WebBlobInfoArray* blobInfo) {
  TRACE_EVENT0("blink", "SerializedScriptValueFactory::deserialize");
  if (RuntimeEnabledFeatures::v8BasedStructuredCloneEnabled()) {
    V8ScriptValueDeserializerForModules deserializer(
        ScriptState::current(isolate), value);
    deserializer.setTransferredMessagePorts(messagePorts);
    deserializer.setBlobInfoArray(blobInfo);
    return deserializer.deserialize();
  }
  // deserialize() can run arbitrary script (e.g., setters), which could result
  // in |this| being destroyed. Holding a RefPtr ensures we are alive (along
  // with our internal data) throughout the operation.
  RefPtr<SerializedScriptValue> protect(value);
  // Empty payload deserializes to null.
  if (!value->dataLengthInBytes())
    return v8::Null(isolate);
  static_assert(sizeof(SerializedScriptValueWriter::BufferValueType) == 2,
                "BufferValueType should be 2 bytes");
  // FIXME: SerializedScriptValue shouldn't use String for its underlying
  // storage. Instead, it should use SharedBuffer or Vector<uint8_t>. The
  // information stored in m_data isn't even encoded in UTF-16. Instead,
  // unicode characters are encoded as UTF-8 with two code units per UChar.
  SerializedScriptValueReaderForModules reader(
      value->data(), value->dataLengthInBytes(), blobInfo,
      value->blobDataHandles(), ScriptState::current(isolate));
  ScriptValueDeserializerForModules deserializer(
      reader, messagePorts, value->getArrayBufferContentsArray(),
      value->getImageBitmapContentsArray());
  return deserializer.deserialize();
}
} // namespace blink
| {
"content_hash": "29fbe08433e5ad9c31124ef8e1ab37c7",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 80,
"avg_line_length": 50.14545454545455,
"alnum_prop": 0.7077592458303118,
"repo_name": "google-ar/WebARonARCore",
"id": "621e899c68744066cf896cd41cedbf99b92b9c6f",
"size": "3417",
"binary": false,
"copies": "2",
"ref": "refs/heads/webarcore_57.0.2987.5",
"path": "third_party/WebKit/Source/bindings/modules/v8/SerializedScriptValueForModulesFactory.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.keycloak.testsuite.account;
import org.jboss.arquillian.graphene.page.Page;
import org.junit.AfterClass;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.Test;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.testsuite.auth.page.login.VerifyEmail;
import static org.keycloak.testsuite.util.MailAssert.assertEmailAndGetUrl;
import org.keycloak.testsuite.util.MailServer;
import org.keycloak.testsuite.util.MailServerConfiguration;
import static org.keycloak.testsuite.util.URLAssert.assertCurrentUrlStartsWith;
/**
 * Tests the realm "verify email" flow: after login the user is prompted to
 * verify the address, receives a mail from the configured SMTP server, and
 * reaches account management once the emailed verification link is followed.
 *
 * @author vramik
 */
public class VerifyEmailTest extends AbstractAccountManagementTest {

    @Page
    private VerifyEmail testRealmVerifyEmailPage;

    // One-time guard so the in-memory mail server and test mailbox are only
    // set up once for this class. NOTE(review): being static, the flag
    // outlives afterClass(); a repeated run of this class in the same JVM
    // would skip MailServer.start() — confirm whether that is intended.
    private static boolean init = false;

    @Override
    public void setDefaultPageUriParameters() {
        super.setDefaultPageUriParameters();
        testRealmVerifyEmailPage.setAuthRealm(testRealmPage);
    }

    /**
     * Enables email verification on the test realm, points it at the
     * suite's SMTP server, and lazily starts the mail server with an
     * account for the test user.
     */
    @Before
    public void beforeVerifyEmail() {
        // Fixed "smpt" -> "smtp" typo in this log message.
        log.info("enable verify email and configure smtp server in test realm");
        RealmRepresentation testRealmRep = testRealmResource().toRepresentation();
        testRealmRep.setSmtpServer(suiteContext.getSmtpServer());
        testRealmRep.setVerifyEmail(true);
        testRealmResource().update(testRealmRep);

        if (!init) {
            init = true;
            MailServer.start();
            MailServer.createEmailAccount(testUser.getEmail(), "password");
        }
    }

    @AfterClass
    public static void afterClass() {
        MailServer.stop();
    }

    /**
     * Logs in, asserts the verification prompt, follows the URL from the
     * received email, and checks the user lands in account management —
     * including after a sign-out/sign-in round trip.
     */
    @Test
    public void verifyEmail() {
        testRealmAccountManagementPage.navigateTo();
        testRealmLoginPage.form().login(testUser);

        assertEquals("You need to verify your email address to activate your account.",
                testRealmVerifyEmailPage.getFeedbackText());

        String verifyEmailUrl = assertEmailAndGetUrl(MailServerConfiguration.FROM, testUser.getEmail(),
                "Someone has created a Test account with this email address.");

        log.info("navigating to url from email: " + verifyEmailUrl);
        driver.navigate().to(verifyEmailUrl);

        assertCurrentUrlStartsWith(testRealmAccountManagementPage);
        testRealmAccountManagementPage.signOut();
        testRealmLoginPage.form().login(testUser);
        assertCurrentUrlStartsWith(testRealmAccountManagementPage);
    }
}
| {
"content_hash": "49450b863f912db8980b0921248c9209",
"timestamp": "",
"source": "github",
"line_count": 72,
"max_line_length": 104,
"avg_line_length": 34.52777777777778,
"alnum_prop": 0.7148028962188254,
"repo_name": "gregjones60/keycloak",
"id": "2c00ad8cf029747710f451c488c163c67073edfc",
"size": "3151",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/account/VerifyEmailTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "328"
},
{
"name": "ApacheConf",
"bytes": "22819"
},
{
"name": "Batchfile",
"bytes": "2114"
},
{
"name": "CSS",
"bytes": "345677"
},
{
"name": "FreeMarker",
"bytes": "59645"
},
{
"name": "HTML",
"bytes": "405846"
},
{
"name": "Java",
"bytes": "9388566"
},
{
"name": "JavaScript",
"bytes": "695860"
},
{
"name": "Shell",
"bytes": "11085"
},
{
"name": "XSLT",
"bytes": "82496"
}
],
"symlink_target": ""
} |
package org.openweathermap.api.query.forecast.daily;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.openweathermap.api.query.QueryRequestPartBuilder;
import org.openweathermap.api.query.Type;
/**
 * Daily-forecast query addressed by city name, optionally narrowed by a
 * country code and a query {@code Type} (semantics defined by
 * {@link Type}). Accessors, equals/hashCode and toString are generated by
 * Lombok's {@code @Data}/{@code @EqualsAndHashCode}.
 */
@EqualsAndHashCode(callSuper = true)
@Data
public class ByCityName extends AbstractDailyForecastQuery implements DailyForecastQuery {
    // Mandatory city name that addresses the forecast location.
    private final String cityName;
    // Optional country code; null omits it from the request.
    // NOTE(review): presumably an ISO 3166 code — confirm against the API.
    private String countryCode;
    // Optional query type; null lets the API default apply.
    private Type type;

    public ByCityName(String cityName) {
        this.cityName = cityName;
    }

    /**
     * Builds the city-name portion of the request from the configured
     * name, country code and type.
     */
    @Override
    protected String getRequestPart() {
        return QueryRequestPartBuilder.byCityName(getCityName(), getCountryCode(), getType());
    }
}
| {
"content_hash": "3d00075261ce465139e6e8c978ce9ae5",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 94,
"avg_line_length": 27.08,
"alnum_prop": 0.7651403249630724,
"repo_name": "xSAVIKx/openweathermap-java-api",
"id": "8a86d9af46921efc928211b418f531ce6448f978",
"size": "1279",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "api-query/src/main/java/org/openweathermap/api/query/forecast/daily/ByCityName.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "760"
},
{
"name": "Groovy",
"bytes": "42992"
},
{
"name": "Java",
"bytes": "73943"
},
{
"name": "Shell",
"bytes": "906"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.