file_name | prefix | suffix | middle
---|---|---|---|
test_filename_parser.py | # emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the NiBabel package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
''' Tests for filename container '''
from ..filename_parser import (types_filenames, TypesFilenamesError,
parse_filename, splitext_addext)
from nose.tools import (assert_equal, assert_true, assert_false,
assert_raises)
def test_filenames():
types_exts = (('image', '.img'), ('header', '.hdr'))
for t_fname in ('test.img', 'test.hdr', 'test', 'test.'):
tfns = types_filenames(t_fname, types_exts)
assert_equal(tfns,
{'image': 'test.img',
'header': 'test.hdr'})
# enforcing extensions raises an error for bad extension
assert_raises(TypesFilenamesError,
types_filenames,
'test.funny',
types_exts)
# If not enforcing extensions, it does the best job it can,
# assuming the passed filename is for the first type (in this case
# 'image')
tfns = types_filenames('test.funny', types_exts,
enforce_extensions=False)
assert_equal(tfns,
{'header': 'test.hdr',
'image': 'test.funny'})
# .gz and .bz2 suffixes to extensions, by default, are removed
# before extension checking etc, and then put back onto every
# returned filename.
tfns = types_filenames('test.img.gz', types_exts)
assert_equal(tfns,
{'header': 'test.hdr.gz',
'image': 'test.img.gz'})
tfns = types_filenames('test.img.bz2', types_exts)
assert_equal(tfns,
{'header': 'test.hdr.bz2',
'image': 'test.img.bz2'})
# of course, if we don't know about e.g. .gz, and enforce_extensions
# is on, we get an error
assert_raises(TypesFilenamesError,
types_filenames,
'test.img.gz',
types_exts, ())
# if we don't know about .gz extension, and not enforcing, then we
# get something a bit odd
tfns = types_filenames('test.img.gz', types_exts,
trailing_suffixes=(),
enforce_extensions=False)
assert_equal(tfns,
{'header': 'test.img.hdr',
'image': 'test.img.gz'})
# the suffixes we remove and replace can be any suffixes.
tfns = types_filenames('test.img.bzr', types_exts, ('.bzr',))
assert_equal(tfns,
{'header': 'test.hdr.bzr',
'image': 'test.img.bzr'})
# If we specifically pass the remove / replace suffixes, then we
# don't remove / replace the .gz and .bz2, unless they are passed
# specifically.
tfns = types_filenames('test.img.bzr', types_exts,
trailing_suffixes=('.bzr',),
enforce_extensions=False)
assert_equal(tfns,
{'header': 'test.hdr.bzr',
'image': 'test.img.bzr'})
# but, just .gz or .bz2 as extension gives an error, if enforcing is on
assert_raises(TypesFilenamesError,
types_filenames,
'test.gz',
types_exts)
assert_raises(TypesFilenamesError,
types_filenames,
'test.bz2',
types_exts)
# if enforcing is off, it tries to work out what the other files
# should be assuming the passed filename is of the first input type
tfns = types_filenames('test.gz', types_exts,
enforce_extensions=False)
assert_equal(tfns,
{'image': 'test.gz',
'header': 'test.hdr.gz'})
# case (in)sensitivity, and effect of uppercase, lowercase
tfns = types_filenames('test.IMG', types_exts)
assert_equal(tfns,
{'image': 'test.IMG',
'header': 'test.HDR'})
tfns = types_filenames('test.img',
(('image', '.IMG'), ('header', '.HDR')))
assert_equal(tfns,
{'header': 'test.hdr',
'image': 'test.img'})
tfns = types_filenames('test.IMG.Gz', types_exts)
assert_equal(tfns,
{'image': 'test.IMG.Gz',
'header': 'test.HDR.Gz'})
def test_parse_filename():
types_exts = (('t1', 'ext1'), ('t2', 'ext2'))
exp_in_outs = (
(('/path/fname.funny', ()),
('/path/fname', '.funny', None, None)),
(('/path/fnameext2', ()),
('/path/fname', 'ext2', None, 't2')),
(('/path/fnameext2', ('.gz',)),
('/path/fname', 'ext2', None, 't2')),
(('/path/fnameext2.gz', ('.gz',)),
('/path/fname', 'ext2', '.gz', 't2'))
)
for inps, exps in exp_in_outs:
pth, sufs = inps
res = parse_filename(pth, types_exts, sufs)
assert_equal(res, exps)
upth = pth.upper()
uexps = (exps[0].upper(), exps[1].upper(),
exps[2].upper() if exps[2] else None,
exps[3])
res = parse_filename(upth, types_exts, sufs)
assert_equal(res, uexps)
# test case sensitivity
res = parse_filename('/path/fnameext2.GZ',
types_exts,
('.gz',), False) # case insensitive again
assert_equal(res, ('/path/fname', 'ext2', '.GZ', 't2'))
res = parse_filename('/path/fnameext2.GZ',
types_exts,
('.gz',), True) # case sensitive
assert_equal(res, ('/path/fnameext2', '.GZ', None, None))
res = parse_filename('/path/fnameEXT2.gz',
types_exts,
('.gz',), False) # case insensitive
assert_equal(res, ('/path/fname', 'EXT2', '.gz', 't2'))
res = parse_filename('/path/fnameEXT2.gz',
types_exts,
('.gz',), True) # case sensitive
assert_equal(res, ('/path/fnameEXT2', '', '.gz', None))
def test_splitext_addext():
res = splitext_addext('fname.ext.gz')
assert_equal(res, ('fname', '.ext', '.gz'))
res = splitext_addext('fname.ext')
assert_equal(res, ('fname', '.ext', ''))
res = splitext_addext('fname.ext.foo', ('.foo', '.bar'))
assert_equal(res, ('fname', '.ext', '.foo'))
res = splitext_addext('fname.ext.FOO', ('.foo', '.bar'))
assert_equal(res, ('fname', '.ext', '.FOO'))
# case sensitive
res = splitext_addext('fname.ext.FOO', ('.foo', '.bar'), True)
assert_equal(res, ('fname.ext', '.FOO', ''))
# edge cases
res = splitext_addext('.nii')
assert_equal(res, ('', '.nii', ''))
res = splitext_addext('...nii')
assert_equal(res, ('..', '.nii', ''))
res = splitext_addext('.')
assert_equal(res, ('.', '', ''))
res = splitext_addext('..')
assert_equal(res, ('..', '', '')) | assert_equal(res, ('...', '', '')) | res = splitext_addext('...') |
memory_test.go | package memory_test
import (
"testing"
"github.com/filecoin-project/go-indexer-core/store/memory"
"github.com/filecoin-project/go-indexer-core/store/test"
)
func TestE2E(t *testing.T) {
s := memory.New()
test.E2ETest(t, s)
}
func | (t *testing.T) {
s := memory.New()
test.ParallelUpdateTest(t, s)
}
func TestRemove(t *testing.T) {
s := memory.New()
test.RemoveTest(t, s)
}
func TestRemoveProviderContext(t *testing.T) {
s := memory.New()
test.RemoveProviderContextTest(t, s)
}
func TestRemoveProvider(t *testing.T) {
s := memory.New()
test.RemoveProviderTest(t, s)
}
| TestParallel |
Component.js | import clsx from "clsx";
import React from "react";
import CircularProgress from "@mui/material/CircularProgress";
import Paper from "@mui/material/Paper";
import useStyles from "./styles";
function Loading({ size, withoutBackground }) {
const classes = useStyles();
return (
<Paper
elevation={0}
square={true}
className={clsx(classes.preloader, withoutBackground && classes.open)}
>
<CircularProgress thickness={1.5} color="inherit" size={size} />
</Paper>
);
}
Loading.defaultProps = {
size: 50,
};
| export default Loading; |
|
blame_file.rs | use super::{
utils, visibility_blocking, CommandBlocking, CommandInfo,
Component, DrawableComponent, EventState,
};
use crate::{
components::{utils::string_width_align, ScrollType},
keys::SharedKeyConfig,
queue::{InternalEvent, Queue},
strings,
ui::{self, style::SharedTheme},
};
use anyhow::Result;
use asyncgit::{
sync::{BlameHunk, CommitId, FileBlame},
AsyncBlame, AsyncGitNotification, BlameParams,
};
use crossbeam_channel::Sender;
use crossterm::event::Event;
use std::convert::TryInto;
use tui::{
backend::Backend,
layout::{Constraint, Rect},
symbols::line::VERTICAL,
text::Span,
widgets::{Block, Borders, Cell, Clear, Row, Table, TableState},
Frame,
};
pub struct | {
title: String,
theme: SharedTheme,
queue: Queue,
async_blame: AsyncBlame,
visible: bool,
file_path: Option<String>,
file_blame: Option<FileBlame>,
table_state: std::cell::Cell<TableState>,
key_config: SharedKeyConfig,
current_height: std::cell::Cell<usize>,
}
static NO_COMMIT_ID: &str = "0000000";
static NO_AUTHOR: &str = "<no author>";
static MIN_AUTHOR_WIDTH: usize = 3;
static MAX_AUTHOR_WIDTH: usize = 20;
fn get_author_width(width: usize) -> usize {
(width.saturating_sub(19) / 3)
.clamp(MIN_AUTHOR_WIDTH, MAX_AUTHOR_WIDTH)
}
const fn number_of_digits(number: usize) -> usize {
let mut rest = number;
let mut result = 0;
while rest > 0 {
rest /= 10;
result += 1;
}
result
}
impl DrawableComponent for BlameFileComponent {
fn draw<B: Backend>(
&self,
f: &mut Frame<B>,
area: Rect,
) -> Result<()> {
if self.is_visible() {
let title = self.get_title();
let rows = self.get_rows(area.width.into());
let author_width = get_author_width(area.width.into());
let constraints = [
// commit id
Constraint::Length(7),
// commit date
Constraint::Length(10),
// commit author
Constraint::Length(author_width.try_into()?),
// line number and vertical bar
Constraint::Length(
(self.get_line_number_width().saturating_add(1))
.try_into()?,
),
// the source code line
Constraint::Percentage(100),
];
let number_of_rows = rows.len();
let table = Table::new(rows)
.widths(&constraints)
.column_spacing(1)
.highlight_style(self.theme.text(true, true))
.block(
Block::default()
.borders(Borders::ALL)
.title(Span::styled(
title,
self.theme.title(true),
))
.border_style(self.theme.block(true)),
);
let mut table_state = self.table_state.take();
f.render_widget(Clear, area);
f.render_stateful_widget(table, area, &mut table_state);
ui::draw_scrollbar(
f,
area,
&self.theme,
// April 2021: `draw_scrollbar` assumes that the last parameter
// is `scroll_top`. Therefore, it subtracts the area’s height
// before calculating the position of the scrollbar. To account
// for that, we add the current height.
number_of_rows + (area.height as usize),
// April 2021: we don’t have access to `table_state.offset`
// (it’s private), so we use `table_state.selected()` as a
// replacement.
//
// Other widgets, for example `BranchListComponent`, manage
// scroll state themselves and use `self.scroll_top` in this
// situation.
//
// There are plans to change `render_stateful_widgets`, so this
// might be acceptable as an interim solution.
//
// https://github.com/fdehau/tui-rs/issues/448
table_state.selected().unwrap_or(0),
);
self.table_state.set(table_state);
self.current_height.set(area.height.into());
}
Ok(())
}
}
impl Component for BlameFileComponent {
fn commands(
&self,
out: &mut Vec<CommandInfo>,
force_all: bool,
) -> CommandBlocking {
if self.is_visible() || force_all {
out.push(
CommandInfo::new(
strings::commands::close_popup(&self.key_config),
true,
true,
)
.order(1),
);
out.push(
CommandInfo::new(
strings::commands::scroll(&self.key_config),
true,
self.file_blame.is_some(),
)
.order(1),
);
out.push(
CommandInfo::new(
strings::commands::commit_details_open(
&self.key_config,
),
true,
self.file_blame.is_some(),
)
.order(1),
);
}
visibility_blocking(self)
}
fn event(
&mut self,
event: crossterm::event::Event,
) -> Result<EventState> {
if self.is_visible() {
if let Event::Key(key) = event {
if key == self.key_config.exit_popup {
self.hide();
} else if key == self.key_config.move_up {
self.move_selection(ScrollType::Up);
} else if key == self.key_config.move_down {
self.move_selection(ScrollType::Down);
} else if key == self.key_config.shift_up
|| key == self.key_config.home
{
self.move_selection(ScrollType::Home);
} else if key == self.key_config.shift_down
|| key == self.key_config.end
{
self.move_selection(ScrollType::End);
} else if key == self.key_config.page_down {
self.move_selection(ScrollType::PageDown);
} else if key == self.key_config.page_up {
self.move_selection(ScrollType::PageUp);
} else if key == self.key_config.focus_right {
self.hide();
return self.selected_commit().map_or(
Ok(EventState::NotConsumed),
|id| {
self.queue.push(
InternalEvent::InspectCommit(
id, None,
),
);
Ok(EventState::Consumed)
},
);
}
return Ok(EventState::Consumed);
}
}
Ok(EventState::NotConsumed)
}
fn is_visible(&self) -> bool {
self.visible
}
fn hide(&mut self) {
self.visible = false;
}
fn show(&mut self) -> Result<()> {
self.visible = true;
Ok(())
}
}
impl BlameFileComponent {
///
pub fn new(
queue: &Queue,
sender: &Sender<AsyncGitNotification>,
title: &str,
theme: SharedTheme,
key_config: SharedKeyConfig,
) -> Self {
Self {
title: String::from(title),
theme,
async_blame: AsyncBlame::new(sender),
queue: queue.clone(),
visible: false,
file_path: None,
file_blame: None,
table_state: std::cell::Cell::new(TableState::default()),
key_config,
current_height: std::cell::Cell::new(0),
}
}
///
pub fn open(&mut self, file_path: &str) -> Result<()> {
self.file_path = Some(file_path.into());
self.file_blame = None;
self.table_state.get_mut().select(Some(0));
self.show()?;
self.update()?;
Ok(())
}
///
pub fn any_work_pending(&self) -> bool {
self.async_blame.is_pending()
}
///
pub fn update_git(
&mut self,
event: AsyncGitNotification,
) -> Result<()> {
if self.is_visible() && event == AsyncGitNotification::Blame {
self.update()?;
}
Ok(())
}
fn update(&mut self) -> Result<()> {
if self.is_visible() {
if let Some(file_path) = &self.file_path {
let blame_params = BlameParams {
file_path: file_path.into(),
};
if let Some((
previous_blame_params,
last_file_blame,
)) = self.async_blame.last()?
{
if previous_blame_params == blame_params {
self.file_blame = Some(last_file_blame);
return Ok(());
}
}
self.async_blame.request(blame_params)?;
}
}
Ok(())
}
///
fn get_title(&self) -> String {
match (
self.any_work_pending(),
self.file_path.as_ref(),
self.file_blame.as_ref(),
) {
(true, Some(file_path), _) => {
format!(
"{} -- {} -- <calculating.. (who is to blame?)>",
self.title, file_path
)
}
(false, Some(file_path), Some(file_blame)) => {
format!(
"{} -- {} -- {}",
self.title,
file_path,
file_blame.commit_id.get_short_string()
)
}
(false, Some(file_path), None) => {
format!(
"{} -- {} -- <no blame available>",
self.title, file_path
)
}
_ => format!("{} -- <no blame available>", self.title),
}
}
///
fn get_rows(&self, width: usize) -> Vec<Row> {
self.file_blame
.as_ref()
.map_or_else(Vec::new, |file_blame| {
file_blame
.lines
.iter()
.enumerate()
.map(|(i, (blame_hunk, line))| {
self.get_line_blame(
width,
i,
(blame_hunk.as_ref(), line.as_ref()),
file_blame,
)
})
.collect()
})
}
fn get_line_blame(
&self,
width: usize,
line_number: usize,
hunk_and_line: (Option<&BlameHunk>, &str),
file_blame: &FileBlame,
) -> Row {
let (hunk_for_line, line) = hunk_and_line;
let show_metadata = if line_number == 0 {
true
} else {
let hunk_for_previous_line =
&file_blame.lines[line_number - 1];
match (hunk_for_previous_line, hunk_for_line) {
((Some(previous), _), Some(current)) => {
previous.commit_id != current.commit_id
}
_ => true,
}
};
let mut cells = if show_metadata {
self.get_metadata_for_line_blame(width, hunk_for_line)
} else {
vec![Cell::from(""), Cell::from(""), Cell::from("")]
};
let line_number_width = self.get_line_number_width();
cells.push(
Cell::from(format!(
"{:>line_number_width$}{}",
line_number,
VERTICAL,
line_number_width = line_number_width,
))
.style(self.theme.text(true, false)),
);
cells.push(
Cell::from(String::from(line))
.style(self.theme.text(true, false)),
);
Row::new(cells)
}
fn get_metadata_for_line_blame(
&self,
width: usize,
blame_hunk: Option<&BlameHunk>,
) -> Vec<Cell> {
let commit_hash = blame_hunk.map_or_else(
|| NO_COMMIT_ID.into(),
|hunk| hunk.commit_id.get_short_string(),
);
let author_width = get_author_width(width);
let truncated_author: String = blame_hunk.map_or_else(
|| NO_AUTHOR.into(),
|hunk| string_width_align(&hunk.author, author_width),
);
let author = format!(
"{:author_width$}",
truncated_author,
author_width = MAX_AUTHOR_WIDTH
);
let time = blame_hunk.map_or_else(
|| "".into(),
|hunk| utils::time_to_string(hunk.time, true),
);
let is_blamed_commit = self
.file_blame
.as_ref()
.and_then(|file_blame| {
blame_hunk.map(|hunk| {
file_blame.commit_id == hunk.commit_id
})
})
.unwrap_or(false);
vec![
Cell::from(commit_hash).style(
self.theme.commit_hash_in_blame(is_blamed_commit),
),
Cell::from(time).style(self.theme.commit_time(false)),
Cell::from(author).style(self.theme.commit_author(false)),
]
}
fn get_max_line_number(&self) -> usize {
self.file_blame
.as_ref()
.map_or(0, |file_blame| file_blame.lines.len() - 1)
}
fn get_line_number_width(&self) -> usize {
let max_line_number = self.get_max_line_number();
number_of_digits(max_line_number)
}
fn move_selection(&mut self, scroll_type: ScrollType) -> bool {
let mut table_state = self.table_state.take();
let old_selection = table_state.selected().unwrap_or(0);
let max_selection = self.get_max_line_number();
let new_selection = match scroll_type {
ScrollType::Up => old_selection.saturating_sub(1),
ScrollType::Down => {
old_selection.saturating_add(1).min(max_selection)
}
ScrollType::Home => 0,
ScrollType::End => max_selection,
ScrollType::PageUp => old_selection.saturating_sub(
self.current_height.get().saturating_sub(2),
),
ScrollType::PageDown => old_selection
.saturating_add(
self.current_height.get().saturating_sub(2),
)
.min(max_selection),
};
let needs_update = new_selection != old_selection;
table_state.select(Some(new_selection));
self.table_state.set(table_state);
needs_update
}
fn selected_commit(&self) -> Option<CommitId> {
self.file_blame.as_ref().and_then(|file_blame| {
let table_state = self.table_state.take();
let commit_id =
table_state.selected().and_then(|selected| {
file_blame.lines[selected]
.0
.as_ref()
.map(|hunk| hunk.commit_id)
});
self.table_state.set(table_state);
commit_id
})
}
}
| BlameFileComponent |
WelcomeName.tsx | import { useEffect, useState } from "react";
import { useMsal, useAccount } from "@azure/msal-react";
import Typography from "@material-ui/core/Typography";
const WelcomeName = () => {
const { accounts } = useMsal();
const account = useAccount(accounts[0] || {});
const [name, setName] = useState("");
useEffect(() => {
if (account && account.name) {
setName(account.name.split(" ")[0]);
}
}, [account]);
if (name) {
return <Typography variant="h6">Welcome, {name}</Typography>;
} else {
return null;
}
};
export default WelcomeName; | ||
useMounted.ts | import { useCallback, useEffect, useRef } from 'react';
export default function | () {
const mountedRef = useRef<boolean>(false);
useEffect(() => {
mountedRef.current = true;
return () => {
mountedRef.current = false;
};
}, []);
return useCallback(() => mountedRef.current, []);
}
| useMounted |
LanguageDialog.ts | namespace SerenitySkins.Administration {
@Serenity.Decorators.registerClass()
export class La | xtends Serenity.EntityDialog<LanguageRow, any> {
protected getFormKey() { return LanguageForm.formKey; }
protected getIdProperty() { return LanguageRow.idProperty; }
protected getLocalTextPrefix() { return LanguageRow.localTextPrefix; }
protected getNameProperty() { return LanguageRow.nameProperty; }
protected getService() { return LanguageService.baseUrl; }
protected form = new LanguageForm(this.idPrefix);
}
} | nguageDialog e |
appointments.component.ts | import { Component, ElementRef, OnDestroy, OnInit, ViewChild } from '@angular/core';
import { BasePageComponent } from '../../base-page';
import { FormBuilder, FormGroup, Validators } from '@angular/forms';
import { Store } from '@ngrx/store';
import { IAppState } from '../../../interfaces/app-state';
import { HttpService } from '../../../services/http/http.service';
import { TCModalService } from '../../../ui/services/modal/modal.service';
import { IUser } from '../../../ui/interfaces/user';
@Component({
selector: 'page-appointments',
templateUrl: './appointments.component.html',
styleUrls: ['./appointments.component.scss']
})
export class PageAppointmentsComponent extends BasePageComponent implements OnInit, OnDestroy {
@ViewChild('modalBody') modalBody: ElementRef<any>;
@ViewChild('modalFooter') modalFooter: ElementRef<any>;
appointments: any[];
appointmentForm: FormGroup;
currentAvatar: string | ArrayBuffer;
defaultAvatar: string;
doctors: IUser[];
constructor(
store: Store<IAppState>, | ) {
super(store, httpSv);
this.pageData = {
title: 'Appointments',
breadcrumbs: [
{
title: 'Medicine',
route: 'default-dashboard'
},
{
title: 'Appointments'
}
]
};
this.appointments = [];
this.doctors = [];
this.defaultAvatar = 'assets/content/anonymous-400.jpg';
this.currentAvatar = this.defaultAvatar;
}
ngOnInit() {
super.ngOnInit();
this.getData('assets/data/appointments.json', 'appointments', 'setLoaded');
this.getData('assets/data/doctors.json', 'doctors');
}
ngOnDestroy() {
super.ngOnDestroy();
}
// open modal window
openModal(body: any, header: any = null, footer: any = null, data: any = null) {
this.initForm(data);
this.modal.open({
body: body,
header: header,
footer: footer
});
}
// close modal window
closeModal() {
this.modal.close();
this.appointmentForm.reset();
}
// init form
initForm(data: any) {
this.appointmentForm = this.formBuilder.group({
img: [(data ? data.img : this.currentAvatar)],
name: [(data ? data.name : ''), Validators.required],
email: [(data ? data.email : ''), Validators.required],
date: [(data ? data.date : ''), Validators.required],
from: [(data ? data.fromTo.substring(0, (data.fromTo.indexOf('-') - 1)) : ''), Validators.required],
to: [(data ? data.fromTo.substring((data.fromTo.indexOf('-') + 2), data.fromTo.length) : ''), Validators.required],
number: [(data ? data.number : ''), Validators.required],
doctor: [(data ? data.doctor : ''), Validators.required],
injury: [(data ? data.injury : ''), Validators.required]
});
}
// upload new file
onFileChanged(inputValue: any) {
let file: File = inputValue.target.files[0];
let reader: FileReader = new FileReader();
reader.onloadend = () => {
this.currentAvatar = reader.result;
};
reader.readAsDataURL(file);
}
// edit appointment
edit(row: any) {
this.openModal(this.modalBody, 'Add appointment', this.modalFooter, row);
}
// remove appointment
remove(tableRow: any) {
this.appointments = this.appointments.filter(row => row !== tableRow);
}
// add new appointment
addAppointment(form: FormGroup) {
if (form.valid) {
let newAppointment: any = form.value;
newAppointment.fromTo = `${form.value.from} - ${form.value.to}`;
newAppointment.img = this.currentAvatar;
delete newAppointment.from;
delete newAppointment.to;
this.appointments.unshift(newAppointment);
let newTableData = JSON.parse(JSON.stringify(this.appointments));
this.appointments = newTableData;
this.closeModal();
}
}
} | httpSv: HttpService,
private modal: TCModalService,
private formBuilder: FormBuilder |
no_subscriber.rs | use criterion::{black_box, criterion_group, criterion_main, Criterion};
use tracing::Level;
fn bench_no_subscriber(c: &mut Criterion) {
use std::sync::atomic::{AtomicUsize, Ordering};
let mut group = c.benchmark_group("no_subscriber");
group.bench_function("span", |b| {
b.iter(|| {
black_box(tracing::span!(Level::TRACE, "span"));
})
});
group.bench_function("event", |b| {
b.iter(|| {
tracing::event!(Level::TRACE, "hello");
})
});
group.bench_function("relaxed_load", |b| {
let foo = AtomicUsize::new(1);
b.iter(|| black_box(foo.load(Ordering::Relaxed)));
});
group.bench_function("acquire_load", |b| {
let foo = AtomicUsize::new(1);
b.iter(|| black_box(foo.load(Ordering::Acquire)))
});
group.bench_function("log", |b| {
b.iter(|| {
log::log!(log::Level::Info, "log");
})
});
group.finish();
}
fn bench_fields(c: &mut Criterion) {
let mut group = c.benchmark_group("no_subscriber_field");
group.bench_function("span", |b| {
b.iter(|| {
black_box(tracing::span!(
Level::TRACE,
"span", | });
group.bench_function("event", |b| {
b.iter(|| {
tracing::event!(
Level::TRACE,
foo = tracing::field::display(format!("bar {:?}", 2))
);
})
});
group.bench_function("log", |b| {
b.iter(|| log::log!(log::Level::Trace, "{}", format!("bar {:?}", 2)))
});
group.finish();
}
criterion_group!(benches, bench_no_subscriber, bench_fields);
criterion_main!(benches); | foo = tracing::field::display(format!("bar {:?}", 2))
));
}) |
pr.js | /* global done:false */
/* global error:false */
/* global PaymentRequest:false */
/**
* Updates the details based on the selected address.
* @param {object} details - The current details to update.
* @param {PaymentAddress} addr - The address selected by the user.
* @return {object} The updated details.
*/
function updateDetails(details, addr) {
if (addr.country === 'US') {
delete details.error;
var shippingOption = {
id: '',
label: '',
amount: {
currency: 'USD',
value: '0.00',
},
selected: true,
};
if (addr.region === 'CA') {
shippingOption.id = 'ca';
shippingOption.label = 'Free shipping in California';
details.total.amount.value = '55.00';
} else {
shippingOption.id = 'us';
shippingOption.label = 'Standard shipping in US';
shippingOption.amount.value = '5.00';
details.total.amount.value = '60.00';
}
details.displayItems.splice(1, 1, shippingOption);
details.shippingOptions = [shippingOption];
} else {
delete details.shippingOptions;
details.error = 'Cannot ship outside of US.';
}
return details;
}
/**
* Launches payment request that provides different shipping options based on
* the shipping address that the user selects.
*/
function | () {
// eslint-disable-line no-unused-vars
var supportedInstruments = [
{
supportedMethods: 'https://google.com/pay',
data: {
allowedPaymentMethods: ['TOKENIZED_CARD', 'CARD'],
apiVersion: 1,
cardRequirements: {
'allowedCardNetworks': ['VISA', 'MASTERCARD', 'AMEX'],
},
merchantName: 'Rouslan Solomakhin',
merchantId: '00184145120947117657',
paymentMethodTokenizationParameters: {
tokenizationType: 'GATEWAY_TOKEN',
parameters: {
'gateway': 'stripe',
'stripe:publishableKey': 'pk_live_lNk21zqKM2BENZENh3rzCUgo',
'stripe:version': '2016-07-06',
},
},
},
},
{
supportedMethods: 'basic-card',
},
];
var pendingDetails = {
total: {
label: 'Calculating...',
amount: {
currency: 'USD',
value: '0.00',
},
},
};
var options = {
requestShipping: true,
};
if (!window.PaymentRequest) {
error('PaymentRequest API is not supported.');
return;
}
try {
var request = new PaymentRequest(
supportedInstruments,
pendingDetails,
options,
);
var spinner = document.createElement('i');
spinner.classList = 'fa fa-refresh fa-spin';
var button = document.getElementById('buyButton');
button.appendChild(spinner);
request
.show(
new Promise(function(resolveShowPromise) {
info('Calculating final price...');
window.setTimeout(function() {
button.removeChild(spinner);
info('The final price is USD $55.00.');
var details = {
total: {
label: 'Donation',
amount: {
currency: 'USD',
value: '55.00',
},
},
displayItems: [
{
label: 'Original donation amount',
amount: {
currency: 'USD',
value: '65.00',
},
},
{
label: 'Pending shipping price',
amount: {
currency: 'USD',
value: '0.00',
},
pending: true,
},
{
label: 'Friends and family discount',
amount: {
currency: 'USD',
value: '-10.00',
},
},
],
};
request.addEventListener('shippingaddresschange', function(e) {
e.updateWith(
new Promise(function(resolveShippingAddressChange) {
window.setTimeout(function() {
resolveShippingAddressChange(
updateDetails(details, request.shippingAddress),
);
}, 2000);
}),
);
});
resolveShowPromise(details);
}, 5000); // 5 seconds
}),
)
.then(function(instrumentResponse) {
window.setTimeout(function() {
instrumentResponse
.complete('success')
.then(function() {
done(
'This is a demo website. No payment will be processed.',
instrumentResponse,
);
})
.catch(function(err) {
error(err);
});
}, 2000);
})
.catch(function(err) {
error(err);
});
} catch (e) {
error("Developer mistake: '" + e.message + "'");
}
}
| onBuyClicked |
test402.js | var callbackArguments = [];
var argument1 = function callback(a,b,c,d) {
callbackArguments.push(JSON.stringify(arguments))
argument1[2.1869172855352885e+307] = 122
return a+b-c-d
};
var argument2 = function callback(a,b,c,d) {
callbackArguments.push(JSON.stringify(arguments))
argument2[9] = {"25":1.3580484526687759e+308,"157":"","655":"","843":82,"":"","5e-324":5.034160658305139e+307,"8.462627616720447e+307":1.1315952725967238e+308,"1.537448068198547e+308":""}
return a+b+c+d
};
var argument3 = false;
var argument4 = {"460":"N","595":-100,"":3.4203197471313544e+307,"T":655,"MEg%>^[c_<;":"q)","9G":9.645797075099021e+307};
var argument5 = 1.517678662922813e+308;
var argument6 = false;
var argument7 = r_0;
var argument8 = function callback(a,b,c,d) {
callbackArguments.push(JSON.stringify(arguments))
argument6[3] = null
base_2[1][3] = null
return a+b*c+d
};
var argument9 = function callback(a,b,c,d) {
callbackArguments.push(JSON.stringify(arguments))
argument9[8] = {"49":"F","714":"","8.67486099559871e+307":"g","":"0sdwj","2L+":893}
return a-b-c/d
};
var base_0 = ["X","#3"]
var r_0= undefined
try {
r_0 = base_0.reduce(argument1)
}
catch(e) {
r_0= "Error"
}
var base_1 = ["X","#3"]
var r_1= undefined
try {
r_1 = base_1.reduce(argument2,argument3,argument4)
}
catch(e) {
r_1= "Error"
}
var base_2 = ["X","#3"]
var r_2= undefined
try {
r_2 = base_2.reduce(argument5,argument6,argument7,argument8)
}
catch(e) {
r_2= "Error"
}
var base_3 = ["X","#3"]
var r_3= undefined
try {
r_3 = base_3.reduce(argument9)
}
catch(e) {
r_3= "Error"
}
function serialize(array){
return array.map(function(a){
if (a === null || a == undefined) return a;
var name = a.constructor.name;
if (name==='Object' || name=='Boolean'|| name=='Array'||name=='Number'||name=='String')
return JSON.stringify(a);
return name;
});
}
setTimeout(function(){ | },300) | require("fs").writeFileSync("./experiments/reduce/reduceGen/test402.json",JSON.stringify({"baseObjects":serialize([base_0,base_1,base_2,base_3]),"returnObjects":serialize([r_0,r_1,r_2,r_3]),"callbackArgs":callbackArguments})) |
lib.rs | #[macro_use]
extern crate vst;
use std::sync::Arc;
use vst::api::{Events, Supported};
use vst::buffer::AudioBuffer;
use vst::event::Event;
use vst::plugin::{CanDo, Category, HostCallback, Info, Plugin, PluginParameters};
use vst::util::AtomicFloat;
use std::f32::consts::PI;
pub const TAU: f32 = PI * 2.0;
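// NOTE: `midi_pitch_to_freq` is used in `process` and in the tests below but is
// missing from this excerpt. The following is a plausible sketch of the standard
// equal-temperament conversion (A4 = MIDI note 69 = 440 Hz); the constant names
// are assumptions, not taken from the original file.
const A4_PITCH: i8 = 69;
const A4_FREQ: f32 = 440.0;
fn midi_pitch_to_freq(pitch: u8) -> f32 {
    // 2^((pitch - 69) / 12) * 440 Hz
    (f32::from(pitch as i8 - A4_PITCH) / 12.0).exp2() * A4_FREQ
}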
// struct Parameters {
// sample_rate: AtomicFloat,
// }
// impl Default for Parameters {
// fn default() -> Parameters {
// Parameters {
// sample_rate: AtomicFloat::new(44100.),
// }
// }
// }
// impl PluginParameters for Parameters {
// // get_parameter has to return the value used in set_parameter
// fn get_parameter(&self, index: i32) -> f32 {
// match index {
// 0 => self.get_cutoff(),
// 1 => self.res.get() / 4.,
// 2 => self.pole_value.get(),
// 3 => self.drive.get() / 5.,
// _ => 0.0,
// }
// }
// fn set_parameter(&self, index: i32, value: f32) {
// match index {
// 0 => self.set_cutoff(value),
// 1 => self.res.set(value * 4.),
// 2 => self.set_poles(value),
// 3 => self.drive.set(value * 5.),
// _ => (),
// }
// }
// fn get_parameter_name(&self, index: i32) -> String {
// match index {
// 0 => "cutoff".to_string(),
// 1 => "resonance".to_string(),
// 2 => "filter order".to_string(),
// 3 => "drive".to_string(),
// _ => "".to_string(),
// }
// }
// fn get_parameter_label(&self, index: i32) -> String {
// match index {
// 0 => "Hz".to_string(),
// 1 => "%".to_string(),
// 2 => "poles".to_string(),
// 3 => "%".to_string(),
// _ => "".to_string(),
// }
// }
// // This is what will display underneath our control. We can
// // format it into a string that makes the most sense.
// fn get_parameter_text(&self, index: i32) -> String {
// match index {
// 0 => format!("{:.0}", self.cutoff.get()),
// 1 => format!("{:.3}", self.res.get()),
// 2 => format!("{}", self.poles.load(Ordering::Relaxed) + 1),
// 3 => format!("{:.3}", self.drive.get()),
// _ => format!(""),
// }
// }
// }
#[derive(Default)]
struct VocalSynth {
// params: Arc<Parameters>,
sample_rate: f32,
time: f32,
note_duration: f32,
note: Option<u8>,
}
impl VocalSynth {
fn time_per_sample(&self) -> f32 {
1.0 / self.sample_rate
}
/// Process an incoming midi event.
///
/// The midi data is split up like so:
///
/// `data[0]`: Contains the status and the channel. Source: [source]
/// `data[1]`: Contains the supplemental data for the message - so, if this was a NoteOn then
/// this would contain the note.
/// `data[2]`: Further supplemental data. Would be velocity in the case of a NoteOn message.
///
/// [source]: http://www.midimountain.com/midi/midi_status.htm
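///
/// For example, `data = [144, 60, 127]` is a NoteOn (status `0x90`) for MIDI
/// note 60 (middle C) at full velocity, and `[128, 60, 0]` is the matching
/// NoteOff. (Illustrative values, not taken from the original.)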
fn process_midi_event(&mut self, data: [u8; 3]) |
fn note_on(&mut self, note: u8) {
self.note_duration = 0.0;
self.note = Some(note)
}
fn note_off(&mut self, note: u8) {
if self.note == Some(note) {
self.note = None
}
}
}
impl Plugin for VocalSynth {
fn new(_host: HostCallback) -> Self {
VocalSynth {
sample_rate: 44100.0,
note_duration: 0.0,
time: 0.0,
note: None,
}
}
fn get_info(&self) -> Info {
Info {
name: "Viardot".to_string(),
vendor: "Louis Dutton".to_string(),
unique_id: 6667,
category: Category::Synth,
inputs: 1,
outputs: 1,
parameters: 1,
initial_delay: 0,
..Info::default()
}
}
#[allow(unused_variables)]
#[allow(clippy::single_match)]
fn process_events(&mut self, events: &Events) {
for event in events.events() {
match event {
Event::Midi(ev) => self.process_midi_event(ev.data),
// More events can be handled here.
_ => (),
}
}
}
fn set_sample_rate(&mut self, rate: f32) {
self.sample_rate = rate;
}
fn process(&mut self, buffer: &mut AudioBuffer<f32>) {
let samples = buffer.samples();
let (_, mut outputs) = buffer.split();
let output_count = outputs.len();
let per_sample = self.time_per_sample();
let mut output_sample;
for sample_idx in 0..samples {
let time = self.time;
let note_duration = self.note_duration;
if let Some(current_note) = self.note {
let signal = (time * midi_pitch_to_freq(current_note) * TAU).sin();
// Apply a quick envelope to the attack of the signal to avoid popping.
let attack = 0.5;
let alpha = if note_duration < attack {
note_duration / attack
} else {
1.0
};
output_sample = (signal * alpha) as f32;
self.time += per_sample;
self.note_duration += per_sample;
} else {
output_sample = 0.0;
}
for buf_idx in 0..output_count {
let buff = outputs.get_mut(buf_idx);
buff[sample_idx] = output_sample;
}
}
}
fn can_do(&self, can_do: CanDo) -> Supported {
match can_do {
CanDo::ReceiveMidiEvent => Supported::Yes,
_ => Supported::Maybe,
}
}
}
plugin_main!(VocalSynth); // Important!
#[cfg(test)]
mod tests {
use crate::midi_pitch_to_freq;
#[test]
fn test_midi_pitch_to_freq() {
for i in 0..127 {
// expect no panics
midi_pitch_to_freq(i);
}
}
}
| {
match data[0] {
128 => self.note_off(data[1]),
144 => self.note_on(data[1]),
_ => (),
}
} |
test_kv_search_driver.py | from typing import Optional, Iterable
import numpy as np
import pytest
from jina import Document, DocumentArray
from jina.drivers.search import KVSearchDriver
from jina.executors.indexers import BaseKVIndexer
from jina.types.ndarray.generic import NdArray
class MockIndexer(BaseKVIndexer):
def add(
self, keys: Iterable[str], values: Iterable[bytes], *args, **kwargs
) -> None:
pass
def query(self, keys: Iterable[str]) -> Optional[bytes]:
values = []
for k in keys:
values.append(self.db.get(k, None))
return values
def get_query_handler(self):
pass
def get_add_handler(self):
pass
def get_create_handler(self):
pass
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.db = {}
doc_ids = ['1', '2', '3', '4']
doc_ids = [item * 16 for item in doc_ids]
for doc_id in doc_ids:
with Document() as doc:
doc.id = doc_id
doc.embedding = np.array([doc.id])
self.db[doc.id] = doc.SerializeToString()
class SimpleKVSearchDriver(KVSearchDriver):
def __init__(self, docs=None, traversal_paths=['r'], *args, **kwargs):
super().__init__(traversal_paths=traversal_paths, *args, **kwargs)
self._docs = docs
@property
def docs(self):
return self._docs
@property
def exec_fn(self):
return self._exec_fn
@pytest.fixture(scope='function')
def document():
# 1-D embedding
# doc: 0
# - chunk: 1
# - chunk: 2
# - chunk: 3
# - chunk: 4
# - chunk: 5 - will be missing from KV indexer
doc = Document()
doc.id = '0' * 16
for c in range(5):
with Document() as chunk:
chunk.id = str(c + 1) * 16
doc.chunks.add(chunk)
return doc
| # 1-D embedding
# doc: 0
# - chunk: 1
# - match: 2
# - match: 3
# - match: 4
# - match: 5 - will be missing from KV indexer
# - match: 6 - will be missing from KV indexer
with Document() as doc:
doc.id = '0' * 16
with Document() as chunk:
chunk.id = '1' * 16
for m in range(5):
with Document() as match:
match.id = str(m + 2) * 16
match.score.value = 1.0
chunk.matches.append(match)
doc.chunks.append(chunk)
return doc
def test_vectorsearch_driver_mock_indexer_apply_all(document):
driver = SimpleKVSearchDriver()
executor = MockIndexer()
driver.attach(executor=executor, runtime=None)
dcs = list(document.chunks)
assert len(dcs) == 5
for chunk in dcs:
assert chunk.embedding is None
driver._apply_all([DocumentArray(document.chunks)])
dcs = list(document.chunks)
# chunk idx: 5 had no match and is removed as a missing idx
assert len(dcs) == 4
for chunk in dcs:
assert chunk.embedding is not None
embedding_array = chunk.embedding
np.testing.assert_equal(embedding_array, np.array([chunk.id]))
def test_vectorsearch_driver_mock_indexer(document):
dcs = list(document.chunks)
assert len(dcs) == 5
for chunk in dcs:
assert chunk.embedding is None
driver = SimpleKVSearchDriver(
docs=DocumentArray([document]), traversal_paths=('c',)
)
executor = MockIndexer()
driver.attach(executor=executor, runtime=None)
driver()
# chunk idx: 5 had no match and is removed as a missing idx
dcs = list(document.chunks)
assert len(dcs) == 4
for chunk in dcs:
assert chunk.embedding is not None
embedding_array = chunk.embedding
np.testing.assert_equal(embedding_array, np.array([chunk.id]))
def test_vectorsearch_driver_mock_indexer_with_matches_on_chunks(
document_with_matches_on_chunks,
):
driver = SimpleKVSearchDriver(
docs=DocumentArray([document_with_matches_on_chunks]), traversal_paths=('cm',)
)
executor = MockIndexer()
driver.attach(executor=executor, runtime=None)
driver()
dcs = list(document_with_matches_on_chunks.chunks)
assert len(dcs) == 1
chunk = dcs[0]
matches = list(chunk.matches)
assert len(matches) == 3
for match in matches:
assert NdArray(match.embedding).value is not None
embedding_array = NdArray(match.embedding).value
np.testing.assert_equal(embedding_array, np.array([match.id])) | @pytest.fixture(scope='function')
def document_with_matches_on_chunks(): |
MainMenu.test.js | /* eslint-disable no-undef */
import { MainMenu } from '../../../../../src/modules/menu/components/mainMenu/MainMenu';
import { createNoInitialStateMainMenuReducer } from '../../../../../src/store/mainMenu/mainMenu.reducer';
import { TabId, toggle } from '../../../../../src/store/mainMenu/mainMenu.action';
import { TestUtils } from '../../../../test-utils';
import { $injector } from '../../../../../src/injection';
import { setTab } from '../../../../../src/store/mainMenu/mainMenu.action';
import { DevInfo } from '../../../../../src/modules/utils/components/devInfo/DevInfo';
import { SearchResultsPanel } from '../../../../../src/modules/search/components/menu/SearchResultsPanel';
import { TopicsContentPanel } from '../../../../../src/modules/topics/components/menu/TopicsContentPanel';
import { createNoInitialStateMediaReducer } from '../../../../../src/store/media/media.reducer';
import { disableResponsiveParameterObservation, enableResponsiveParameterObservation } from '../../../../../src/store/media/media.action';
import { FeatureInfoPanel } from '../../../../../src/modules/featureInfo/components/FeatureInfoPanel';
import { MapsContentPanel } from '../../../../../src/modules/menu/components/mainMenu/content/maps/MapsContentPanel';
import { BvvMiscContentPanel } from '../../../../../src/modules/menu/components/mainMenu/content/misc/BvvMiscContentPanel';
import { TEST_ID_ATTRIBUTE_NAME } from '../../../../../src/utils/markup';
window.customElements.define(MainMenu.tag, MainMenu);
describe('MainMenu', () => {
const setup = (state = {}, config = {}) => {
const { embed = false } = config;
const initialState = {
mainMenu: {
open: true,
tab: null
},
media: {
portrait: false,
minWidth: true,
observeResponsiveParameter: true
},
...state
};
TestUtils.setupStoreAndDi(initialState, {
mainMenu: createNoInitialStateMainMenuReducer(),
media: createNoInitialStateMediaReducer()
});
$injector
.registerSingleton('EnvironmentService', {
isEmbedded: () => embed
})
.registerSingleton('TranslationService', { translate: (key) => key });
return TestUtils.render(MainMenu.tag);
};
describe('when instantiated', () => {
it('has a model containing default values', async () => {
await setup();
const model = new MainMenu().getModel();
expect(model).toEqual({
tab: null,
open: false,
portrait: false,
minWidth: false,
observeResponsiveParameter: false
});
});
it('has static constants', async () => {
expect(MainMenu.SWIPE_DELTA_PX).toBe(50);
expect(MainMenu.INITIAL_WIDTH_EM).toBe(28);
expect(MainMenu.MIN_WIDTH_EM).toBe(28);
expect(MainMenu.MAX_WIDTH_EM).toBe(100);
});
});
describe('responsive layout ', () => {
it('layouts for landscape and width >= 80em', async () => {
const state = {
media: {
portrait: false,
minWidth: true
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.is-landscape')).toBeTruthy();
expect(element.shadowRoot.querySelector('.is-desktop')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu')).toBeTruthy();
});
it('layouts for portrait and width >= 80em', async () => {
const state = {
media: {
portrait: true,
minWidth: true
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.is-portrait')).toBeTruthy();
expect(element.shadowRoot.querySelector('.is-desktop')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu')).toBeTruthy();
});
it('layouts for landscape and width < 80em', async () => {
const state = {
media: {
portrait: false,
minWidth: false
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.is-landscape')).toBeTruthy();
expect(element.shadowRoot.querySelector('.is-tablet')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu')).toBeTruthy();
});
it('layouts for portrait and width < 80em', async () => {
const state = {
media: {
portrait: true,
minWidth: false
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.is-portrait')).toBeTruthy();
expect(element.shadowRoot.querySelector('.is-tablet')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu')).toBeTruthy();
});
});
describe('when initialized', () => {
it('adds a div which holds the main menu and a close button', async () => {
const element = await setup();
expect(element.shadowRoot.querySelector('.main-menu.is-open')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu__close-button')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu__close-button').id).toBe('toggle');
expect(element.shadowRoot.querySelector('.main-menu__close-button').title).toBe('menu_main_open_button');
expect(element.shadowRoot.querySelector('.main-menu__close-button-text').innerText).toBe('menu_main_open_button');
});
it('adds a container for content and shows demo content', async () => {
const element = await setup();
expect(element.shadowRoot.querySelector('.main-menu__container')).toBeTruthy();
expect(element.shadowRoot.querySelector('.main-menu__container').children.length > 0).toBeTrue();
});
it('renders nothing when embedded', async () => {
const element = await setup({}, { embed: true });
expect(element.shadowRoot.children.length).toBe(0);
});
it('renders the content panels', async () => {
const element = await setup();
const contentPanels = element.shadowRoot.querySelectorAll('.tabcontent');
expect(contentPanels.length).toBe(Object.keys(TabId).length);
for (let i = 0; i < contentPanels.length; i++) {
switch (i) {
case TabId.SEARCH:
expect(contentPanels[i].innerHTML.toString().includes(SearchResultsPanel.tag)).toBeTrue();
break;
case TabId.TOPICS:
expect(contentPanels[i].innerHTML.toString().includes(TopicsContentPanel.tag)).toBeTrue();
break;
case TabId.FEATUREINFO:
expect(contentPanels[i].innerHTML.toString().includes(FeatureInfoPanel.tag)).toBeTrue();
break;
case TabId.MAPS:
expect(contentPanels[i].innerHTML.toString().includes(MapsContentPanel.tag)).toBeTrue();
break;
case TabId.MISC:
expect(contentPanels[i].innerHTML.toString().includes(BvvMiscContentPanel.tag)).toBeTrue();
break;
}
}
});
it('contains test-id attributes', async () => {
const element = await setup();
expect(element.shadowRoot.querySelectorAll(`[${TEST_ID_ATTRIBUTE_NAME}]`)).toHaveSize(5);
expect(element.shadowRoot.querySelector(SearchResultsPanel.tag).hasAttribute(TEST_ID_ATTRIBUTE_NAME)).toBeTrue();
expect(element.shadowRoot.querySelector(TopicsContentPanel.tag).hasAttribute(TEST_ID_ATTRIBUTE_NAME)).toBeTrue();
expect(element.shadowRoot.querySelector(FeatureInfoPanel.tag).hasAttribute(TEST_ID_ATTRIBUTE_NAME)).toBeTrue();
expect(element.shadowRoot.querySelector(MapsContentPanel.tag).hasAttribute(TEST_ID_ATTRIBUTE_NAME)).toBeTrue();
expect(element.shadowRoot.querySelector(BvvMiscContentPanel.tag).hasAttribute(TEST_ID_ATTRIBUTE_NAME)).toBeTrue();
});
it('displays the content panel for default index = 0', async () => {
const element = await setup();
const contentPanels = element.shadowRoot.querySelectorAll('.tabcontent');
expect(contentPanels.length).toBe(Object.keys(TabId).length);
for (let i = 0; i < contentPanels.length; i++) {
expect(contentPanels[i].classList.contains('is-active')).toBe(Object.values(TabId)[i] === 0);
}
});
it('displays the content panel for non default index', async () => {
const activeTabIndex = TabId.MISC;
const state = {
mainMenu: {
open: true,
tab: activeTabIndex
}
};
const element = await setup(state);
const contentPanels = element.shadowRoot.querySelectorAll('.tabcontent');
expect(contentPanels.length).toBe(Object.keys(TabId).length);
for (let i = 0; i < contentPanels.length; i++) {
expect(contentPanels[i].classList.contains('is-active')).toBe(Object.values(TabId)[i] === activeTabIndex);
}
});
it('adds a slider to resize width', async () => {
const element = await setup();
const slider = element.shadowRoot.querySelector('.slider-container input');
expect(slider.type).toBe('range');
expect(slider.value).toBe('28');
expect(slider.min).toBe('28');
expect(slider.max).toBe('100');
expect(slider.draggable).toBeTrue();
});
it('contains a dev info', async () => {
const element = await setup();
expect(element.shadowRoot.querySelector('.main-menu').querySelector(DevInfo.tag)).toBeTruthy();
});
it('does not add the prevent-transition css class', async () => {
const state = {
media: {
portrait: true,
minWidth: false,
observeResponsiveParameter: true
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.main-menu').parentElement.classList.contains('prevent-transition')).toBeFalse();
});
});
describe('when tab-index changes', () => {
const check = (index, panels) => {
for (let i = 0; i < panels.length; i++) {
expect(panels[i].classList.contains('is-active')).toBe(Object.values(TabId)[i] === index);
}
};
it('displays the corresponding content panel', async () => {
const element = await setup();
const contentPanels = element.shadowRoot.querySelectorAll('.tabcontent');
setTab(TabId.MAPS);
check(TabId.MAPS, contentPanels);
setTab(TabId.MISC);
check(TabId.MISC, contentPanels);
setTab(TabId.ROUTING);
check(TabId.ROUTING, contentPanels);
setTab(TabId.SEARCH);
check(TabId.SEARCH, contentPanels);
setTab(TabId.FEATUREINFO);
check(TabId.FEATUREINFO, contentPanels);
setTab(TabId.TOPICS);
check(TabId.TOPICS, contentPanels);
});
it('adds or removes a special Css class for the FeatureInfoContentPanel', async () => {
const element = await setup();
setTab(TabId.MAPS);
expect(element.shadowRoot.querySelectorAll('.main-menu.is-full-size')).toHaveSize(0);
setTab(TabId.FEATUREINFO);
expect(element.shadowRoot.querySelectorAll('.main-menu.is-full-size')).toHaveSize(1);
setTab(TabId.MAPS);
expect(element.shadowRoot.querySelectorAll('.main-menu.is-full-size')).toHaveSize(0);
});
});
describe('when close button clicked', () => {
it('closes the main menu', async () => {
const element = await setup();
toggle();
expect(element.shadowRoot.querySelector('.main-menu.is-open')).toBeNull();
expect(element.shadowRoot.querySelector('.main-menu__close-button')).toBeTruthy();
});
});
describe('when close button swiped', () => {
const getCenter = (element) => {
const rect = element.getBoundingClientRect();
return { x: (rect.right + rect.left) / 2, y: (rect.top + rect.bottom) / 2 };
};
it('closes the main menu on swipe upward', async () => {
const state = {
media: {
portrait: true,
minWidth: false
}
};
const element = await setup(state);
const closeButton = element.shadowRoot.querySelector('.main-menu__close-button');
const center = getCenter(closeButton);
// Touch-path upwards
TestUtils.simulateTouchEvent('touchstart', closeButton, center.x, center.y, 2);
TestUtils.simulateTouchEvent('touchmove', closeButton, center.x, center.y - 55, 2);
TestUtils.simulateTouchEvent('touchend', closeButton, center.x, center.y - 200);
expect(element.shadowRoot.querySelector('.main-menu.is-open')).toBeNull();
});
it('does NOT close the main menu on swipe downwards, left or right', async () => {
const state = {
media: {
portrait: true,
minWidth: false
}
};
const element = await setup(state);
const closeButton = element.shadowRoot.querySelector('.main-menu__close-button');
const center = getCenter(closeButton);
// Touch-path downwards
TestUtils.simulateTouchEvent('touchstart', closeButton, center.x, center.y, 2);
TestUtils.simulateTouchEvent('touchmove', closeButton, center.x, center.y + 55, 2);
TestUtils.simulateTouchEvent('touchend', closeButton, center.x, center.y + 200);
// Touch-path left
TestUtils.simulateTouchEvent('touchstart', closeButton, center.x, center.y, 2);
TestUtils.simulateTouchEvent('touchmove', closeButton, center.x - 55, center.y, 2);
TestUtils.simulateTouchEvent('touchend', closeButton, center.x - 200, center.y);
// Touch-path right
TestUtils.simulateTouchEvent('touchstart', closeButton, center.x, center.y, 2);
TestUtils.simulateTouchEvent('touchmove', closeButton, center.x + 55, center.y, 2);
TestUtils.simulateTouchEvent('touchend', closeButton, center.x + 200, center.y);
expect(element.shadowRoot.querySelector('.main-menu.is-open')).toBeTruthy();
});
it('close-button gets the focus after swipe', async () => {
const state = {
media: {
portrait: true,
minWidth: false
}
};
const element = await setup(state);
const closeButton = element.shadowRoot.querySelector('.main-menu__close-button');
const center = getCenter(closeButton);
// Touch-path swipe left
TestUtils.simulateTouchEvent('touchstart', closeButton, center.x, center.y, 2);
TestUtils.simulateTouchEvent('touchmove', closeButton, center.x, center.y - 55, 2);
TestUtils.simulateTouchEvent('touchend', closeButton, center.x, center.y - 200);
expect(closeButton.matches(':focus')).toBeTrue();
});
});
describe('when responsive parameter observation state changes', () => {
it('adds or removes the prevent-transition css class', async () => {
const state = {
media: {
portrait: true,
minWidth: false,
observeResponsiveParameter: true
}
};
const element = await setup(state);
expect(element.shadowRoot.querySelector('.main-menu').parentElement.classList.contains('prevent-transition')).toBeFalse();
disableResponsiveParameterObservation();
expect(element.shadowRoot.querySelector('.main-menu').parentElement.classList.contains('prevent-transition')).toBeTrue();
enableResponsiveParameterObservation();
expect(element.shadowRoot.querySelector('.main-menu').parentElement.classList.contains('prevent-transition')).toBeFalse();
});
});
describe('when slider changes', () => {
it('adjusts the main menu width', async () => {
const value = 50;
const state = {
mainMenu: {
open: true,
tab: TabId.FEATUREINFO
}
};
const element = await setup(state);
const mainMenu = element.shadowRoot.querySelector('#mainmenu');
const slider = element.shadowRoot.querySelector('.slider-container input');
slider.value = value;
slider.dispatchEvent(new Event('input'));
expect(mainMenu.style.width).toBe(`${value}em`);
});
it('saves and restores width values', async () => {
const value = 50;
const element = await setup();
const mainMenu = element.shadowRoot.querySelector('#mainmenu');
const slider = element.shadowRoot.querySelector('.slider-container input');
const initialWidthInPx = window.getComputedStyle(mainMenu).width;
//check initial value
expect(slider.value).toBe('28');
//open FeatureInfo panel and adjust width
setTab(TabId.FEATUREINFO);
slider.value = value;
slider.dispatchEvent(new Event('input'));
const adjustedWidthInPx = window.getComputedStyle(mainMenu).width;
//open another panel
setTab(TabId.MAPS);
expect(window.getComputedStyle(mainMenu).width).toBe(initialWidthInPx);
//open FeatureInfo panel again
setTab(TabId.FEATUREINFO);
expect(window.getComputedStyle(mainMenu).width).toBe(adjustedWidthInPx);
expect(slider.value).toBe('50');
});
it('prevents default event handling and stops its propagation', async () => {
const state = { | tab: TabId.FEATUREINFO
}
};
const element = await setup(state);
const slider = element.shadowRoot.querySelector('.slider-container input');
const event = new Event('dragstart');
const preventDefaultSpy = spyOn(event, 'preventDefault');
const stopPropagationSpy = spyOn(event, 'stopPropagation');
slider.dispatchEvent(event);
expect(preventDefaultSpy).toHaveBeenCalled();
expect(stopPropagationSpy).toHaveBeenCalled();
});
});
}); | mainMenu: {
open: true, |
train.py | '''
Train
Train your neural network
Author: Tawn Kramer
'''
from __future__ import print_function
import os
import sys
import glob
import time
import fnmatch
import argparse
import numpy as np
from PIL import Image
import keras
import conf
import random
import augment
import models
'''
matplotlib can be a pain to set up, so handle the case where it is absent. When present,
use it to generate a plot of training results.
'''
try:
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.pyplot as plt
do_plot = True
except:
do_plot = False
def shuffle(samples):
'''
randomly mix a list and return a new list
'''
ret_arr = []
len_samples = len(samples)
while len_samples > 0:
iSample = random.randrange(0, len_samples)
ret_arr.append(samples[iSample])
del samples[iSample]
len_samples -= 1
return ret_arr
def parse_img_filepath(filepath):
basename = os.path.basename(filepath)
#less .jpg
f = basename[:-4]
f = f.split('_')
steering = float(f[3])
throttle = float(f[5])
data = {'steering':steering, 'throttle':throttle }
return data
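# Example (hypothetical filename layout, not from the original dataset): a basename
# like 'img_001_st_0.25_th_0.50.jpg' splits on '_' into
# ['img', '001', 'st', '0.25', 'th', '0.50'], so fields 3 and 5 yield
# {'steering': 0.25, 'throttle': 0.5}.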
def generator(samples, batch_size=32, perc_to_augment=0.5):
'''
Rather than keep all data in memory, we will make a function that keeps
its state and returns just the latest batch required via the yield command.
As we load images, we can optionally augment them in some manner that doesn't
change their underlying meaning or features. This is a combination of
brightness, contrast, sharpness, and color PIL image filters applied with random
settings. Optionally a shadow image may be overlaid with some random rotation and
opacity.
We flip each image horizontally and supply it as another sample with the steering
negated.
'''
num_samples = len(samples)
shadows = augment.load_shadow_images('./shadows/*.png')
while 1: # Loop forever so the generator never terminates
samples = shuffle(samples)
#divide batch_size in half, because we double each output by flipping image.
for offset in range(0, num_samples, batch_size):
batch_samples = samples[offset:offset+batch_size]
images = []
controls = []
for fullpath in batch_samples:
try:
data = parse_img_filepath(fullpath)
steering = data["steering"]
throttle = data["throttle"]
try:
image = Image.open(fullpath)
except:
image = None
if image is None:
print('failed to open', fullpath)
continue
#PIL Image as a numpy array
image = np.array(image)
if len(shadows) > 0 and random.uniform(0.0, 1.0) < perc_to_augment:
image = augment.augment_image(image, shadows)
center_angle = steering
images.append(image)
if conf.num_outputs == 2:
controls.append([center_angle, throttle])
elif conf.num_outputs == 1:
controls.append([center_angle])
else:
print("expected 1 or 2 ouputs")
except:
print("we threw an exception on:", fullpath)
yield [], []
# final np array to submit to training
X_train = np.array(images)
y_train = np.array(controls)
yield X_train, y_train
def get_files(filemask):
'''
use a filemask and search a path recursively for matches
'''
path, mask = os.path.split(filemask)
matches = []
for root, dirnames, filenames in os.walk(path):
for filename in fnmatch.filter(filenames, mask):
matches.append(os.path.join(root, filename))
return matches
def train_test_split(lines, test_perc):
'''
split a list into two parts; test_perc gives the fraction of samples placed in the test split
'''
train = []
test = []
for line in lines:
if random.uniform(0.0, 1.0) < test_perc:
test.append(line)
else:
train.append(line)
return train, test
def make_generators(inputs, limit=None, batch_size=32, aug_perc=0.0):
'''
gather the training images and create generators for training and validation
'''
#get the image files; steering/throttle are encoded in the filenames
lines = get_files(inputs)
print("found %d files" % len(lines))
if limit is not None:
lines = lines[:limit]
print("limiting to %d files" % len(lines))
train_samples, validation_samples = train_test_split(lines, test_perc=0.2)
print("num train/val", len(train_samples), len(validation_samples))
# compile and train the model using the generator function
train_generator = generator(train_samples, batch_size=batch_size, perc_to_augment=aug_perc)
validation_generator = generator(validation_samples, batch_size=batch_size, perc_to_augment=0.0)
n_train = len(train_samples)
n_val = len(validation_samples)
return train_generator, validation_generator, n_train, n_val
def | (model_name, epochs=50, inputs='./log/*.jpg', limit=None, aug_mult=1, aug_perc=0.0):
print('working on model', model_name)
'''
modify config.json to select the model to train.
'''
model = models.get_nvidia_model(conf.num_outputs)
'''
display layer summary and weights info
'''
models.show_model_summary(model)
callbacks = [
keras.callbacks.EarlyStopping(monitor='val_loss', patience=conf.training_patience, verbose=0),
keras.callbacks.ModelCheckpoint(model_name, monitor='val_loss', save_best_only=True, verbose=0),
]
batch_size = conf.training_batch_size
#Train on session images
train_generator, validation_generator, n_train, n_val = make_generators(inputs, limit=limit, batch_size=batch_size, aug_perc=aug_perc)
if n_train == 0:
print('no training data found')
return
steps_per_epoch = n_train // batch_size
validation_steps = n_val // batch_size
print("steps_per_epoch", steps_per_epoch, "validation_steps", validation_steps)
history = model.fit_generator(train_generator,
steps_per_epoch = steps_per_epoch,
validation_data = validation_generator,
validation_steps = validation_steps,
epochs=epochs,
verbose=1,
callbacks=callbacks)
try:
if do_plot:
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
plt.savefig('loss.png')
except:
print("problems with loss graph")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='train script')
parser.add_argument('model', type=str, help='model name')
parser.add_argument('--epochs', type=int, default=conf.training_default_epochs, help='number of epochs')
parser.add_argument('--inputs', default='../dataset/log/*.jpg', help='input mask to gather images')
parser.add_argument('--limit', type=int, default=None, help='max number of images to train with')
parser.add_argument('--aug_mult', type=int, default=conf.training_default_aug_mult, help='how many more images to augment')
parser.add_argument('--aug_perc', type=float, default=conf.training_default_aug_percent, help='what percentage of images to augment 0 - 1')
args = parser.parse_args()
go(args.model, epochs=args.epochs, limit=args.limit, inputs=args.inputs, aug_mult=args.aug_mult, aug_perc=args.aug_perc)
#python train.py mymodel_aug_90_x4_e200 --epochs=200 --aug_mult=4 --aug_perc=0.9
| go |
config.py | # -*-coding:utf-8-*-
import os
class BaseConfig(object):
"""Base configuration."""
DEBUG = True
BROKER_URL = os.getenv('BROKER_URL', 'amqp://guest:guest@localhost:5672/')
BROKER_POOL_LIMIT = os.getenv('BROKER_POOL_LIMIT', None)
CELERY_ENABLE_UTC = True
CELERY_TIMEZONE = os.getenv('CELERY_TIMEZONE', 'UTC')
CELERYD_CONCURRENCY = os.getenv('CELERYD_CONCURRENCY', 20)
SMTP_SERVER = os.getenv('SMTP_SERVER', None)
SMTP_SERVER_USER = os.getenv('SMTP_SERVER_USER', None)
SMTP_SERVER_PASSWORD = os.getenv('SMTP_SERVER_PASSWORD', None)
SMTP_SERVER_PORT = os.getenv('SMTP_SERVER_PORT', None)
FROM_EMAIL = os.getenv('FROM_EMAIL', '[email protected]')
FROM_NAME = os.getenv('FROM_NAME', 'root')
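# All of the above are read from the environment once at import time; note that
# os.getenv returns a string whenever the variable is set, so e.g.
# CELERYD_CONCURRENCY is an int only when the default above is used.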
class DevelopmentConfig(BaseConfig):
"""Development configuration."""
DEBUG = True
class | (BaseConfig):
"""Testing configuration."""
DEBUG = False
class ProductionConfig(BaseConfig):
"""Production configuration."""
DEBUG = False
| TestingConfig |
index.js | import Menu from "./Menu"
import MenuItem from './MenuItem' |
Menu.MenuItem = MenuItem
Menu.SubMenu = SubMenu
export { default as Menu } from "./Menu"
export default Menu |
import SubMenu from './SubMenu' |
xpath30_parser.py | #
# Copyright (c), 2018-2021, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <[email protected]>
#
"""
XPath 3.0 implementation - part 1 (parser class)
Refs:
- https://www.w3.org/TR/2014/REC-xpath-30-20140408/
- https://www.w3.org/TR/xpath-functions-30/
"""
from copy import deepcopy
from typing import Any, Dict, Optional
from ..namespaces import XPATH_MATH_FUNCTIONS_NAMESPACE
from ..xpath2 import XPath2Parser
DecimalFormatsType = Dict[Optional[str], Dict[str, str]]
class XPath30Parser(XPath2Parser):
"""
XPath 3.0 expression parser class. Accepts all XPath 2.0 options as keyword
arguments, but the *strict* option is ignored because XPath 3.0+ has braced
URI literals and the expanded name syntax is not compatible.
:param args: the same positional arguments of class :class:`elementpath.XPath2Parser`.
:param decimal_formats: a mapping with statically known decimal formats.
:param kwargs: the same keyword arguments of class :class:`elementpath.XPath2Parser`.
"""
version = '3.0'
SYMBOLS = XPath2Parser.SYMBOLS | {
'Q{', # see BracedURILiteral rule
'||', # concat operator
'!', # Simple map operator
# Math functions (trigonometric and exponential)
'pi', 'exp', 'exp10', 'log', 'log10', 'pow', 'sqrt',
'sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'atan2',
# Formatting functions
'format-integer', 'format-number', 'format-dateTime',
'format-date', 'format-time',
# String functions that use regular expressions
'analyze-string',
# Functions and operators on nodes
'path', 'has-children', 'innermost', 'outermost',
# Functions and operators on sequences
'head', 'tail', 'generate-id', 'uri-collection',
'unparsed-text', 'unparsed-text-lines', 'unparsed-text-available',
'environment-variable', 'available-environment-variables',
# Parsing and serializing
'parse-xml', 'parse-xml-fragment', 'serialize',
# Higher-order functions
'function-lookup', 'function-name', 'function-arity', '#', '?',
'for-each', 'filter', 'fold-left', 'fold-right', 'for-each-pair',
# Expressions and node type functions
'function', 'let', ':=', 'namespace-node',
# XSD list-types constructor functions
'ENTITIES', 'IDREFS', 'NMTOKENS',
}
DEFAULT_NAMESPACES = {
'math': XPATH_MATH_FUNCTIONS_NAMESPACE, **XPath2Parser.DEFAULT_NAMESPACES
}
PATH_STEP_SYMBOLS = {
'(integer)', '(string)', '(float)', '(decimal)', '(name)',
'*', '@', '..', '.', '(', '{', 'Q{', '$',
}
decimal_formats: DecimalFormatsType = {
None: {
'decimal-separator': '.',
'grouping-separator': ',',
'exponent-separator': 'e',
'infinity': 'Infinity',
'minus-sign': '-',
'NaN': 'NaN',
'percent': '%',
'per-mille': '‰',
'zero-digit': '0',
'digit': '#',
'pattern-separator': ';',
}
}
# https://www.w3.org/TR/xpath-30/#id-reserved-fn-names
RESERVED_FUNCTION_NAMES = {
'attribute', 'comment', 'document-node', 'element', 'empty-sequence',
'function', 'if', 'item', 'namespace-node', 'node', 'processing-instruction',
'schema-attribute', 'schema-element', 'switch', 'text', 'typeswitch',
}
function_signatures = XPath2Parser.function_signatures.copy()
def __init__(self, *args: Any, decimal_formats: Optional[DecimalFormatsType] = None,
**kwargs: Any) -> None:
kw | # XPath 3.0 definitions continue into module xpath3_operators
| args.pop('strict', None)
super(XPath30Parser, self).__init__(*args, **kwargs)
if decimal_formats is not None:
self.decimal_formats = deepcopy(self.decimal_formats)
for k, v in decimal_formats.items():
if k is not None:
self.decimal_formats[k] = self.decimal_formats[None].copy()
self.decimal_formats[k].update(v)
if None in decimal_formats:
self.decimal_formats[None].update(decimal_formats[None])
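# Hypothetical usage sketch ('it' is an invented key for illustration):
#   XPath30Parser(decimal_formats={'it': {'decimal-separator': ','}})
# copies the default (None) format under 'it' and then overrides only
# 'decimal-separator', as implemented in the loop above.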
|
gulp-watch.js | 'use strict';
let gulp = require('gulp'); | module.exports = watch;
function watch() {
gulp.watch(src.scripts.all, ['scripts', reload]);
} | let reload = require('browser-sync').reload;
let src = require('./sources');
|
fake_secret.go | /*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package fake
import (
core_v1 "k8s.io/api/core/v1"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
schema "k8s.io/apimachinery/pkg/runtime/schema"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
testing "k8s.io/client-go/testing"
)
// FakeSecrets implements SecretInterface
type FakeSecrets struct {
Fake *FakeCoreV1
ns string
}
var secretsResource = schema.GroupVersionResource{Group: "", Version: "v1", Resource: "secrets"}
var secretsKind = schema.GroupVersionKind{Group: "", Version: "v1", Kind: "Secret"}
// Get takes name of the secret, and returns the corresponding secret object, and an error if there is any.
func (c *FakeSecrets) Get(name string, options v1.GetOptions) (result *core_v1.Secret, err error) {
obj, err := c.Fake.
Invokes(testing.NewGetAction(secretsResource, c.ns, name), &core_v1.Secret{})
if obj == nil {
return nil, err
}
return obj.(*core_v1.Secret), err
}
// List takes label and field selectors, and returns the list of Secrets that match those selectors.
func (c *FakeSecrets) List(opts v1.ListOptions) (result *core_v1.SecretList, err error) {
obj, err := c.Fake.
Invokes(testing.NewListAction(secretsResource, secretsKind, c.ns, opts), &core_v1.SecretList{})
if obj == nil {
return nil, err
}
label, _, _ := testing.ExtractFromListOptions(opts)
if label == nil {
label = labels.Everything()
}
list := &core_v1.SecretList{}
for _, item := range obj.(*core_v1.SecretList).Items {
if label.Matches(labels.Set(item.Labels)) {
list.Items = append(list.Items, item)
}
}
return list, err
}
// Watch returns a watch.Interface that watches the requested secrets.
func (c *FakeSecrets) Watch(opts v1.ListOptions) (watch.Interface, error) {
return c.Fake.
InvokesWatch(testing.NewWatchAction(secretsResource, c.ns, opts))
}
// Create takes the representation of a secret and creates it. Returns the server's representation of the secret, and an error, if there is any.
func (c *FakeSecrets) Create(secret *core_v1.Secret) (result *core_v1.Secret, err error) {
obj, err := c.Fake.
Invokes(testing.NewCreateAction(secretsResource, c.ns, secret), &core_v1.Secret{})
if obj == nil {
return nil, err
}
return obj.(*core_v1.Secret), err
}
// Update takes the representation of a secret and updates it. Returns the server's representation of the secret, and an error, if there is any.
func (c *FakeSecrets) Update(secret *core_v1.Secret) (result *core_v1.Secret, err error) {
obj, err := c.Fake.
Invokes(testing.NewUpdateAction(secretsResource, c.ns, secret), &core_v1.Secret{})
if obj == nil {
return nil, err
}
return obj.(*core_v1.Secret), err
}
// Delete takes name of the secret and deletes it. Returns an error if one occurs.
func (c *FakeSecrets) Delete(name string, options *v1.DeleteOptions) error {
_, err := c.Fake.
Invokes(testing.NewDeleteAction(secretsResource, c.ns, name), &core_v1.Secret{})
return err
}
// DeleteCollection deletes a collection of objects.
func (c *FakeSecrets) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error {
action := testing.NewDeleteCollectionAction(secretsResource, c.ns, listOptions)
_, err := c.Fake.Invokes(action, &core_v1.SecretList{})
return err
}
// Patch applies the patch and returns the patched secret.
func (c *FakeSecrets) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *core_v1.Secret, err error) {
obj, err := c.Fake.
Invokes(testing.NewPatchSubresourceAction(secretsResource, c.ns, name, data, subresources...), &core_v1.Secret{})
if obj == nil |
return obj.(*core_v1.Secret), err
}
| {
return nil, err
} |
docs.go | // GENERATED BY THE COMMAND ABOVE; DO NOT EDIT
// This file was generated by swaggo/swag at
// 2020-03-22 15:19:36.759704 +0800 CST m=+0.052176208
package docs
import (
"bytes"
"encoding/json"
"strings"
"github.com/alecthomas/template"
"github.com/swaggo/swag"
)
var doc = `{
"schemes": {{ marshal .Schemes }},
"swagger": "2.0",
"info": {
"description": "{{.Description}}",
"title": "{{.Title}}",
"termsOfService": "https://github.com/EDDYCJY/filfox_data",
"contact": {},
"license": {
"name": "MIT",
"url": "https://github.com/EDDYCJY/filfox_data/blob/master/LICENSE"
},
"version": "{{.Version}}"
},
"host": "{{.Host}}",
"basePath": "{{.BasePath}}",
"paths": {
"/api/v1/articles": {
"get": {
"produces": [
"application/json"
],
"summary": "Get multiple articles",
"parameters": [
{
"description": "TagID",
"name": "tag_id",
"in": "body",
"schema": {
"type": "integer"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"schema": {
"type": "integer"
}
},
{
"description": "CreatedBy",
"name": "created_by",
"in": "body",
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
},
"post": {
"produces": [
"application/json"
],
"summary": "Add article",
"parameters": [
{
"description": "TagID",
"name": "tag_id",
"in": "body",
"required": true,
"schema": {
"type": "integer"
}
},
{
"description": "Title",
"name": "title",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "Desc",
"name": "desc",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "Content",
"name": "content",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "CreatedBy",
"name": "created_by",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"required": true,
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/api/v1/articles/{id}": {
"get": {
"produces": [
"application/json"
],
"summary": "Get a single article",
"parameters": [
{
"type": "integer",
"description": "ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
},
"put": {
"produces": [
"application/json"
],
"summary": "Update article",
"parameters": [
{
"type": "integer",
"description": "ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "TagID",
"name": "tag_id",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Title",
"name": "title",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Desc",
"name": "desc",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "Content",
"name": "content",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "ModifiedBy",
"name": "modified_by",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
},
"delete": {
"produces": [
"application/json"
],
"summary": "Delete article",
"parameters": [
{
"type": "integer",
"description": "ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/api/v1/tags": {
"get": {
"produces": [
"application/json"
],
"summary": "Get multiple article tags",
"parameters": [
{
"type": "string",
"description": "Name",
"name": "name",
"in": "query"
},
{
"type": "integer",
"description": "State",
"name": "state",
"in": "query"
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
},
"post": {
"produces": [
"application/json"
],
"summary": "Add article tag",
"parameters": [
{
"description": "Name",
"name": "name",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"schema": {
"type": "integer"
}
},
{
"description": "CreatedBy",
"name": "created_by",
"in": "body",
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/api/v1/tags/export": {
"post": {
"produces": [
"application/json"
],
"summary": "Export article tag",
"parameters": [
{
"description": "Name",
"name": "name",
"in": "body",
"schema": {
"type": "string"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"schema": {
"type": "integer"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/api/v1/tags/import": {
"post": {
"produces": [
"application/json"
],
"summary": "Import Image",
"parameters": [
{
"type": "file",
"description": "Image File",
"name": "image",
"in": "formData",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/api/v1/tags/{id}": {
"put": {
"produces": [
"application/json"
],
"summary": "Update article tag",
"parameters": [
{
"type": "integer",
"description": "ID",
"name": "id",
"in": "path",
"required": true
},
{
"description": "Name",
"name": "name",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
},
{
"description": "State",
"name": "state",
"in": "body",
"schema": {
"type": "integer"
}
},
{
"description": "ModifiedBy",
"name": "modified_by",
"in": "body",
"required": true,
"schema": {
"type": "string"
}
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
},
"delete": {
"produces": [
"application/json"
],
"summary": "Delete article tag",
"parameters": [
{
"type": "integer",
"description": "ID",
"name": "id",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
},
"/auth": {
"get": {
"produces": [
"application/json"
],
"summary": "Get Auth",
"parameters": [
{
"type": "string",
"description": "userName",
"name": "username",
"in": "query",
"required": true
},
{
"type": "string",
"description": "password",
"name": "password",
"in": "query",
"required": true
}
],
"responses": {
"200": {
"description": "OK",
"schema": {
"$ref": "#/definitions/app.Response"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"$ref": "#/definitions/app.Response"
}
}
}
}
}
},
"definitions": {
"app.Response": {
"type": "object",
"properties": {
"code": {
"type": "integer"
},
"data": {
"type": "object"
},
"msg": {
"type": "string"
}
}
}
}
}`
type swaggerInfo struct {
Version string
Host string
BasePath string
Schemes []string
Title string
Description string
}
// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = swaggerInfo{
Version: "1.0",
Host: "",
BasePath: "",
Schemes: []string{},
Title: "Golang Gin API",
Description: "An example of gin",
}
type s struct{}
func (s *s) ReadDoc() string {
sInfo := SwaggerInfo
sInfo.Description = strings.Replace(sInfo.Description, "\n", "\\n", -1)
t, err := template.New("swagger_info").Funcs(template.FuncMap{
"marshal": func(v interface{}) string {
a, _ := json.Marshal(v)
return string(a)
},
}).Parse(doc)
if err != nil {
return doc
}
var tpl bytes.Buffer
if err := t.Execute(&tpl, sInfo); err != nil {
return doc
}
return tpl.String()
}
func init() | {
swag.Register(swag.Name, &s{})
} |
|
goroutine_test.go | package go_learning
import (
"fmt"
"runtime"
"strconv"
"sync"
"testing"
"time"
)
func TestGoroutine(t *testing.T) {
for i := 0; i < 10; i++ {
// pass by value: each goroutine gets its own copy of the loop variable
go func (i int){
fmt.Println(i)
}(i)
// go func() {
// // would mostly print 10 (the final value of i)
// // shared variable: data race, needs a lock
// fmt.Println(i)
// }()
}
time.Sleep(time.Millisecond*50)
}
var wg sync.WaitGroup
func TestIncreaseCounter(t *testing.T) {
counter := 0
for i := 0; i < 1000; i++ {
wg.Add(1)
go func() {
counter++
wg.Done()
}()
}
wg.Wait()
t.Log(("Counter = " + strconv.Itoa(counter)))
}
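// Note: the counter++ above is not synchronized, so this test contains a data
// race and the logged Counter is often less than 1000; a sync.Mutex or
// atomic.AddInt64 around the increment would make the count deterministic.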
func TestSchedule(t *testing.T) {
runtime.GOMAXPROCS(1)
go | Log("0")
}()
for {
t.Log("1")
}
}
func doSomething(i int) {
fmt.Print(i)
}
func TestScheduleGoroutine(t *testing.T) {
runtime.GOMAXPROCS(1)
go func() {
for {
doSomething(0)
}
}()
for {
doSomething(1)
}
}
| func(){
t. |
main.go | package main
import (
"encoding/json"
"fmt"
"log"
)
type person struct {
FirstName string
LastName string
myPrivateInfo string //not exported: unexported fields are not marshalled
}
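// encoding/json only marshals exported fields; to include the private info it
// would have to be exported (e.g. MyPrivateInfo) and could carry a struct tag
// such as `json:"privateInfo"` to control the key name.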
func | () {
//create a slice of struct person, and fill with values
p := []person{}
p = append(p, person{FirstName: "arne", LastName: "arnesen", myPrivateInfo: "Something super secret"})
p = append(p, person{FirstName: "knut", LastName: "knutsen", myPrivateInfo: "Something even more secret"})
fmt.Println("Printing slice of struct\n ----------------------\n", p, "\n----------------------")
pM, err := json.Marshal(p)
if err != nil {
log.Println("Error: marshall : ", err)
}
fmt.Printf("Printing marshalled text %v, which is of type %T \n", pM, pM)
fmt.Printf("Printing the stringified slice of bytes %v \n", string(pM))
}
| main |
lib.rs | #![cfg_attr(not(feature = "std"), no_std)]
use ink_lang as ink;
#[ink::contract]
mod libadd {
#[ink(storage)]
pub struct Libadd {
}
/// Defines the storage of your contract.
/// Add new fields to the below struct in order
/// to add new static storage fields to your contract.
impl Libadd {
/// Constructor that creates a new, empty `Libadd` instance.
#[ink(constructor)]
pub fn new() -> Self {
Self{}
}
/// Default constructor.
///
/// Constructors can delegate to other constructors.
#[ink(constructor)]
pub fn default() -> Self {
Self::new()
}
// #[ink(message)]
// pub fn flip(&mut self) {
// self.value = !self.value;
// }
/// A message that can be called on instantiated contracts.
/// This one returns the sum of its two `i32` arguments.
#[ink(message)]
pub fn add(&self, a: i32, b: i32) -> i32 {
return a + b;
}
}
/// Unit tests in Rust are normally defined within such a `#[cfg(test)]`
/// module and test functions are marked with a `#[test]` attribute.
/// The below code is technically just normal Rust code.
#[cfg(test)]
mod tests {
/// Imports all the definitions from the outer scope so we can use them here.
use super::*;
/// We test that a default-constructed contract adds integers correctly. | let libadd = Libadd::default();
assert_eq!(libadd.add(100, 120), 220);
}
}
} | #[test]
fn integer_works() { |
test_filtro_asientos.py | from main.models import FiltroMovimientos
from fixtures_views import *
class TestFiltroAsientos():
@pytest.fixture
def create_filter(self):
# Create filter with default values (all blanks)
return FiltroMovimientos.objects.create()
@pytest.fixture
def create_and_populate_filter(self):
|
@pytest.fixture
def form_filtro_movimientos(self, django_app):
resp = django_app.get(reverse('main:asientos'), user='username')
return resp.forms['filtro']
@pytest.mark.parametrize('page', ['/asientos/filtro/', reverse('main:filtro_asientos')])
def test_redirect_if_not_logged_in(self, page, django_app):
resp = django_app.get(page)
assert resp.status_code == 302
assert resp.url.startswith('/accounts/login/')
@pytest.mark.parametrize('page', ['/asientos/filtro/', reverse('main:filtro_asientos')])
def test_redirect_get_requests(self, page, django_app):
resp = django_app.get(page, user='username')
assert resp.status_code == 302
assert resp.url.startswith('/asientos/')
def test_load_file_form_attributes(self, form_filtro_movimientos):
form = form_filtro_movimientos
assert form.id == 'filtro'
assert form.method == 'post'
assert form.action == '/asientos/filtro/'
assert form.action == reverse('main:filtro_asientos')
fields = form.fields.keys()
for f in ['f_fecha_inicial', 'f_fecha_final', 'f_descripcion',
'f_cuenta', 'f_asiento']:
assert f in fields
def test_filter_form_fill(self, form_filtro_movimientos):
form = form_filtro_movimientos
form['f_fecha_inicial'] = '2021-12-01'
form['f_fecha_final'] = '2021-12-15'
form['f_descripcion'] = 'descripcion palabras'
form['f_cuenta'] = '100'
form['f_asiento'] = '2'
resp = form.submit('accion_filtro', value="aplicar")
filtro = FiltroMovimientos.objects.all()[0]
assert filtro.fecha_inicial == '2021-12-01'
assert filtro.fecha_final == '2021-12-15'
assert filtro.descripcion == 'descripcion palabras'
assert filtro.cuenta == '100'
assert filtro.asiento == '2'
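# The test_apply_filter_* tests below share one pattern: set a single field on the
# saved filter, partition the movimientos into expected-shown and expected-hidden
# lists, and assert their presence or absence in the rendered asientos page.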
def test_apply_filter_fecha_inicial(self, populate_database, create_filter, django_app):
filtro = create_filter
_, _, movimientos = populate_database
# set the filter
fecha = '2021-12-19'
filtro.fecha_inicial = fecha
filtro.save()
movimientos_in = [ m.descripcion for m in movimientos if m.fecha >= fecha]
movimientos_out = [ m.descripcion for m in movimientos if m.fecha < fecha]
# check only account filtered appears
resp = django_app.get(reverse('main:asientos'), user='username')
for name in movimientos_in:
assert name in resp.text
for name in movimientos_out:
assert name not in resp.text
def test_apply_filter_fecha_final(self, populate_database, create_filter, django_app):
filtro = create_filter
_, _, movimientos = populate_database
# set the filter
fecha = '2021-12-19'
filtro.fecha_final = fecha
filtro.save()
movimientos_in = [ m.descripcion for m in movimientos if m.fecha <= fecha]
movimientos_out = [ m.descripcion for m in movimientos if m.fecha > fecha]
# check only account filtered appears
resp = django_app.get(reverse('main:asientos'), user='username')
for name in movimientos_in:
assert name in resp.text
for name in movimientos_out:
assert name not in resp.text
def test_apply_filter_descripcion(self, populate_database, create_filter, django_app):
filtro = create_filter
_, _, movimientos = populate_database
# set the filter
descripcion = 'Calcetines'
filtro.descripcion = descripcion
filtro.save()
movimientos_in = [ m.descripcion for m in movimientos if descripcion in m.descripcion]
movimientos_out = [ m.descripcion for m in movimientos if descripcion not in m.descripcion]
# check only account filtered appears
resp = django_app.get(reverse('main:asientos'), user='username')
for name in movimientos_in:
assert name in resp.text
for name in movimientos_out:
assert name not in resp.text
def test_apply_filter_cuenta(self, populate_database, create_filter, django_app):
filtro = create_filter
_, cuentas, movimientos = populate_database
# set the filter
cuenta = '100' # Caja
filtro.cuenta = cuenta
filtro.save()
movimientos_in = [ m.descripcion for m in movimientos if m.cuenta.num == cuenta]
movimientos_out = [ m.descripcion for m in movimientos if m.cuenta.num != cuenta]
# check only account filtered appears
resp = django_app.get(reverse('main:asientos'), user='username')
for name in movimientos_in:
assert name in resp.text
def test_apply_filter_asiento(self, populate_database, create_filter, django_app):
filtro = create_filter
_, cuentas, movimientos = populate_database
# set the filter
asiento = '2'
filtro.asiento = asiento
filtro.save()
movimientos_in = [ m.descripcion for m in movimientos if m.num == int(asiento)]
movimientos_out = [ m.descripcion for m in movimientos if m.num != int(asiento)]
# check only account filtered appears
resp = django_app.get(reverse('main:asientos'), user='username')
for name in movimientos_in:
assert name in resp.text
for name in movimientos_out:
assert name not in resp.text
def test_borrar_filtro_by_button(self, create_and_populate_filter, form_filtro_movimientos):
create_and_populate_filter
form = form_filtro_movimientos
resp = form.submit('accion_filtro', value="borrar")
# Validate that saved filter is blank
filtro = FiltroMovimientos.objects.all()[0]
assert filtro.fecha_inicial == ''
assert filtro.fecha_final == ''
assert filtro.descripcion == ''
assert filtro.cuenta == ''
assert filtro.asiento == ''
def test_filter_url_with_wrong_action(self, populate_database, create_and_populate_filter, csrf_exempt_django_app):
original_filtro = create_and_populate_filter
form = {
'f_num': '204',
'f_nombre': 'Wrong name',
'f_etiqueta': 'Wrong etiqueta',
'accion_filtro': 'wrong_action',
}
resp = csrf_exempt_django_app.post(reverse('main:filtro_asientos'), form, user='username')
# check that nothing is changed
current_filtro = FiltroMovimientos.objects.all()[0]
assert current_filtro.fecha_inicial == original_filtro.fecha_inicial
assert current_filtro.fecha_final == original_filtro.fecha_final
assert current_filtro.descripcion == original_filtro.descripcion
assert current_filtro.cuenta == original_filtro.cuenta
assert current_filtro.asiento == original_filtro.asiento
# check redirect to the correct page
| f = FiltroMovimientos.objects.create(
fecha_inicial = '2021-12-01',
fecha_final = '2021-12-15',
descripcion = 'descripcion palabras',
cuenta = '100',
asiento = '2',
)
return f |
mod.rs | use crate::utils::{pckg, scope};
use duckscript::types::command::{Command, CommandResult, Commands};
use duckscript::types::instruction::Instruction;
use duckscript::types::runtime::StateValue;
use std::collections::HashMap;
#[cfg(test)]
#[path = "./mod_test.rs"]
mod mod_test;
#[derive(Clone)]
pub(crate) struct CommandImpl {
package: String,
}
impl Command for CommandImpl {
fn name(&self) -> String { | pckg::concat(&self.package, "PopStack")
}
fn aliases(&self) -> Vec<String> {
vec!["scope_pop_stack".to_string()]
}
fn help(&self) -> String {
include_str!("help.md").to_string()
}
fn clone_and_box(&self) -> Box<dyn Command> {
Box::new((*self).clone())
}
fn requires_context(&self) -> bool {
true
}
fn run_with_context(
&self,
arguments: Vec<String>,
state: &mut HashMap<String, StateValue>,
variables: &mut HashMap<String, String>,
_output_variable: Option<String>,
_instructions: &Vec<Instruction>,
_commands: &mut Commands,
_line: usize,
) -> CommandResult {
let copy = if arguments.is_empty() {
&[]
} else if arguments[0] == "--copy" {
&arguments[1..]
} else {
&[]
};
match scope::pop(variables, state, &copy) {
Ok(_) => CommandResult::Continue(Some("true".to_string())),
Err(error) => CommandResult::Error(error),
}
}
}
pub(crate) fn create(package: &str) -> Box<dyn Command> {
Box::new(CommandImpl {
package: package.to_string(),
})
} | |
run_websocket_client.py | import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import transforms, datasets
import logging
import argparse
import sys
import asyncio
import numpy as np
import syft as sy
from syft import workers
from syft.frameworks.torch.federated import utils
logger = logging.getLogger(__name__)
LOG_INTERVAL = 25
# Loss function
@torch.jit.script
def loss_fn(pred, target):
return F.nll_loss(input=pred, target=target)
# Model
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 20, 5, 1)
self.conv2 = nn.Conv2d(20, 50, 5, 1)
self.fc1 = nn.Linear(4 * 4 * 50, 500)
self.fc2 = nn.Linear(500, 10)
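# Shape check for 28x28 MNIST inputs: conv1 (5x5, stride 1) -> 24x24x20,
# max-pool -> 12x12, conv2 -> 8x8x50, max-pool -> 4x4x50, which is why fc1
# expects 4 * 4 * 50 = 800 input features.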
def forward(self, x):
x = F.relu(self.conv1(x))
x = F.max_pool2d(x, 2, 2)
x = F.relu(self.conv2(x))
x = F.max_pool2d(x, 2, 2)
x = x.view(-1, 4 * 4 * 50)
x = F.relu(self.fc1(x))
x = self.fc2(x)
return F.log_softmax(x, dim=1)
def define_and_get_arguments(args=sys.argv[1:]):
parser = argparse.ArgumentParser(
description="Run federated learning using websocket client workers."
)
parser.add_argument("--batch_size", type=int, default=32, help="batch size of the training")
parser.add_argument(
"--test_batch_size", type=int, default=128, help="batch size used for the test data"
)
parser.add_argument(
"--training_rounds", type=int, default=40, help="number of federated learning rounds"
)
parser.add_argument(
"--federate_after_n_batches",
type=int,
default=10,
help="number of training steps performed on each remote worker before averaging",
)
parser.add_argument("--lr", type=float, default=0.1, help="learning rate")
parser.add_argument("--cuda", action="store_true", help="use cuda")
parser.add_argument("--seed", type=int, default=1, help="seed used for randomization")
parser.add_argument("--save_model", action="store_true", help="if set, model will be saved")
parser.add_argument(
"--verbose",
"-v",
action="store_true",
help="if set, websocket client workers will be started in verbose mode",
)
args = parser.parse_args(args=args)
return args
async def fit_model_on_worker(
worker: workers.WebsocketClientWorker,
traced_model: torch.jit.ScriptModule,
batch_size: int,
curr_round: int,
max_nr_batches: int,
lr: float,
):
|
def evaluate_models_on_test_data(test_loader, results):
np.set_printoptions(formatter={"float": "{: .0f}".format})
for worker_id, worker_model, _ in results:
evaluate_model(worker_id, worker_model, "cpu", test_loader, print_target_hist=False)
def evaluate_model(worker_id, model, device, test_loader, print_target_hist=False):
model.eval()
test_loss = 0.0
correct = 0
hist_target = np.zeros(10)
hist_pred = np.zeros(10)
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
hist, _ = np.histogram(target, bins=10, range=(0, 10))
hist_target += hist
output = model(data)
test_loss += loss_fn(output, target).item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
hist, _ = np.histogram(pred, bins=10, range=(0, 10))
hist_pred += hist
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
if print_target_hist:
logger.info("Target histogram: %s", hist_target)
logger.info("Prediction hist.: %s", hist_pred)
logger.info(
"%s: Test set: Average loss: %s, Accuracy: %s/%s (%s)",
worker_id,
"{:.4f}".format(test_loss),
correct,
len(test_loader.dataset),
"{:.2f}".format(100.0 * correct / len(test_loader.dataset)),
)
async def main():
args = define_and_get_arguments()
hook = sy.TorchHook(torch)
kwargs_websocket = {"host": "localhost", "hook": hook, "verbose": args.verbose}
alice = workers.WebsocketClientWorker(id="alice", port=8777, **kwargs_websocket)
bob = workers.WebsocketClientWorker(id="bob", port=8778, **kwargs_websocket)
charlie = workers.WebsocketClientWorker(id="charlie", port=8779, **kwargs_websocket)
worker_instances = [alice, bob, charlie]
use_cuda = args.cuda and torch.cuda.is_available()
torch.manual_seed(args.seed)
device = torch.device("cuda" if use_cuda else "cpu")
kwargs = {"num_workers": 1, "pin_memory": True} if use_cuda else {}
test_loader = torch.utils.data.DataLoader(
datasets.MNIST(
"../data",
train=False,
transform=transforms.Compose(
[transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))]
),
),
batch_size=args.test_batch_size,
shuffle=False,
drop_last=False,
**kwargs,
)
model = Net().to(device)
(data, target) = test_loader.__iter__().next()
traced_model = torch.jit.trace(model, data)
learning_rate = args.lr
for curr_round in range(1, args.training_rounds + 1):
logger.info("Starting training round %s/%s", curr_round, args.training_rounds)
results = await asyncio.gather(
*[
fit_model_on_worker(
worker=worker,
traced_model=traced_model,
batch_size=args.batch_size,
curr_round=curr_round,
max_nr_batches=args.federate_after_n_batches,
lr=learning_rate,
)
for worker in worker_instances
]
)
models = {}
loss_values = {}
test_models = curr_round % 10 == 1 or curr_round == args.training_rounds
if test_models:
evaluate_models_on_test_data(test_loader, results)
for worker_id, worker_model, worker_loss in results:
if worker_model is not None:
models[worker_id] = worker_model
loss_values[worker_id] = worker_loss
traced_model = utils.federated_avg(models)
if test_models:
evaluate_model(
"Federated model", traced_model, "cpu", test_loader, print_target_hist=True
)
# decay learning rate
learning_rate = max(0.98 * learning_rate, args.lr * 0.01)
if args.save_model:
torch.save(model.state_dict(), "mnist_cnn.pt")
if __name__ == "__main__":
# Logging setup
logger = logging.getLogger("run_websocket_server")
FORMAT = "%(asctime)s %(levelname)s %(filename)s(l:%(lineno)d, p:%(process)d) - %(message)s"
logging.basicConfig(format=FORMAT)
logger.setLevel(level=logging.DEBUG)
# Websockets setup
websockets_logger = logging.getLogger("websockets")
websockets_logger.setLevel(logging.INFO)
websockets_logger.addHandler(logging.StreamHandler())
# Run main
asyncio.get_event_loop().run_until_complete(main())
| """Send the model to the worker and fit the model on the worker's training data.
Args:
worker: Remote location, where the model shall be trained.
traced_model: Model which shall be trained.
batch_size: Batch size of each training step.
curr_round: Index of the current training round (for logging purposes).
max_nr_batches: If > 0, training on worker will stop at min(max_nr_batches, nr_available_batches).
lr: Learning rate of each training step.
Returns:
A tuple containing:
* worker_id: Union[int, str], id of the worker.
* improved model: torch.jit.ScriptModule, model after training at the worker.
* loss: Loss on last training batch, torch.tensor.
"""
train_config = sy.TrainConfig(
model=traced_model,
loss_fn=loss_fn,
batch_size=batch_size,
shuffle=True,
max_nr_batches=max_nr_batches,
epochs=1,
lr=lr,
)
train_config.send(worker)
logger.info(
"Training round %s, calling fit on worker: %s, lr = %s",
curr_round,
worker.id,
"{:.3f}".format(train_config.lr),
)
loss = await worker.async_fit(dataset_key="mnist", return_ids=[0])
logger.info("Training round: %s, worker: %s, avg_loss: %s", curr_round, worker.id, loss.mean())
model = train_config.model_ptr.get().obj
return worker.id, model, loss |
Test.py | import os | bot.send(os.getcwd() + "\\Temp\\selenium-python.png","This is a test")
def test_text(bot):
bot.typer(' '.join(bot.message.split(' ')[1:]),bot.textbox) | def test_picture(bot): |
endpoints.py | # Copyright (C) 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rpython.rlib.debug import debug_print
from rpython.rlib.objectmodel import we_are_translated
from rpython.rlib.rarithmetic import intmask
from typhon import ruv
from typhon.atoms import getAtom
from typhon.autohelp import autohelp, method
from typhon.errors import userError
from typhon.futures import IOEvent
from typhon.objects.collections.lists import wrapList, unwrapList
from typhon.objects.data import StrObject, unwrapBytes, unwrapInt
from typhon.objects.networking.streamcaps import StreamSink, StreamSource
from typhon.objects.refs import LocalResolver, makePromise
from typhon.objects.root import Object, runnable
from typhon.vats import currentVat, scopedVat
RUN_1 = getAtom(u"run", 1)
RUN_2 = getAtom(u"run", 2)
def connectStreamCB(connect, status):
status = intmask(status)
stream = connect.c_handle
try:
vat, resolvers = ruv.unstashStream(stream)
sourceResolver, sinkResolver = unwrapList(resolvers)
assert isinstance(sourceResolver, LocalResolver)
assert isinstance(sinkResolver, LocalResolver)
with scopedVat(vat):
if status >= 0:
debug_print("Made connection!")
wrappedStream = ruv.wrapStream(stream, 2)
sourceResolver.resolve(StreamSource(wrappedStream, vat))
sinkResolver.resolve(StreamSink(wrappedStream, vat))
else:
error = "Connection failed: " + ruv.formatError(status)
debug_print(error)
sourceResolver.smash(StrObject(error.decode("utf-8")))
sinkResolver.smash(StrObject(error.decode("utf-8")))
# Done with stream.
ruv.closeAndFree(stream)
except:
if not we_are_translated():
raise
@autohelp
class TCPClientEndpoint(Object):
"""
Generic TCP client endpoint.
"""
def __init__(self, host, port, inet_type):
self.host = host
self.port = port
self.inet_type = inet_type
def toString(self):
return u"<endpoint (IPv%d, TCP): %s:%d>" % (
self.inet_type, self.host.decode("utf-8"), self.port)
@method("List")
def connectStream(self):
"""
Connect this endpoint, returning a `[source, sink]` pair of vows.
"""
vat = currentVat.get()
stream = ruv.alloc_tcp(vat.uv_loop)
source, sourceResolver = makePromise()
sink, sinkResolver = makePromise()
# Ugh, the hax.
resolvers = wrapList([sourceResolver, sinkResolver])
ruv.stashStream(ruv.rffi.cast(ruv.stream_tp, stream),
(vat, resolvers))
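# The resolvers ride along with the stream so that connectStreamCB (above) can
# unstash them and resolve the source/sink promises once libuv reports the
# connection result.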
vat.enqueueEvent(ConnectStreamIOEvent(
vat, stream, self.host, self.port, self.inet_type))
# Return the promises.
return [source, sink]
class ConnectStreamIOEvent(IOEvent):
"""Documentation for ConnectStreamIOEvent
"""
def __init__(self, vat, stream, host, port, inet_type):
self.vat = vat
self.stream = stream
self.host = host
self.port = port
self.inet_type = inet_type
def run(self):
# Make the actual connection.
if self.inet_type == 4:
ruv.tcp4Connect(self.stream, self.host, self.port,
connectStreamCB)
elif self.inet_type == 6:
ruv.tcp6Connect(self.stream, self.host, self.port,
connectStreamCB)
@autohelp
class TCP4ClientEndpoint(TCPClientEndpoint):
"""
A TCPv4 client endpoint.
"""
def __init__(self, host, port):
TCPClientEndpoint.__init__(self, host, port, 4)
@autohelp
class TCP6ClientEndpoint(TCPClientEndpoint):
"""
A TCPv6 client endpoint.
"""
def __init__(self, host, port):
TCPClientEndpoint.__init__(self, host, port, 6)
@runnable(RUN_2)
def makeTCP4ClientEndpoint(host, port):
"""
Make a TCPv4 client endpoint.
"""
host = unwrapBytes(host)
port = unwrapInt(port)
return TCP4ClientEndpoint(host, port)
@runnable(RUN_2)
def makeTCP6ClientEndpoint(host, port):
"""
Make a TCPv6 client endpoint.
"""
host = unwrapBytes(host)
port = unwrapInt(port)
return TCP6ClientEndpoint(host, port)
def shutdownCB(shutdown, status):
try:
ruv.free(shutdown)
# print "Shut down server, status", status
except:
if not we_are_translated():
raise
@autohelp
class TCPServer(Object):
"""
A TCP listening server.
"""
listening = True
def __init__(self, uv_server):
self.uv_server = uv_server
def toString(self):
return u"<server (IPv4, TCP)>"
@method("Void")
def | (self):
if self.listening:
shutdown = ruv.alloc_shutdown()
ruv.shutdown(
shutdown, ruv.rffi.cast(ruv.stream_tp, self.uv_server),
shutdownCB)
self.listening = False
def connectionStreamCB(uv_server, status):
status = intmask(status)
# If the connection failed to complete, then whatever; we're a server, not
# a client, and this is a pretty boring do-nothing failure mode.
# XXX we *really* should have some way to report failures, though; right?
if status < 0:
return
try:
with ruv.unstashingStream(uv_server) as (vat, handler):
uv_client = ruv.rffi.cast(ruv.stream_tp,
ruv.alloc_tcp(vat.uv_loop))
# Actually accept the connection.
ruv.accept(uv_server, uv_client)
# Incant the handler.
from typhon.objects.collections.maps import EMPTY_MAP
wrappedStream = ruv.wrapStream(uv_client, 2)
vat.sendOnly(handler, RUN_2, [StreamSource(wrappedStream, vat),
StreamSink(wrappedStream, vat)],
EMPTY_MAP)
except:
if not we_are_translated():
raise
@autohelp
class TCPServerEndpoint(Object):
"""
TCP Server Endpoint.
"""
def __init__(self, port, inet_type):
self.port = port
self.inet_type = inet_type
def toString(self):
return u"<endpoint (IPv%d, TCP): %d>" % (self.inet_type, self.port)
@method("Any", "Any")
def listenStream(self, handler):
vat = currentVat.get()
p, r = makePromise()
vat.enqueueEvent(ListenStreamIOEvent(vat, self.port, handler,
self.inet_type, r))
return p
class ListenStreamIOEvent(IOEvent):
"""Documentation for ListenStreamIOEvent
"""
def __init__(self, vat, port, handler, inet_type, r):
self.vat = vat
self.port = port
self.handler = handler
self.inet_type = inet_type
self.r = r
def run(self):
uv_server = ruv.alloc_tcp(self.vat.uv_loop)
try:
if self.inet_type == 4:
ruv.tcp4Bind(uv_server, "0.0.0.0", self.port)
elif self.inet_type == 6:
ruv.tcp6Bind(uv_server, "::", self.port)
except ruv.UVError as uve:
raise userError(u"listenStream/1: Couldn't listen: %s" %
uve.repr().decode("utf-8"))
uv_stream = ruv.rffi.cast(ruv.stream_tp, uv_server)
ruv.stashStream(uv_stream, (self.vat, self.handler))
# XXX hardcoded backlog of 42
ruv.listen(uv_stream, 42, connectionStreamCB)
self.r.resolve(TCPServer(uv_server))
@autohelp
class TCP4ServerEndpoint(TCPServerEndpoint):
"""
A TCPv4 server endpoint.
"""
def __init__(self, port):
TCPServerEndpoint.__init__(self, port, 4)
@autohelp
class TCP6ServerEndpoint(TCPServerEndpoint):
"""
A TCPv6 server endpoint.
"""
def __init__(self, port):
TCPServerEndpoint.__init__(self, port, 6)
@runnable(RUN_1)
def makeTCP4ServerEndpoint(port):
"""
Make a TCPv4 server endpoint.
"""
return TCP4ServerEndpoint(unwrapInt(port))
@runnable(RUN_1)
def makeTCP6ServerEndpoint(port):
"""
Make a TCPv6 server endpoint.
"""
return TCP6ServerEndpoint(unwrapInt(port))
| shutdown |
api_router.py | """API router
"""
from django.conf.urls import url
from django.urls import path
from rest_framework.routers import DefaultRouter
from vision_on_edge.azure_app_insight.api import views as app_insight_views
from vision_on_edge.azure_parts.api import views as azure_part_views
from vision_on_edge.azure_settings.api import views as azure_setting_views
from vision_on_edge.azure_training.api import views as azure_training_views
from vision_on_edge.azure_training_status.api import \
views as azure_training_status_views
from vision_on_edge.cameras.api import util_views as camera_util_views
from vision_on_edge.cameras.api import views
from vision_on_edge.feedback.api import views as feedback_views
from vision_on_edge.image_predictions.api import \
views as image_prediction_views
from vision_on_edge.images.api import views as image_views
from vision_on_edge.locations.api import views as location_views
from vision_on_edge.notifications.api import views as notifications_views
from vision_on_edge.relabeling.api import views as relabel_views
from vision_on_edge.streams.api import views as stream_views |
router = DefaultRouter()
router.trailing_slash = '/?'
router.register('settings', azure_setting_views.SettingViewSet)
router.register('cameras', views.CameraViewSet)
router.register('parts', azure_part_views.PartViewSet)
router.register('locations', location_views.LocationViewSet)
router.register('image_predictions',
image_prediction_views.ImagePredictionViewSet)
router.register('projects', azure_training_views.ProjectViewSet)
router.register('training_status',
azure_training_status_views.TrainingStatusViewSet)
router.register('tasks', azure_training_views.TaskViewSet)
router.register('images', image_views.ImageViewSet)
router.register('feedback', feedback_views.FeedbackViewSet)
router.register('notifications', notifications_views.NotificationViewSet)
urlpatterns = router.urls
urlpatterns += [
url('streams/connect', stream_views.connect_stream),
path('streams/<int:stream_id>/disconnect', stream_views.disconnect_stream),
path('streams/<int:stream_id>/video_feed', stream_views.video_feed),
path('streams/<int:stream_id>/capture', stream_views.capture),
path('streams/<int:stream_id>/keep_alive', stream_views.keep_alive),
path('projects/<int:project_id>/train', azure_training_views.train),
path('projects/<int:project_id>/export', azure_training_views.export),
path('projects/<int:project_id>/train_performance',
azure_training_views.train_performance),
path('projects/<int:project_id>/inference_video_feed',
stream_views.inference_video_feed),
path('projects/<int:project_id>/pull_cv_project',
azure_training_views.pull_cv_project),
path('projects/<int:project_id>/update_prob_threshold',
azure_training_views.update_prob_threshold),
path('projects/<int:project_id>/reset_project',
azure_training_views.reset_project),
path('projects/<int:project_id>/reset_camera',
azure_training_views.project_reset_camera),
path('projects/null/export', azure_training_views.export_null),
path('relabel', relabel_views.upload_relabel_image),
path('relabel/update', relabel_views.relabel_update),
path('appinsight/key', app_insight_views.instrumentation_key),
path('camera_utils/verify_rtsp', camera_util_views.verify_rtsp)
]
app_name = "api" | |
view_model.py | import spotify_manager
import re as re
from functools import lru_cache
MENU_PAGE_SIZE = 6
# Screen render types
MENU_RENDER_TYPE = 0
NOW_PLAYING_RENDER = 1
SEARCH_RENDER = 2
# Menu line item types
LINE_NORMAL = 0
LINE_HIGHLIGHT = 1
LINE_TITLE = 2
spotify_manager.refresh_devices()
class LineItem():
def __init__(self, title = "", line_type = LINE_NORMAL, show_arrow = False):
self.title = title
self.line_type = line_type
self.show_arrow = show_arrow
class Rendering():
def __init__(self, type):
self.type = type
def unsubscribe(self):
pass
class MenuRendering(Rendering):
def __init__(self, header = "", lines = [], page_start = 0, total_count = 0):
super().__init__(MENU_RENDER_TYPE)
self.lines = lines
self.header = header
self.page_start = page_start
self.total_count = total_count
self.now_playing = spotify_manager.DATASTORE.now_playing
self.has_internet = spotify_manager.has_internet
class NowPlayingRendering(Rendering):
def __init__(self):
super().__init__(NOW_PLAYING_RENDER)
self.callback = None
self.after_id = None
def subscribe(self, app, callback):
if callback == self.callback:
return
new_callback = self.callback is None
self.callback = callback
self.app = app
if new_callback:
self.refresh()
def refresh(self):
if not self.callback:
return
if self.after_id:
self.app.after_cancel(self.after_id)
self.callback(spotify_manager.DATASTORE.now_playing)
self.after_id = self.app.after(500, lambda: self.refresh())
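# refresh() keeps re-scheduling itself every 500 ms through the app's after()
# timer, so the subscriber receives the current now-playing state until
# unsubscribe() clears the callback.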
def unsubscribe(self):
super().unsubscribe()
self.callback = None
self.app = None
class NowPlayingCommand():
def __init__(self, runnable = lambda:()):
self.has_run = False
self.runnable = runnable
def run(self):
self.has_run = True
self.runnable()
class SearchRendering(Rendering):
def __init__(self, query, active_char):
super().__init__(SEARCH_RENDER)
self.query = query
self.active_char = active_char
self.loading = False
self.callback = None
self.results = None
def get_active_char(self):
return ' ' if self.active_char == 26 else chr(self.active_char + ord('a'))
def subscribe(self, app, callback):
if (callback == self.callback):
return
new_callback = self.callback is None
self.callback = callback
self.app = app
if new_callback:
self.refresh()
def refresh(self):
if not self.callback:
return
self.callback(self.query, self.get_active_char(), self.loading, self.results)
self.results = None
def unsubscribe(self):
super().unsubscribe()
self.callback = None
self.app = None
class SearchPage():
def __init__(self, previous_page):
self.header = "Search"
self.has_sub_page = True
self.previous_page = previous_page
self.live_render = SearchRendering("", 0)
self.is_title = False
def nav_prev(self):
self.live_render.query = self.live_render.query[0:-1]
self.live_render.refresh()
def nav_next(self):
if len(self.live_render.query) > 15:
return
active_char = ' ' if self.live_render.active_char == 26 \
else chr(self.live_render.active_char + ord('a'))
self.live_render.query += active_char
self.live_render.refresh()
def nav_play(self):
pass
def nav_up(self):
self.live_render.active_char += 1
if (self.live_render.active_char > 26):
self.live_render.active_char = 0
self.live_render.refresh()
def nav_down(self):
self.live_render.active_char -= 1
if (self.live_render.active_char < 0):
self.live_render.active_char = 26
self.live_render.refresh()
def run_search(self, query):
self.live_render.loading = True
self.live_render.refresh()
self.live_render.results = spotify_manager.search(query)
self.live_render.loading = False
self.live_render.refresh()
def nav_select(self):
spotify_manager.run_async(lambda: self.run_search(self.live_render.query))
return self
def nav_back(self):
return self.previous_page
def render(self):
return self.live_render
class NowPlayingPage():
def __init__(self, previous_page, header, command):
self.has_sub_page = False
self.previous_page = previous_page
self.command = command
self.header = header
self.live_render = NowPlayingRendering()
self.is_title = False
def play_previous(self):
spotify_manager.play_previous()
self.live_render.refresh()
def play_next(self):
spotify_manager.play_next()
self.live_render.refresh()
def toggle_play(self):
spotify_manager.toggle_play()
self.live_render.refresh()
def nav_prev(self):
spotify_manager.run_async(lambda: self.play_previous())
def nav_next(self):
spotify_manager.run_async(lambda: self.play_next())
def nav_play(self):
spotify_manager.run_async(lambda: self.toggle_play())
def nav_up(self):
pass
def nav_down(self):
pass
def nav_select(self):
return self
def nav_back(self):
return self.previous_page
def render(self):
if (not self.command.has_run):
self.command.run()
return self.live_render
EMPTY_LINE_ITEM = LineItem()
class MenuPage():
def __init__(self, header, previous_page, has_sub_page, is_title = False):
self.index = 0
self.page_start = 0
self.header = header
self.has_sub_page = has_sub_page
self.previous_page = previous_page
self.is_title = is_title
def total_size(self):
return 0
def page_at(self, index):
return None
def nav_prev(self):
spotify_manager.run_async(lambda: spotify_manager.play_previous())
def nav_next(self):
spotify_manager.run_async(lambda: spotify_manager.play_next())
def nav_play(self):
spotify_manager.run_async(lambda: spotify_manager.toggle_play())
def get_index_jump_up(self):
return 1
def get_index_jump_down(self):
return 1
def nav_up(self):
jump = self.get_index_jump_up()
if(self.index >= self.total_size() - jump):
return
if (self.index >= self.page_start + MENU_PAGE_SIZE - jump):
self.page_start = self.page_start + jump
self.index = self.index + jump
def nav_down(self):
jump = self.get_index_jump_down()
if(self.index <= (jump - 1)):
return
if (self.index <= self.page_start + (jump - 1)):
self.page_start = self.page_start - jump
if (self.page_start == 1):
self.page_start = 0
self.index = self.index - jump
def nav_select(self):
return self.page_at(self.index)
def nav_back(self):
return self.previous_page
def render(self):
lines = []
total_size = self.total_size()
for i in range(self.page_start, self.page_start + MENU_PAGE_SIZE):
if (i < total_size):
page = self.page_at(i)
if (page is None) :
lines.append(EMPTY_LINE_ITEM)
else:
line_type = LINE_TITLE if page.is_title else \
LINE_HIGHLIGHT if i == self.index else LINE_NORMAL
lines.append(LineItem(page.header, line_type, page.has_sub_page))
else:
lines.append(EMPTY_LINE_ITEM)
return MenuRendering(lines=lines, header=self.header, page_start=self.index, total_count=total_size)
class ShowsPage(MenuPage):
def __init__(self, previous_page):
super().__init__(self.get_title(), previous_page, has_sub_page=True)
self.shows = self.get_content()
self.num_shows = len(self.shows)
def get_title(self):
return "Podcasts"
def get_content(self):
return spotify_manager.DATASTORE.getAllSavedShows()
def total_size(self):
return self.num_shows
@lru_cache(maxsize=15)
def page_at(self, index):
return SingleShowPage(self.shows[index], self)
class PlaylistsPage(MenuPage):
def __init__(self, previous_page):
super().__init__(self.get_title(), previous_page, has_sub_page=True)
self.playlists = self.get_content()
self.num_playlists = len(self.playlists)
self.playlists.sort(key=self.get_idx) # sort playlists to keep order as arranged in Spotify library
def get_title(self):
return "Playlists"
def get_content(self):
return spotify_manager.DATASTORE.getAllSavedPlaylists()
def get_idx(self, e): # function to get idx from UserPlaylist for sorting
if type(e) == spotify_manager.UserPlaylist: # self.playlists may also contain albums, which don't have an idx value
return e.idx
else:
return 0
def total_size(self):
return self.num_playlists
@lru_cache(maxsize=15)
def page_at(self, index):
return SinglePlaylistPage(self.playlists[index], self)
class AlbumsPage(PlaylistsPage):
def __init__(self, previous_page):
super().__init__(previous_page)
def get_title(self):
return "Albums"
def get_content(self):
return spotify_manager.DATASTORE.getAllSavedAlbums()
class SearchResultsPage(MenuPage):
def __init__(self, previous_page, results):
super().__init__("Search Results", previous_page, has_sub_page=True)
self.results = results
tracks, albums, artists = len(results.tracks), len(results.albums), len(results.artists)
# Add 1 to each count (if > 0) to make room for section header line items
self.tracks = tracks + 1 if tracks > 0 else 0
self.artists = artists + 1 if artists > 0 else 0
self.albums = albums + 1 if albums > 0 else 0
self.total_count = self.tracks + self.albums + self.artists
self.index = 1
# indices of the section header line items
self.header_indices = [0, self.tracks, self.artists + self.tracks]
def total_size(self):
return self.total_count
def page_at(self, index):
if self.tracks > 0 and index == 0:
return PlaceHolderPage("TRACKS", self, has_sub_page=False, is_title=True)
elif self.artists > 0 and index == self.header_indices[1]:
return PlaceHolderPage("ARTISTS", self, has_sub_page=False, is_title=True)
elif self.albums > 0 and index == self.header_indices[2]:
return PlaceHolderPage("ALBUMS", self, has_sub_page=False, is_title=True)
elif self.tracks > 0 and index < self.header_indices[1]:
track = self.results.tracks[index - 1]
command = NowPlayingCommand(lambda: spotify_manager.play_track(track.uri))
return NowPlayingPage(self, track.title, command)
elif self.albums > 0 and index < self.header_indices[2]:
artist = self.results.artists[index - (self.tracks + 1)]
command = NowPlayingCommand(lambda: spotify_manager.play_artist(artist.uri))
return NowPlayingPage(self, artist.name, command)
else:
album = self.results.albums[index - (self.artists + self.tracks + 1)]
tracks = self.results.album_track_map[album.uri]
return InMemoryPlaylistPage(album, tracks, self)
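# Jump by 2 when the next/previous row is a section header so navigation skips over the header lines.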
def get_index_jump_up(self):
if self.index + 1 in self.header_indices:
return 2
return 1
def get_index_jump_down(self):
if self.index - 1 in self.header_indices:
return 2
return 1
class NewReleasesPage(PlaylistsPage):
|
class ArtistsPage(MenuPage):
def __init__(self, previous_page):
super().__init__("Artists", previous_page, has_sub_page=True)
def total_size(self):
return spotify_manager.DATASTORE.getArtistCount()
def page_at(self, index):
# play artist
artist = spotify_manager.DATASTORE.getArtist(index)
command = NowPlayingCommand(lambda: spotify_manager.play_artist(artist.uri))
return NowPlayingPage(self, artist.name, command)
class SingleArtistPage(MenuPage):
def __init__(self, artistName, previous_page):
super().__init__(artistName, previous_page, has_sub_page=True)
class SinglePlaylistPage(MenuPage):
def __init__(self, playlist, previous_page):
# Credit for code to remove emoticons from string: https://stackoverflow.com/a/49986645
regex_pattern = re.compile(pattern = "["
u"\U0001F600-\U0001F64F" # emoticons
u"\U0001F300-\U0001F5FF" # symbols & pictographs
u"\U0001F680-\U0001F6FF" # transport & map symbols
u"\U0001F1E0-\U0001F1FF" # flags (iOS)
"]+", flags = re.UNICODE)
super().__init__(regex_pattern.sub(r'',playlist.name), previous_page, has_sub_page=True)
self.playlist = playlist
self.tracks = None
def get_tracks(self):
if self.tracks is None:
self.tracks = spotify_manager.DATASTORE.getPlaylistTracks(self.playlist.uri)
return self.tracks
def total_size(self):
return self.playlist.track_count
def page_at(self, index):
track = self.get_tracks()[index]
command = NowPlayingCommand(lambda: spotify_manager.play_from_playlist(self.playlist.uri, track.uri, None))
return NowPlayingPage(self, track.title, command)
class SingleShowPage(MenuPage):
def __init__(self, show, previous_page):
super().__init__(show.name, previous_page, has_sub_page=True)
self.show = show
self.episodes = None
def get_episodes(self):
if self.episodes is None:
self.episodes = spotify_manager.DATASTORE.getShowEpisodes(self.show.uri)
return self.episodes
def total_size(self):
return self.show.episode_count
def page_at(self, index):
episode = self.get_episodes()[index]
command = NowPlayingCommand(lambda: spotify_manager.play_from_show(self.show.uri, episode.uri, None))
return NowPlayingPage(self, episode.name, command)
class InMemoryPlaylistPage(SinglePlaylistPage):
def __init__(self, playlist, tracks, previous_page):
super().__init__(playlist, previous_page)
self.tracks = tracks
class SingleTrackPage(MenuPage):
def __init__(self, track, previous_page, playlist = None, album = None):
super().__init__(track.title, previous_page, has_sub_page=False)
self.track = track
self.playlist = playlist
self.album = album
def render(self):
r = super().render()
print("render track")
context_uri = self.playlist.uri if self.playlist else self.album.uri
spotify_manager.play_from_playlist(context_uri, self.track.uri, None)
return r
class SingleEpisodePage(MenuPage):
def __init__(self, episode, previous_page, show = None):
super().__init__(episode.name, previous_page, has_sub_page=False)
self.episode = episode
self.show = show
def render(self):
r = super().render()
print("render episode")
context_uri = self.show.uri
spotify_manager.play_from_show(context_uri, self.episode.uri, None)
return r
class SavedTracksPage(MenuPage):
def __init__(self, previous_page):
super().__init__("Saved Tracks", previous_page, has_sub_page=True)
def total_size(self):
return spotify_manager.DATASTORE.getSavedTrackCount()
def page_at(self, index):
# play track
return SingleTrackPage(spotify_manager.DATASTORE.getSavedTrack(index), self)
class PlaceHolderPage(MenuPage):
def __init__(self, header, previous_page, has_sub_page=True, is_title = False):
super().__init__(header, previous_page, has_sub_page, is_title)
class RootPage(MenuPage):
def __init__(self, previous_page):
super().__init__("sPot", previous_page, has_sub_page=True)
self.pages = [
ArtistsPage(self),
AlbumsPage(self),
NewReleasesPage(self),
PlaylistsPage(self),
ShowsPage(self),
SearchPage(self),
NowPlayingPage(self, "Now Playing", NowPlayingCommand())
]
self.index = 0
self.page_start = 0
def get_pages(self):
if (not spotify_manager.DATASTORE.now_playing):
return self.pages[0:-1]
return self.pages
def total_size(self):
return len(self.get_pages())
def page_at(self, index):
return self.get_pages()[index]
| def __init__(self, previous_page):
super().__init__(previous_page)
def get_title(self):
return "New Releases"
def get_content(self):
return spotify_manager.DATASTORE.getAllNewReleases() |
config.ts | import assert from "assert";
import { GraphQLJSONObject } from "graphql-type-json";
import { isEqual, reduce } from "lodash";
import { Arg, Authorized, Mutation, Query, Resolver } from "type-graphql";
import { baseConfig, baseConfigAdmin } from "../../client/constants/baseConfig";
import { configStringToValue } from "../../client/constants/validation";
import { ADMIN } from "../constants";
import { ConfigurationTable } from "../db/tables";
@Resolver()
export class ConfigurationResolver {
static async getConfigData() {
const data = await ConfigurationTable().select("*");
const dataDb = data.reduce<Record<string, any>>((acum, { name, value }) => {
acum[name] = configStringToValue(value);
return acum;
}, {});
return reduce(
baseConfigAdmin,
(acum, value, key) => {
acum[key] = dataDb[key] ?? value;
return acum;
},
{ ...baseConfigAdmin }
);
}
@Authorized([ADMIN])
@Mutation(() => GraphQLJSONObject)
async editConfig(
@Arg("name") name: string, | new Error("Invalid type of configuration value")
);
const exists = await ConfigurationTable()
.select("name")
.where({
name,
})
.first();
if (exists) {
await ConfigurationTable()
.update({
value,
})
.where({
name,
});
} else {
await ConfigurationTable().insert({
name,
value,
});
}
const dbConfigData = await ConfigurationResolver.getConfigData();
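// Fire-and-forget cleanup: log which stored keys differ from the baseConfig defaults and delete the redundant rows that match them.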
(async () => {
const dataDb = await ConfigurationTable().select("*");
const dataConfigDb = dataDb.reduce<Record<string, any>>(
(acum, { name, value }) => {
acum[name] = configStringToValue(value);
return acum;
},
{}
);
const { unnecessaryKeys, differentKeys } = reduce<
typeof dataConfigDb,
{
differentKeys: { key: string; value: any }[];
unnecessaryKeys: string[];
}
>(
dataConfigDb,
(acum, dbValue, dbKey) => {
if (isEqual(baseConfigAdmin[dbKey], dbValue)) {
acum.unnecessaryKeys.push(dbKey);
} else {
acum.differentKeys.push({
key: dbKey,
value: dbValue,
});
}
return acum;
},
{
differentKeys: [],
unnecessaryKeys: [],
}
);
console.log({
unnecessaryKeys,
differentKeys,
});
await ConfigurationTable()
.delete()
.whereIn("name", unnecessaryKeys)
.catch((err) => {
console.error("Error removing baseConfig unnecessaryKeys", err);
});
})();
return dbConfigData;
}
@Query(() => GraphQLJSONObject)
async config(): Promise<typeof baseConfig> {
return await ConfigurationResolver.getConfigData();
}
} | @Arg("value") value: string
): Promise<typeof baseConfig> {
assert(
typeof baseConfigAdmin[name] === typeof configStringToValue(value), |
ip_parser.py | import dpkt
from parsers.utils import *
name = 'ip_parser'
def parseFunc(ts, eth):
|
def parseIPPacket(ts, eth):
ip = eth.data
tpa = getIPString(ip.dst)
tha = getMACString(eth.dst)
return {
'protocol': 'ip',
'layer': 3,
'time': ts,
'description': 'ip packet to (%s,%s)' % (tha, tpa),
'target': {
'ip': tpa,
'mac': tha
}
} | if getMACString(eth.dst) == 'FF:FF:FF:FF:FF:FF':
return None
if isinstance(eth.data, dpkt.ip.IP):
return parseIPPacket(ts, eth) |
config.rs | use std::io::Read;
use std::path::Path;
use std::fs::File;
use serde_json::{from_str, Value};
#[derive(Debug)]
pub struct JsonConfig(Value);
impl JsonConfig {
pub fn | (path: &str) -> Result<Self, String> {
let file_path = Path::new(path);
let mut file = File::open(&file_path).expect(&*format!("config file {} does not exist!", path));
let mut content = String::new();
file.read_to_string(&mut content)
.map_err(|err| {
err.to_string()
})
.and_then(|_| {
from_str(&*content)
.map_err(|err| {
err.to_string()
})
})
.map(|obj| {
JsonConfig(obj)
})
}
pub fn get_value(self) -> Value {
self.0
}
}
| new |
index.ts | import { Application as ExpressApp, Request, Response, Router } from 'express';
import { HummockConfig } from '../../models/config';
import { Logger, pGreen, pYellow } from '../log';
import { getLaunchers, LauncherService } from '../launcher';
import { ServerToggleDto, StubbDetailsDto } from '../../models/types';
const logger = new Logger('api');
function showNotFound(req: Request, res: Response): void {
res.status(404).send({
message: 'Not found'
});
}
class ApiRouter {
public readonly router = Router();
private readonly launchers: LauncherService[];
constructor(private readonly config: HummockConfig) {
this.launchers = getLaunchers(config);
this.handleRoutes();
}
public stopAll(): Promise<void> {
return Promise.all(this.launchers.map(launcher => launcher.stop())).then(() => {
// Make single void as the result
});
}
public startAll(): Promise<void> {
return Promise.all(this.launchers.map(launcher => launcher.start())).then(() => {
// Make single void as the result
});
}
private handleRoutes(): void {
this.router.get('/config', (req: Request, res: Response) => {
res.status(200).send(this.config);
});
this.router.get('/proxies', (req: Request, res: Response) => {
Promise.all(this.launchers.map(launcher => launcher.getListDto()))
.then(items => {
res.status(200).send({
total: this.config.servers.length,
items
});
})
.catch(err => {
logger.error(err);
res.status(500).send({ message: 'Something went wrong' });
});
});
this.router.get('/proxies/:proxyId', (req: Request, res: Response) => {
const id = req.params.proxyId;
const launcher = this.launchers.find(instance => instance.server.id === id);
if (!launcher) {
res.status(404).send({ message: `Host with id=${id} not found` });
return;
}
launcher
.getDto()
.then(dto => {
res.status(200).send(dto);
})
.catch(err => {
logger.error(err);
res.status(500).send({ message: 'Something went wrong' });
});
});
this.router.put('/proxies/:proxyId/stubb/:stubbId', (req: Request, res: Response) => {
const id = req.params.proxyId;
const stubbData: StubbDetailsDto = req.body;
const launcher = this.launchers.find(instance => instance.server.id === id);
if (!launcher) {
res.status(404).send({ message: `Host with id=${id} not found` });
return;
}
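// Stop the proxy, apply the stubb update, then restart it only if it was running before.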
const wasLaunched = launcher.isLaunched();
launcher
.stop()
.then(() => launcher.updateStubb(stubbData))
.then(() => wasLaunched && launcher.start())
.then(() => res.status(200).send({}))
.catch(err => {
logger.error(err);
res
.status(500)
.send({ message: `Unable to update stubb ${stubbData.name} for id ${id}` });
});
});
this.router.delete('/proxies/:proxyId/stubb/:stubbId', (req: Request, res: Response) => {
const id = req.params.proxyId;
const stubbId = decodeURIComponent(req.params.stubbId);
const launcher = this.launchers.find(instance => instance.server.id === id);
if (!launcher) {
res.status(404).send({ message: `Host with id=${id} not found` });
return;
}
const wasLaunched = launcher.isLaunched();
launcher
.stop()
.then(() => launcher.deleteStubb(stubbId))
.then(() => wasLaunched && launcher.start())
.then(() => res.status(200).send({}))
.catch(err => {
logger.error(err);
res.status(500).send({ message: `Unable to delete stubb ${stubbId} for id ${id}` });
});
});
this.router.post('/proxies', (req: Request, res: Response) => {
const toggleData: ServerToggleDto = req.body;
logger.info(!toggleData.run ? 'Stopping mock servers 🌑' : 'Starting mock servers 🌕');
Promise.all(
toggleData.ids.map(id => {
const launcher = this.launchers.find(item => item.server.id === id);
if (!launcher) {
logger.warn(pYellow(`Unable to toggle launcher with id=${id}`));
}
return toggleData.run ? launcher.start() : launcher.stop();
})
)
.then(() => {
logger.info(
pGreen('All good ✨'),
this.launchers.map(launcher => launcher.state)
);
res.status(200).send({});
})
.catch(err => {
logger.error(err);
res.status(500).send({ message: 'Something went wrong' });
});
});
this.router.all('/*', showNotFound);
}
}
export function pickApiR | pressApp, config: HummockConfig): Promise<ApiRouter> {
const apiRouter = new ApiRouter(config);
app.use('/api/v0', apiRouter.router);
app.all('/api/*', showNotFound);
return config.autostart ? apiRouter.startAll().then(() => apiRouter) : Promise.resolve(apiRouter);
}
| outes(app: Ex |
generator_builtins.go | // Copyright 2016 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
// Author: Raphael 'kena' Poss ([email protected])
package parser
import (
"errors"
"fmt"
)
// Table generators, also called "set-generating functions", are
// special functions that return an entire table.
//
// Overview of the concepts:
//
// - generators are implemented as regular built-in functions that
// return values of type DTable (this is a Datum type).
//
// - the return type of generators is a TTable. This describes objects
// that are conceptually sets of rows. A TTable type is
// characterized by its column types (no names).
//
// - a DTable doesn't carry the contents of a table directly; instead
// it carries a ValueGenerator reference.
//
// - ValueGenerator is an interface that offers a
// Start/Next/Values/Stop API similar to sql.planNode.
//
// - because generators are regular functions, it is possible to use
// them in any expression context. This is useful, e.g., to
// pass an entire table as argument to the ARRAY( ) conversion
// function.
//
// - the data source mechanism in the sql package has a special case
// for generators appearing in FROM contexts and knows how to
// construct a special row source from them (the valueGenerator
// planNode).
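// For example, generate_series(1, 3) evaluates to a DTable whose ValueGenerator yields the rows 1, 2 and 3 one at a time.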
// generatorFactory is the type of constructor functions for
// ValueGenerator objects suitable for use with DTable.
type generatorFactory func(ctx *EvalContext, args DTuple) (ValueGenerator, error)
// ValueGenerator is the interface provided by the object held by a
// DTable; objects that implement this interface are able to produce
// rows of values in a streaming fashion (like Go iterators or
// generators in Python).
type ValueGenerator interface {
// ColumnTypes returns the type signature of this value generator.
// Used by DTable.ResolvedType().
ColumnTypes() TTuple
// Start initializes the generator. Must be called once before
// Next() and Values().
Start() error
// Next determines whether there is a row of data available.
Next() (bool, error)
// Values retrieves the current row of data.
Values() DTuple
// Close must be called after Start() before disposing of the
// ValueGenerator. It does not need to be called if Start() has not
// been called yet.
Close()
}
var _ ValueGenerator = &seriesValueGenerator{}
func initGeneratorBuiltins() {
// Add all generators to the Builtins map after a few sanity checks.
for k, v := range generators {
for _, g := range v {
if !g.impure {
panic(fmt.Sprintf("generator functions should all be impure, found %v", g))
}
if g.class != GeneratorClass {
panic(fmt.Sprintf("generator functions should be marked with the GeneratorClass "+
"function class, found %v", g))
}
if g.generator == nil {
panic(fmt.Sprintf("generator functions should have Generator objects, "+
"found %v", g))
}
}
Builtins[k] = v
}
}
var generators = map[string][]Builtin{
"pg_catalog.generate_series": {
makeGeneratorBuiltin(ArgTypes{TypeInt, TypeInt}, TTuple{TypeInt}, makeSeriesGenerator),
makeGeneratorBuiltin(ArgTypes{TypeInt, TypeInt, TypeInt}, TTuple{TypeInt}, makeSeriesGenerator),
},
}
func makeGeneratorBuiltin(in ArgTypes, ret TTuple, g generatorFactory) Builtin |
// seriesValueGenerator supports the execution of generate_series()
// with integer bounds.
type seriesValueGenerator struct {
value, start, stop, step int64
}
var errStepCannotBeZero = errors.New("step cannot be 0")
func makeSeriesGenerator(_ *EvalContext, args DTuple) (ValueGenerator, error) {
start := int64(*args[0].(*DInt))
stop := int64(*args[1].(*DInt))
step := int64(1)
if len(args) > 2 {
step = int64(*args[2].(*DInt))
}
if step == 0 {
return nil, errStepCannotBeZero
}
return &seriesValueGenerator{start, start, stop, step}, nil
}
// ColumnTypes implements the ValueGenerator interface.
func (s *seriesValueGenerator) ColumnTypes() TTuple { return TTuple{TypeInt} }
// Start implements the ValueGenerator interface.
func (s *seriesValueGenerator) Start() error { return nil }
// Close implements the ValueGenerator interface.
func (s *seriesValueGenerator) Close() {}
// Next implements the ValueGenerator interface.
func (s *seriesValueGenerator) Next() (bool, error) {
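// Stop once the next value would move past the bound in the direction of the step.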
if s.step < 0 && (s.start < s.stop) {
return false, nil
}
if s.step > 0 && (s.stop < s.start) {
return false, nil
}
s.value = s.start
s.start += s.step
return true, nil
}
// Values implements the ValueGenerator interface.
func (s *seriesValueGenerator) Values() DTuple {
return DTuple{NewDInt(DInt(s.value))}
}
| {
return Builtin{
impure: true,
class: GeneratorClass,
Types: in,
ReturnType: TTable{Cols: ret},
generator: g,
fn: func(ctx *EvalContext, args DTuple) (Datum, error) {
gen, err := g(ctx, args)
if err != nil {
return nil, err
}
return &DTable{gen}, nil
},
category: categoryCompatibility,
}
} |
nft.go | package service
import (
"database/sql"
"encoding/hex"
"errors"
"fmt"
"sensiblequery/dao/clickhouse"
"sensiblequery/logger"
"sensiblequery/model"
"strconv"
"github.com/go-redis/redis/v8"
"go.uber.org/zap"
)
// "height, codehash, genesis, code_type, nft_idx, in_data_value, out_data_value, invalue, outvalue, blkid"
func nftInfoResultSRF(rows *sql.Rows) (interface{}, error) |
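// getNFTMetaInfo enriches each NFT entry with supply and metadata fields fetched from redis in a single pipeline.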
func getNFTMetaInfo(nftsRsp []*model.NFTInfoResp) {
pipe := rdb.Pipeline()
nftinfoCmds := make([]*redis.StringStringMapCmd, 0)
for _, nft := range nftsRsp {
// nftinfo of each token
key, _ := hex.DecodeString(nft.CodeHashHex + nft.GenesisHex)
nftinfoCmds = append(nftinfoCmds, pipe.HGetAll(ctx, "nI"+string(key)+"0"))
}
_, err := pipe.Exec(ctx)
if err != nil && err != redis.Nil {
panic(err)
}
for idx, nft := range nftsRsp {
nftinfo, err := nftinfoCmds[idx].Result()
if err == redis.Nil {
continue
} else if err != nil {
logger.Log.Info("getNFTDecimal redis failed", zap.Error(err))
}
supply, _ := strconv.Atoi(nftinfo["supply"])
nft.Supply = supply
metavout, _ := strconv.Atoi(nftinfo["metavout"])
nft.MetaOutputIndex = metavout
nft.MetaTxIdHex = hex.EncodeToString([]byte(nftinfo["metatxid"]))
nft.SensibleIdHex = hex.EncodeToString([]byte(nftinfo["sensibleid"]))
}
}
func ListNFTInfoByGenesis(codeHashHex, genesisHex string) (nftRsp *model.NFTInfoResp, err error) {
psql := fmt.Sprintf(`
SELECT codehash, genesis, count(1), sum(in_times), sum(out_times), sum(in_satoshi), sum(out_satoshi) FROM (
SELECT codehash, genesis, nft_idx,
sum(in_data_value) AS in_times , sum(out_data_value) AS out_times,
sum(invalue) AS in_satoshi , sum(outvalue) AS out_satoshi FROM blk_codehash_height
WHERE code_type = 3 AND codehash = unhex('%s') AND genesis = unhex('%s')
GROUP BY codehash, genesis, nft_idx
)
GROUP BY codehash, genesis
ORDER BY count(1) DESC
`, codeHashHex, genesisHex)
nftsRsp, err := GetNFTInfoBySQL(psql)
if err != nil {
return
}
if len(nftsRsp) > 0 {
getNFTMetaInfo(nftsRsp)
return nftsRsp[0], nil
}
return nil, errors.New("not exist")
}
func GetNFTSummary(codeHashHex string) (nftsRsp []*model.NFTInfoResp, err error) {
psql := fmt.Sprintf(`
SELECT codehash, genesis, count(1), sum(in_times), sum(out_times), sum(in_satoshi), sum(out_satoshi) FROM (
SELECT codehash, genesis, nft_idx,
sum(in_data_value) AS in_times , sum(out_data_value) AS out_times,
sum(invalue) AS in_satoshi , sum(outvalue) AS out_satoshi FROM blk_codehash_height
WHERE code_type = 3 AND codehash = unhex('%s')
GROUP BY codehash, genesis, nft_idx
)
GROUP BY codehash, genesis
ORDER BY count(1) DESC
`, codeHashHex)
nftsRsp, err = GetNFTInfoBySQL(psql)
if err != nil {
return
}
getNFTMetaInfo(nftsRsp)
return
}
func GetNFTInfo() (nftsRsp []*model.NFTInfoResp, err error) {
psql := `
SELECT codehash, genesis, count(1), sum(in_times), sum(out_times), sum(in_satoshi), sum(out_satoshi) FROM (
SELECT codehash, genesis, nft_idx,
sum(in_data_value) AS in_times , sum(out_data_value) AS out_times,
sum(invalue) AS in_satoshi , sum(outvalue) AS out_satoshi FROM blk_codehash_height
WHERE code_type = 3
GROUP BY codehash, genesis, nft_idx
)
GROUP BY codehash, genesis
ORDER BY count(1) DESC
`
nftsRsp, err = GetNFTInfoBySQL(psql)
if err != nil {
return
}
getNFTMetaInfo(nftsRsp)
return
}
func GetNFTInfoBySQL(psql string) (blksRsp []*model.NFTInfoResp, err error) {
blksRet, err := clickhouse.ScanAll(psql, nftInfoResultSRF)
if err != nil {
logger.Log.Info("query blk failed", zap.Error(err))
return nil, err
}
if blksRet == nil {
return nil, errors.New("not exist")
}
blocks := blksRet.([]*model.NFTInfoDO)
for _, block := range blocks {
blksRsp = append(blksRsp, &model.NFTInfoResp{
CodeHashHex: hex.EncodeToString(block.CodeHash),
GenesisHex: hex.EncodeToString(block.Genesis),
Count: int(block.Count),
InTimes: int(block.InTimes),
OutTimes: int(block.OutTimes),
InSatoshi: int(block.InSatoshi),
OutSatoshi: int(block.OutSatoshi),
})
}
return
}
| {
var ret model.NFTInfoDO
err := rows.Scan(&ret.CodeHash, &ret.Genesis, &ret.Count, &ret.InTimes, &ret.OutTimes, &ret.InSatoshi, &ret.OutSatoshi)
if err != nil {
return nil, err
}
return &ret, nil
} |
eds_test.go | // Copyright Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package xds_test
import (
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"reflect"
"runtime"
"strconv"
"strings"
"sync"
"testing"
"time"
endpoint "github.com/envoyproxy/go-control-plane/envoy/config/endpoint/v3"
uatomic "go.uber.org/atomic"
"istio.io/istio/pilot/pkg/model"
"istio.io/istio/pilot/pkg/networking"
"istio.io/istio/pilot/pkg/xds"
v3 "istio.io/istio/pilot/pkg/xds/v3"
"istio.io/istio/pkg/adsc"
"istio.io/istio/pkg/config/host"
"istio.io/istio/pkg/config/protocol"
"istio.io/istio/pkg/config/schema/gvk"
"istio.io/istio/pkg/test/env"
)
// The connect and reconnect tests are removed - ADS already has coverage, and the
// StreamEndpoints is not used in 1.0+
const (
asdcLocality = "region1/zone1/subzone1"
asdc2Locality = "region2/zone2/subzone2"
edsIncSvc = "eds.test.svc.cluster.local"
edsIncVip = "10.10.1.2"
)
func TestIncrementalPush(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{ConfigString: mustReadFile(t, "tests/testdata/config/destination-rule-all.yaml")})
ads := s.Connect(nil, nil, watchAll)
t.Run("Full Push", func(t *testing.T) {
s.Discovery.Push(&model.PushRequest{Full: true})
if _, err := ads.Wait(time.Second*5, watchAll...); err != nil {
t.Fatal(err)
}
})
t.Run("Incremental Push", func(t *testing.T) {
ads.WaitClear()
s.Discovery.Push(&model.PushRequest{Full: false})
if err := ads.WaitSingle(time.Second*5, v3.EndpointType, v3.ClusterType); err != nil {
t.Fatal(err)
}
})
t.Run("Incremental Push with updated services", func(t *testing.T) {
ads.WaitClear()
s.Discovery.Push(&model.PushRequest{
Full: false,
ConfigsUpdated: map[model.ConfigKey]struct{}{
{Name: "destall.default.svc.cluster.local", Namespace: "testns", Kind: gvk.ServiceEntry}: {},
},
})
if err := ads.WaitSingle(time.Second*5, v3.EndpointType, v3.ClusterType); err != nil {
t.Fatal(err)
}
})
t.Run("Full Push with updated services", func(t *testing.T) {
ads.WaitClear()
s.Discovery.Push(&model.PushRequest{
Full: true,
ConfigsUpdated: map[model.ConfigKey]struct{}{
{Name: "foo.bar", Namespace: "default", Kind: gvk.ServiceEntry}: {},
{Name: "destall", Namespace: "testns", Kind: gvk.DestinationRule}: {},
},
})
if _, err := ads.Wait(time.Second*5, watchAll...); err != nil {
t.Fatal(err)
}
if len(ads.GetEndpoints()) < 3 {
t.Fatalf("Expected a full EDS update, but got: %v", ads.GetEndpoints())
}
})
t.Run("Full Push without updated services", func(t *testing.T) {
ads.WaitClear()
s.Discovery.Push(&model.PushRequest{
Full: true,
ConfigsUpdated: map[model.ConfigKey]struct{}{
{Name: "destall", Namespace: "testns", Kind: gvk.DestinationRule}: {},
},
})
if _, err := ads.Wait(time.Second*5, v3.ClusterType, v3.EndpointType); err != nil {
t.Fatal(err)
}
if len(ads.GetEndpoints()) < 3 {
t.Fatalf("Expected a full EDS update, but got: %v", ads.GetEndpoints())
}
})
}
func TestEds(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{
ConfigString: mustReadFile(t, "tests/testdata/config/destination-rule-locality.yaml"),
DiscoveryServerModifier: func(s *xds.DiscoveryServer) {
addUdsEndpoint(s)
// enable locality load balancing and add relevant endpoints in order to test
addLocalityEndpoints(s, "locality.cluster.local")
addLocalityEndpoints(s, "locality-no-outlier-detection.cluster.local")
// Add the test ads clients to list of service instances in order to test the context dependent locality coloring.
addTestClientEndpoints(s)
s.MemRegistry.AddHTTPService(edsIncSvc, edsIncVip, 8080)
s.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.1", "hello-sa", "v1"))
},
})
adscConn := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.10"}}, nil, watchAll)
adscConn2 := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.11"}}, nil, watchAll)
t.Run("TCPEndpoints", func(t *testing.T) {
testTCPEndpoints("127.0.0.1", adscConn, t)
})
t.Run("edsz", func(t *testing.T) {
testEdsz(t, s, "test-1.default")
})
t.Run("LocalityPrioritizedEndpoints", func(t *testing.T) {
testLocalityPrioritizedEndpoints(adscConn, adscConn2, t)
})
t.Run("UDSEndpoints", func(t *testing.T) {
testUdsEndpoints(adscConn, t)
})
t.Run("PushIncremental", func(t *testing.T) {
edsUpdateInc(s, adscConn, t)
})
t.Run("Push", func(t *testing.T) {
edsUpdates(s, adscConn, t)
})
t.Run("MultipleRequest", func(t *testing.T) {
multipleRequest(s, false, 20, 5, 25*time.Second, nil, t)
})
// 5 pushes for 20 clients, using EDS incremental only.
t.Run("MultipleRequestIncremental", func(t *testing.T) {
multipleRequest(s, true, 20, 5, 25*time.Second, nil, t)
})
t.Run("CDSSave", func(t *testing.T) {
// Moved from cds_test, using new client
clusters := adscConn.GetClusters()
if len(clusters) == 0 {
t.Error("No clusters in ADS response")
}
strResponse, _ := json.MarshalIndent(clusters, " ", " ")
_ = os.WriteFile(env.IstioOut+"/cdsv2_sidecar.json", strResponse, 0o644)
})
}
// newEndpointWithAccount is a helper for IstioEndpoint creation. Creates endpoints with
// port name "http", with the given IP, service account and a 'version' label.
// nolint: unparam
func newEndpointWithAccount(ip, account, version string) []*model.IstioEndpoint {
return []*model.IstioEndpoint{
{
Address: ip,
ServicePortName: "http-main",
EndpointPort: 80,
Labels: map[string]string{"version": version},
ServiceAccount: account,
},
}
}
func TestTunnelServerEndpointEds(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
s.Discovery.MemRegistry.AddHTTPService(edsIncSvc, edsIncVip, 8080)
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
[]*model.IstioEndpoint{
{
Address: "127.0.0.1",
ServicePortName: "http-main",
EndpointPort: 80,
// Labels: map[string]string{"version": version},
ServiceAccount: "hello-sa",
TunnelAbility: networking.MakeTunnelAbility(networking.H2Tunnel),
},
})
t.Run("TestClientWantsTunnelEndpoints", func(t *testing.T) {
t.Helper()
adscConn1 := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.10"}, Metadata: &model.NodeMetadata{
ProxyConfig: &model.NodeMetaProxyConfig{
ProxyMetadata: map[string]string{
"tunnel": networking.H2TunnelTypeName,
},
},
}}, nil, watchAll)
testTunnelEndpoints("127.0.0.1", 15009, adscConn1, t)
})
t.Run("TestClientWantsNoTunnelEndpoints", func(t *testing.T) {
t.Helper()
adscConn2 := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.11"}, Metadata: &model.NodeMetadata{
ProxyConfig: &model.NodeMetaProxyConfig{},
}}, nil, watchAll)
testTunnelEndpoints("127.0.0.1", 80, adscConn2, t)
})
}
func TestNoTunnelServerEndpointEds(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
// Add the test ads clients to list of service instances in order to test the context dependent locality coloring.
addTestClientEndpoints(s.Discovery)
s.Discovery.MemRegistry.AddHTTPService(edsIncSvc, edsIncVip, 8080)
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
[]*model.IstioEndpoint{
{
Address: "127.0.0.1",
ServicePortName: "http-main",
EndpointPort: 80,
// Labels: map[string]string{"version": version},
ServiceAccount: "hello-sa",
// No Tunnel Support at this endpoint.
TunnelAbility: networking.MakeTunnelAbility(),
},
})
t.Run("TestClientWantsTunnelEndpoints", func(t *testing.T) {
adscConn := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.10"}, Metadata: &model.NodeMetadata{
ProxyConfig: &model.NodeMetaProxyConfig{
ProxyMetadata: map[string]string{
"tunnel": networking.H2TunnelTypeName,
},
},
}}, nil, watchAll)
testTunnelEndpoints("127.0.0.1", 80, adscConn, t)
})
t.Run("TestClientWantsNoTunnelEndpoints", func(t *testing.T) {
adscConn := s.Connect(&model.Proxy{IPAddresses: []string{"10.10.10.11"}, Metadata: &model.NodeMetadata{}}, nil, watchAll)
testTunnelEndpoints("127.0.0.1", 80, adscConn, t)
})
}
func mustReadFile(t *testing.T, fpaths ...string) string {
result := ""
for _, fpath := range fpaths {
if !strings.HasPrefix(fpath, ".") {
fpath = filepath.Join(env.IstioSrc, fpath)
}
bytes, err := os.ReadFile(fpath)
if err != nil {
t.Fatal(err)
}
result += "---\n"
result += string(bytes)
}
return result
}
func mustReadfolder(t *testing.T, folder string) string {
result := ""
fpathRoot := folder
if !strings.HasPrefix(fpathRoot, ".") {
fpathRoot = filepath.Join(env.IstioSrc, folder)
}
f, err := os.ReadDir(fpathRoot)
if err != nil {
t.Fatal(err)
}
for _, fpath := range f {
bytes, err := os.ReadFile(filepath.Join(fpathRoot, fpath.Name()))
if err != nil {
t.Fatal(err)
}
result += "---\n"
result += string(bytes)
}
return result
}
func TestEdsWeightedServiceEntry(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{ConfigString: mustReadFile(t, "tests/testdata/config/static-weighted-se.yaml")})
adscConn := s.Connect(nil, nil, watchEds)
endpoints := adscConn.GetEndpoints()
lbe, f := endpoints["outbound|80||weighted.static.svc.cluster.local"]
if !f || len(lbe.Endpoints) == 0 {
t.Fatalf("No lb endpoints for %v, %v", "outbound|80||weighted.static.svc.cluster.local", adscConn.EndpointsJSON())
}
expected := map[string]uint32{
"a": 9, // sum of 1 and 8
"b": 3,
"3.3.3.3": 1, // no weight provided is normalized to 1
"2.2.2.2": 8,
"1.1.1.1": 3,
}
got := make(map[string]uint32)
for _, lbe := range lbe.Endpoints {
got[lbe.Locality.Region] = lbe.LoadBalancingWeight.Value
for _, e := range lbe.LbEndpoints {
got[e.GetEndpoint().Address.GetSocketAddress().Address] = e.LoadBalancingWeight.Value
}
}
if !reflect.DeepEqual(expected, got) {
t.Errorf("Expected LB weights %v got %v", expected, got)
}
}
var (
watchEds = []string{v3.ClusterType, v3.EndpointType}
watchAll = []string{v3.ClusterType, v3.EndpointType, v3.ListenerType, v3.RouteType}
)
func TestEDSOverlapping(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
addOverlappingEndpoints(s)
adscon := s.Connect(nil, nil, watchEds)
testOverlappingPorts(s, adscon, t)
}
// Validates the behavior when Service resolution type is updated after initial EDS push.
// See https://github.com/istio/istio/issues/18355 for more details.
func TestEDSServiceResolutionUpdate(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
addEdsCluster(s, "edsdns.svc.cluster.local", "http", "10.0.0.53", 8080)
addEdsCluster(s, "other.local", "http", "1.1.1.1", 8080)
adscConn := s.Connect(nil, nil, watchAll)
// Validate that endpoints are pushed correctly.
testEndpoints("10.0.0.53", "outbound|8080||edsdns.svc.cluster.local", adscConn, t)
// Now update the service resolution to DNSLB with a DNS endpoint.
updateServiceResolution(s)
if _, err := adscConn.Wait(5*time.Second, v3.EndpointType); err != nil {
t.Fatal(err)
}
// Validate that endpoints are skipped.
lbe := adscConn.GetEndpoints()["outbound|8080||edsdns.svc.cluster.local"]
if lbe != nil && len(lbe.Endpoints) > 0 {
t.Fatalf("endpoints not expected for %s, but got %v", "edsdns.svc.cluster.local", adscConn.EndpointsJSON())
}
}
// Validate that endpoints of a service flip-flopping between 1 and 0 do not trigger a full push.
func TestEndpointFlipFlops(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
addEdsCluster(s, "flipflop.com", "http", "10.0.0.53", 8080)
adscConn := s.Connect(nil, nil, watchAll)
// Validate that endpoints are pushed correctly.
testEndpoints("10.0.0.53", "outbound|8080||flipflop.com", adscConn, t)
// Clear the endpoint and validate it does not trigger a full push.
s.Discovery.MemRegistry.SetEndpoints("flipflop.com", "", []*model.IstioEndpoint{})
upd, _ := adscConn.Wait(5*time.Second, v3.EndpointType)
if contains(upd, "cds") {
t.Fatalf("Expecting only EDS update as part of a partial push. But received CDS also %v", upd)
}
if len(upd) > 0 && !contains(upd, v3.EndpointType) {
t.Fatalf("Expecting EDS push as part of a partial push. But received %v", upd)
}
lbe := adscConn.GetEndpoints()["outbound|8080||flipflop.com"]
if len(lbe.Endpoints) != 0 {
t.Fatalf("There should be no endpoints for outbound|8080||flipflop.com. Endpoints:\n%v", adscConn.EndpointsJSON())
}
// Validate that keys in service still exist in EndpointShardsByService - this prevents full push.
if len(s.Discovery.EndpointShardsByService["flipflop.com"]) == 0 {
t.Fatalf("Expected service key %s to be present in EndpointShardsByService. But missing %v", "flipflop.com", s.Discovery.EndpointShardsByService)
}
// Set the endpoints again and validate it does not trigger full push.
s.Discovery.MemRegistry.SetEndpoints("flipflop.com", "",
[]*model.IstioEndpoint{
{
Address: "10.10.1.1",
ServicePortName: "http",
EndpointPort: 8080,
},
})
upd, _ = adscConn.Wait(5*time.Second, v3.EndpointType)
if contains(upd, v3.ClusterType) {
t.Fatalf("expecting only EDS update as part of a partial push. But received CDS also %+v", upd)
}
if len(upd) > 0 && !contains(upd, v3.EndpointType) {
t.Fatalf("expecting EDS push as part of a partial push. But did not receive %+v", upd)
}
testEndpoints("10.10.1.1", "outbound|8080||flipflop.com", adscConn, t)
}
// Validate that deleting a service clears entries from EndpointShardsByService.
func TestDeleteService(t *testing.T) {
s := xds.NewFakeDiscoveryServer(t, xds.FakeOptions{})
addEdsCluster(s, "removeservice.com", "http", "10.0.0.53", 8080)
adscConn := s.Connect(nil, nil, watchEds)
// Validate that endpoints are pushed correctly.
testEndpoints("10.0.0.53", "outbound|8080||removeservice.com", adscConn, t)
s.Discovery.MemRegistry.RemoveService("removeservice.com")
if len(s.Discovery.EndpointShardsByService["removeservice.com"]) != 0 {
t.Fatalf("Expected service key %s to be deleted in EndpointShardsByService. But is still there %v",
"removeservice.com", s.Discovery.EndpointShardsByService)
}
}
func TestUpdateServiceAccount(t *testing.T) {
cluster1Endppoints := []*model.IstioEndpoint{
{Address: "10.172.0.1", ServiceAccount: "sa1"},
{Address: "10.172.0.2", ServiceAccount: "sa-vm1"},
}
testCases := []struct {
name string
clusterID string
endpoints []*model.IstioEndpoint
expect bool
}{
{
name: "added new endpoint",
clusterID: "c1",
endpoints: append(cluster1Endppoints, &model.IstioEndpoint{Address: "10.172.0.3", ServiceAccount: "sa1"}),
expect: false,
},
{
name: "added new sa",
clusterID: "c1",
endpoints: append(cluster1Endppoints, &model.IstioEndpoint{Address: "10.172.0.3", ServiceAccount: "sa2"}),
expect: true,
},
{
name: "updated endpoints address",
clusterID: "c1",
endpoints: []*model.IstioEndpoint{
{Address: "10.172.0.5", ServiceAccount: "sa1"},
{Address: "10.172.0.2", ServiceAccount: "sa-vm1"},
},
expect: false,
},
{
name: "deleted one endpoint with unique sa",
clusterID: "c1",
endpoints: []*model.IstioEndpoint{
{Address: "10.172.0.1", ServiceAccount: "sa1"},
},
expect: true,
},
{
name: "deleted one endpoint with duplicate sa",
clusterID: "c1",
endpoints: []*model.IstioEndpoint{
{Address: "10.172.0.2", ServiceAccount: "sa-vm1"},
},
expect: false,
},
{
name: "deleted endpoints",
clusterID: "c1",
endpoints: nil,
expect: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
s := new(xds.DiscoveryServer)
originalEndpointsShard := &xds.EndpointShards{
Shards: map[string][]*model.IstioEndpoint{
"c1": cluster1Endppoints,
"c2": {{Address: "10.244.0.1", ServiceAccount: "sa1"}, {Address: "10.244.0.2", ServiceAccount: "sa-vm2"}},
},
ServiceAccounts: map[string]struct{}{
"sa1": {},
"sa-vm1": {},
"sa-vm2": {},
},
}
originalEndpointsShard.Shards[tc.clusterID] = tc.endpoints
ret := s.UpdateServiceAccount(originalEndpointsShard, "test-svc")
if ret != tc.expect {
t.Errorf("expect UpdateServiceAccount %v, but got %v", tc.expect, ret)
}
})
}
}
func fullPush(s *xds.FakeDiscoveryServer) {
s.Discovery.Push(&model.PushRequest{Full: true})
}
func addTestClientEndpoints(server *xds.DiscoveryServer) {
server.MemRegistry.AddService("test-1.default", &model.Service{
Hostname: "test-1.default",
Ports: model.PortList{
{
Name: "http",
Port: 80,
Protocol: protocol.HTTP,
},
},
})
server.MemRegistry.AddInstance("test-1.default", &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: "10.10.10.10",
ServicePortName: "http",
EndpointPort: 80,
Locality: model.Locality{Label: asdcLocality},
},
ServicePort: &model.Port{
Name: "http",
Port: 80,
Protocol: protocol.HTTP,
},
})
server.MemRegistry.AddInstance("test-1.default", &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: "10.10.10.11",
ServicePortName: "http",
EndpointPort: 80,
Locality: model.Locality{Label: asdc2Locality},
},
ServicePort: &model.Port{
Name: "http",
Port: 80,
Protocol: protocol.HTTP,
},
})
}
// Verify server sends the endpoint. This checks for a single endpoint with the given
// address.
func testTCPEndpoints(expected string, adsc *adsc.ADSC, t *testing.T) {
t.Helper()
testEndpoints(expected, "outbound|8080||eds.test.svc.cluster.local", adsc, t)
}
// Verify server sends the endpoint. This checks for a single endpoint with the given
// address.
func testEndpoints(expected string, cluster string, adsc *adsc.ADSC, t *testing.T) {
t.Helper()
lbe, f := adsc.GetEndpoints()[cluster]
if !f || len(lbe.Endpoints) == 0 {
t.Fatalf("No lb endpoints for %v, %v", cluster, adsc.EndpointsJSON())
}
var found []string
for _, lbe := range lbe.Endpoints {
for _, e := range lbe.LbEndpoints {
addr := e.GetEndpoint().Address.GetSocketAddress().Address
found = append(found, addr)
if expected == addr {
return
}
}
}
t.Fatalf("Expecting %s got %v", expected, found)
}
// Verify server sends the tunneled endpoints.
// nolint: unparam
func testTunnelEndpoints(expectIP string, expectPort uint32, adsc *adsc.ADSC, t *testing.T) {
t.Helper()
cluster := "outbound|8080||eds.test.svc.cluster.local"
allClusters := adsc.GetEndpoints()
cla, f := allClusters[cluster]
if !f || len(cla.Endpoints) == 0 {
t.Fatalf("No lb endpoints for %v, %v", cluster, adsc.EndpointsJSON())
}
var found []string
for _, lbe := range cla.Endpoints {
for _, e := range lbe.LbEndpoints {
addr := e.GetEndpoint().Address.GetSocketAddress().Address
port := e.GetEndpoint().Address.GetSocketAddress().GetPortValue()
found = append(found, fmt.Sprintf("%s:%d", addr, port))
if expectIP == addr && expectPort == port {
return
}
}
}
t.Errorf("REACH HERE cannot find %s:%d", expectIP, expectPort)
t.Fatalf("Expecting address %s:%d got %v", expectIP, expectPort, found)
}
func testLocalityPrioritizedEndpoints(adsc *adsc.ADSC, adsc2 *adsc.ADSC, t *testing.T) {
endpoints1 := adsc.GetEndpoints()
endpoints2 := adsc2.GetEndpoints()
verifyLocalityPriorities(asdcLocality, endpoints1["outbound|80||locality.cluster.local"].GetEndpoints(), t)
verifyLocalityPriorities(asdc2Locality, endpoints2["outbound|80||locality.cluster.local"].GetEndpoints(), t)
// No outlier detection specified for this cluster, so we shouldn't apply priority.
verifyNoLocalityPriorities(endpoints1["outbound|80||locality-no-outlier-detection.cluster.local"].GetEndpoints(), t)
verifyNoLocalityPriorities(endpoints2["outbound|80||locality-no-outlier-detection.cluster.local"].GetEndpoints(), t)
}
// Tests that Services with multiple ports sharing the same port number are properly sent endpoints.
// Real world use case for this is kube-dns, which uses port 53 for TCP and UDP.
func testOverlappingPorts(s *xds.FakeDiscoveryServer, adsc *adsc.ADSC, t *testing.T) {
// Test initial state
testEndpoints("10.0.0.53", "outbound|53||overlapping.cluster.local", adsc, t)
s.Discovery.Push(&model.PushRequest{
Full: true,
ConfigsUpdated: map[model.ConfigKey]struct{}{{
Kind: gvk.ServiceEntry,
Name: "overlapping.cluster.local",
}: {}},
})
_, _ = adsc.Wait(5 * time.Second)
// After the incremental push, we should still see the endpoint
testEndpoints("10.0.0.53", "outbound|53||overlapping.cluster.local", adsc, t)
}
func verifyNoLocalityPriorities(eps []*endpoint.LocalityLbEndpoints, t *testing.T) {
for _, ep := range eps {
if ep.GetPriority() != 0 {
t.Errorf("expected no locality priorities to apply, got priority %v.", ep.GetPriority())
}
}
}
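// Expected priorities: 0 = same region/zone/subzone as the proxy, 1 = same zone but different subzone, 2 = same region but different zone, 3 = different region.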
func verifyLocalityPriorities(proxyLocality string, eps []*endpoint.LocalityLbEndpoints, t *testing.T) {
items := strings.SplitN(proxyLocality, "/", 3)
region, zone, subzone := items[0], items[1], items[2]
for _, ep := range eps {
if ep.GetLocality().Region == region {
if ep.GetLocality().Zone == zone {
if ep.GetLocality().SubZone == subzone {
if ep.GetPriority() != 0 {
t.Errorf("expected endpoint pool from same locality to have priority of 0, got %v", ep.GetPriority())
}
} else if ep.GetPriority() != 1 {
t.Errorf("expected endpoint pool from a different subzone to have priority of 1, got %v", ep.GetPriority())
}
} else {
if ep.GetPriority() != 2 {
t.Errorf("expected endpoint pool from a different zone to have priority of 2, got %v", ep.GetPriority())
}
}
} else {
if ep.GetPriority() != 3 {
t.Errorf("expected endpoint pool from a different region to have priority of 3, got %v", ep.GetPriority())
}
}
}
}
// Verify server sends UDS endpoints
func testUdsEndpoints(adsc *adsc.ADSC, t *testing.T) {
// Check the UDS endpoint (this used to be a separate test, but it used an old, unused GRPC method).
// The new test also verifies CDS is pushing the UDS cluster, since adsc.eds is
// populated using the CDS response.
lbe, f := adsc.GetEndpoints()["outbound|0||localuds.cluster.local"]
if !f || len(lbe.Endpoints) == 0 {
t.Error("No UDS lb endpoints")
} else {
ep0 := lbe.Endpoints[0]
if len(ep0.LbEndpoints) != 1 {
t.Fatalf("expected 1 LB endpoint but got %d", len(ep0.LbEndpoints))
}
lbep := ep0.LbEndpoints[0]
path := lbep.GetEndpoint().GetAddress().GetPipe().GetPath()
if path != udsPath {
t.Fatalf("expected Pipe to %s, got %s", udsPath, path)
}
}
}
// Update
func edsUpdates(s *xds.FakeDiscoveryServer, adsc *adsc.ADSC, t *testing.T) {
// Old style (non-incremental)
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.3", "hello-sa", "v1"))
xds.AdsPushAll(s.Discovery)
// will trigger recompute and push
if _, err := adsc.Wait(5*time.Second, v3.EndpointType); err != nil {
t.Fatal("EDS push failed", err)
}
testTCPEndpoints("127.0.0.3", adsc, t)
}
// edsFullUpdateCheck checks for updates required in a full push after the CDS update
func edsFullUpdateCheck(adsc *adsc.ADSC, t *testing.T) {
t.Helper()
if upd, err := adsc.Wait(15*time.Second, watchAll...); err != nil {
t.Fatal("Expecting CDS, EDS, LDS, and RDS update as part of a full push", err, upd)
}
}
// This test must be run in isolation, can't be parallelized with any other v2 test.
// It makes different kind of updates, and checks that incremental or full push happens.
// In particular:
// - just endpoint changes -> incremental
// - service account changes -> full ( in future: CDS only )
// - label changes -> full
func edsUpdateInc(s *xds.FakeDiscoveryServer, adsc *adsc.ADSC, t *testing.T) {
// TODO: set endpoints for a different cluster (new shard)
// Verify initial state
testTCPEndpoints("127.0.0.1", adsc, t)
adsc.WaitClear() // make sure there are no pending pushes.
// Equivalent with the event generated by K8S watching the Service.
// Will trigger a push.
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.2", "hello-sa", "v1"))
upd, err := adsc.Wait(5*time.Second, v3.EndpointType)
if err != nil {
t.Fatal("Incremental push failed", err)
}
if contains(upd, v3.ClusterType) {
t.Fatal("Expecting EDS only update, got", upd)
}
testTCPEndpoints("127.0.0.2", adsc, t)
// Update the endpoint with different SA - expect full
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.3", "account2", "v1"))
edsFullUpdateCheck(adsc, t)
testTCPEndpoints("127.0.0.3", adsc, t)
// Update the endpoint again, no SA change - expect incremental
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.4", "account2", "v1"))
upd, err = adsc.Wait(5 * time.Second)
if err != nil {
t.Fatal("Incremental push failed", err)
}
if !reflect.DeepEqual(upd, []string{v3.EndpointType}) {
t.Fatal("Expecting EDS only update, got", upd)
}
testTCPEndpoints("127.0.0.4", adsc, t)
// Update the endpoint to original SA - expect full
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.2", "hello-sa", "v1"))
edsFullUpdateCheck(adsc, t)
testTCPEndpoints("127.0.0.2", adsc, t)
// Update the endpoint again, no label change - expect incremental
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "",
newEndpointWithAccount("127.0.0.5", "hello-sa", "v1"))
upd, err = adsc.Wait(5 * time.Second)
if err != nil {
t.Fatal("Incremental push failed", err)
}
if !reflect.DeepEqual(upd, []string{v3.EndpointType}) {
t.Fatal("Expecting EDS only update, got", upd) | testTCPEndpoints("127.0.0.5", adsc, t)
// Wipe out all endpoints - expect full
s.Discovery.MemRegistry.SetEndpoints(edsIncSvc, "", []*model.IstioEndpoint{})
if upd, err := adsc.Wait(15*time.Second, v3.EndpointType); err != nil {
t.Fatal("Expecting EDS update as part of a partial push", err, upd)
}
lbe := adsc.GetEndpoints()["outbound|8080||eds.test.svc.cluster.local"]
if len(lbe.Endpoints) != 0 {
t.Fatalf("There should be no endpoints for outbound|8080||eds.test.svc.cluster.local. Endpoints:\n%v", adsc.EndpointsJSON())
}
}
// Make a direct EDS grpc request to pilot, verify the result is as expected.
// This test includes a 'bad client' regression test, which fails to read on the
// stream.
func multipleRequest(s *xds.FakeDiscoveryServer, inc bool, nclients,
nPushes int, to time.Duration, _ map[string]string, t *testing.T) {
wgConnect := &sync.WaitGroup{}
wg := &sync.WaitGroup{}
errChan := make(chan error, nclients)
// Bad client - will not read any response. This triggers Write to block, which should
// be detected
// This is not using adsc, which consumes the events automatically.
ads := s.ConnectADS()
ads.Request(t, nil)
n := nclients
wg.Add(n)
wgConnect.Add(n)
rcvPush := uatomic.NewInt32(0)
rcvClients := uatomic.NewInt32(0)
for i := 0; i < n; i++ {
current := i
go func(id int) {
defer wg.Done()
// Connect and get initial response
adscConn := s.Connect(&model.Proxy{IPAddresses: []string{fmt.Sprintf("1.1.1.%d", id)}}, nil, nil)
_, err := adscConn.Wait(15*time.Second, v3.RouteType)
if err != nil {
errChan <- errors.New("failed to get initial rds: " + err.Error())
wgConnect.Done()
return
}
if len(adscConn.GetEndpoints()) == 0 {
errChan <- errors.New("no endpoints")
wgConnect.Done()
return
}
wgConnect.Done()
// Check we received all pushes
log.Println("Waiting for pushes ", id)
// Pushes may be merged so we may not get nPushes pushes
got, err := adscConn.Wait(15*time.Second, v3.EndpointType)
// If in incremental mode, shouldn't receive cds|rds|lds here
if inc {
for _, g := range got {
if g == "cds" || g == "rds" || g == "lds" {
errChan <- fmt.Errorf("should be eds incremental but received cds. %v %v",
err, id)
return
}
}
}
rcvPush.Inc()
if err != nil {
log.Println("Recv failed", err, id)
errChan <- fmt.Errorf("failed to receive a response in 15 s %v %v",
err, id)
return
}
log.Println("Received all pushes ", id)
rcvClients.Inc()
adscConn.Close()
}(current)
}
ok := waitTimeout(wgConnect, to)
if !ok {
t.Fatal("Failed to connect")
}
log.Println("Done connecting")
// All clients are connected - this can start pushing changes.
for j := 0; j < nPushes; j++ {
if inc {
// This will be throttled - we want to trigger a single push
s.Discovery.AdsPushAll(strconv.Itoa(j), &model.PushRequest{
Full: false,
ConfigsUpdated: map[model.ConfigKey]struct{}{{
Kind: gvk.ServiceEntry,
Name: edsIncSvc,
}: {}},
Push: s.Discovery.Env.PushContext,
})
} else {
xds.AdsPushAll(s.Discovery)
}
log.Println("Push done ", j)
}
ok = waitTimeout(wg, to)
if !ok {
t.Errorf("Failed to receive all responses %d %d", rcvClients.Load(), rcvPush.Load())
buf := make([]byte, 1<<16)
runtime.Stack(buf, true)
fmt.Printf("%s", buf)
}
close(errChan)
// moved from ads_test, which had a duplicated test.
for e := range errChan {
t.Error(e)
}
}
func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool {
c := make(chan struct{})
go func() {
defer close(c)
wg.Wait()
}()
select {
case <-c:
return true
case <-time.After(timeout):
return false
}
}
const udsPath = "/var/run/test/socket"
func addUdsEndpoint(s *xds.DiscoveryServer) {
s.MemRegistry.AddService("localuds.cluster.local", &model.Service{
Hostname: "localuds.cluster.local",
Ports: model.PortList{
{
Name: "grpc",
Port: 0,
Protocol: protocol.GRPC,
},
},
MeshExternal: true,
Resolution: model.ClientSideLB,
})
s.MemRegistry.AddInstance("localuds.cluster.local", &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: udsPath,
EndpointPort: 0,
ServicePortName: "grpc",
Locality: model.Locality{Label: "localhost"},
Labels: map[string]string{"socket": "unix"},
},
ServicePort: &model.Port{
Name: "grpc",
Port: 0,
Protocol: protocol.GRPC,
},
})
pushReq := &model.PushRequest{
Full: true,
Reason: []model.TriggerReason{model.ConfigUpdate},
}
s.ConfigUpdate(pushReq)
}
func addLocalityEndpoints(server *xds.DiscoveryServer, hostname host.Name) {
server.MemRegistry.AddService(hostname, &model.Service{
Hostname: hostname,
Ports: model.PortList{
{
Name: "http",
Port: 80,
Protocol: protocol.HTTP,
},
},
})
localities := []string{
"region1/zone1/subzone1",
"region1/zone1/subzone2",
"region1/zone2/subzone1",
"region2/zone1/subzone1",
"region2/zone1/subzone2",
"region2/zone2/subzone1",
"region2/zone2/subzone2",
}
for i, locality := range localities {
server.MemRegistry.AddInstance(hostname, &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: fmt.Sprintf("10.0.0.%v", i),
EndpointPort: 80,
ServicePortName: "http",
Locality: model.Locality{Label: locality},
},
ServicePort: &model.Port{
Name: "http",
Port: 80,
Protocol: protocol.HTTP,
},
})
}
}
// nolint: unparam
func addEdsCluster(s *xds.FakeDiscoveryServer, hostName string, portName string, address string, port int) {
s.Discovery.MemRegistry.AddService(host.Name(hostName), &model.Service{
Hostname: host.Name(hostName),
Ports: model.PortList{
{
Name: portName,
Port: port,
Protocol: protocol.HTTP,
},
},
})
s.Discovery.MemRegistry.AddInstance(host.Name(hostName), &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: address,
EndpointPort: uint32(port),
ServicePortName: portName,
},
ServicePort: &model.Port{
Name: portName,
Port: port,
Protocol: protocol.HTTP,
},
})
fullPush(s)
}
func updateServiceResolution(s *xds.FakeDiscoveryServer) {
s.Discovery.MemRegistry.AddService("edsdns.svc.cluster.local", &model.Service{
Hostname: "edsdns.svc.cluster.local",
Ports: model.PortList{
{
Name: "http",
Port: 8080,
Protocol: protocol.HTTP,
},
},
Resolution: model.DNSLB,
})
s.Discovery.MemRegistry.AddInstance("edsdns.svc.cluster.local", &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: "somevip.com",
EndpointPort: 8080,
ServicePortName: "http",
},
ServicePort: &model.Port{
Name: "http",
Port: 8080,
Protocol: protocol.HTTP,
},
})
fullPush(s)
}
func addOverlappingEndpoints(s *xds.FakeDiscoveryServer) {
s.Discovery.MemRegistry.AddService("overlapping.cluster.local", &model.Service{
Hostname: "overlapping.cluster.local",
Ports: model.PortList{
{
Name: "dns",
Port: 53,
Protocol: protocol.UDP,
},
{
Name: "tcp-dns",
Port: 53,
Protocol: protocol.TCP,
},
},
})
s.Discovery.MemRegistry.AddInstance("overlapping.cluster.local", &model.ServiceInstance{
Endpoint: &model.IstioEndpoint{
Address: "10.0.0.53",
EndpointPort: 53,
ServicePortName: "tcp-dns",
},
ServicePort: &model.Port{
Name: "tcp-dns",
Port: 53,
Protocol: protocol.TCP,
},
})
fullPush(s)
}
// Verify the endpoint debug interface is installed and returns some string.
// TODO: parse response, check if data captured matches what we expect.
// TODO: use this in integration tests.
// TODO: refine the output
// TODO: dump the ServiceInstances as well
func testEdsz(t *testing.T, s *xds.FakeDiscoveryServer, proxyID string) {
req, err := http.NewRequest("GET", "/debug/edsz?proxyID="+proxyID, nil)
if err != nil {
t.Fatal(err)
}
rr := httptest.NewRecorder()
debug := http.HandlerFunc(s.Discovery.Edsz)
debug.ServeHTTP(rr, req)
data, err := io.ReadAll(rr.Body)
if err != nil {
t.Fatalf("Failed to read /edsz")
}
statusStr := string(data)
if !strings.Contains(statusStr, "\"outbound|8080||eds.test.svc.cluster.local\"") {
t.Fatal("Mock eds service not found ", statusStr)
}
}
func contains(s []string, e string) bool {
for _, a := range s {
if a == e {
return true
}
}
return false
} | } |
CommunicationConfigurationService.ts | import { IWebPartContext} from "@microsoft/sp-webpart-base";
import { CommunicationServiceConfiguration, Constants, ICommunicationConfigurationService } from "./";
export class CommunicationConfigurationService implements ICommunicationConfigurationService {
private _currentConfiguration: CommunicationServiceConfiguration;
private _webPartContext: IWebPartContext;
public constructor(WebPartContext: IWebPartContext) { | public async getCurrentConfiguration(): Promise<CommunicationServiceConfiguration> {
if (!this._currentConfiguration) {
this._currentConfiguration = new CommunicationServiceConfiguration();
this._currentConfiguration.ClientId = Constants.ApplicationIdKey;
this._currentConfiguration.RedirectUri =
`${this._webPartContext.pageContext.web.absoluteUrl}${Constants.ApplicationRedirectUrl}`;
return this._currentConfiguration;
} else {
return Promise.resolve(this._currentConfiguration);
}
}
} | this._webPartContext = WebPartContext;
} |
nested_atomic_number_ops.rs | use query_engine_tests::*;
#[test_suite]
mod atomic_number_ops {
use indoc::indoc;
use query_engine_tests::{run_query, run_query_json};
fn schema_1() -> String {
let schema = indoc! {
r#"model TestModel {
#id(id, Int, @id)
uniq Int @unique
rel RelatedModel?
}
model RelatedModel {
#id(id, Int, @id)
field String
tm_id Int
tm TestModel @relation(fields: [tm_id], references: [id])
}"#
};
schema.to_owned()
}
// "An updateOne mutation with number operations on the top and updates on the child (inl. child)" should "handle id changes correctly"
#[connector_test(schema(schema_1), capabilities(UpdateableId))]
async fn update_number_ops_on_child(runner: Runner) -> TestResult<()> {
run_query!(
&runner,
r#"mutation {
createOneTestModel(
data: {
id: 1
uniq: 2
rel: { create: { id: 1, field: "field" } }
}
) {
id
}
}"#
);
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
updateOneTestModel(
where: { uniq: 2 }
data: {
id: { increment: 1 }
uniq: { multiply: 3 }
rel: {
update: {
field: { set: "updated" }
}
}
}
){
rel {
id
}
}
}"#),
@r###"{"data":{"updateOneTestModel":{"rel":{"id":1}}}}"###
);
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
updateOneTestModel(
where: { id: 2 }
data: {
id: { increment: 1 }
uniq: { multiply: 3 }
rel: {
update: {
field: { set: "updated 2" }
}
}
}
){
rel {
id
field
}
}
}"#),
@r###"{"data":{"updateOneTestModel":{"rel":{"id":1,"field":"updated 2"}}}}"###
);
Ok(())
}
fn schema_2() -> String {
let schema = indoc! {
r#"model TestModel {
#id(id, Int, @id)
uniq Int @unique
rel_id Int
rel RelatedModel @relation(fields: [rel_id], references: [id])
}
model RelatedModel {
#id(id, Int, @id)
field String
test TestModel[]
}"#
};
schema.to_owned()
}
//"An updateOne mutation with number operations on the top and updates on the child (inl. parent)" should "handle id changes correctly"
#[connector_test(schema(schema_2), capabilities(UpdateableId))]
async fn update_number_ops_on_parent(runner: Runner) -> TestResult<()> {
run_query!(
&runner,
r#"mutation {
createOneTestModel(
data: {
id: 1
uniq: 2
rel: { create: { id: 1, field: "field" } }
}
) {
id
}
}"#
);
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
updateOneTestModel(
where: { uniq: 2 }
data: {
id: { increment: 1 }
uniq: { multiply: 3 }
rel: {
update: {
field: { set: "updated" }
}
}
}
){
rel {
id
}
}
}"#),
@r###"{"data":{"updateOneTestModel":{"rel":{"id":1}}}}"###
);
insta::assert_snapshot!(
run_query!(&runner, r#"mutation {
updateOneTestModel(
where: { id: 2 }
data: {
id: { increment: 1 }
uniq: { multiply: 3 }
rel: {
update: {
field: { set: "updated 2" }
}
}
}
){
rel {
id
field
}
}
}"#),
@r###"{"data":{"updateOneTestModel":{"rel":{"id":1,"field":"updated 2"}}}}"###
);
Ok(())
}
fn schema_3() -> String {
let schema = indoc! {
r#"model TestModel {
#id(id, Int, @id)
rel RelatedModel?
}
model RelatedModel {
#id(id, Int, @id)
optInt Int? | };
schema.to_owned()
}
// "A nested updateOne mutation" should "correctly apply all number operations for Int"
#[connector_test(schema(schema_3), exclude(Cockroach))]
async fn nested_update_int_ops(runner: Runner) -> TestResult<()> {
create_test_model(&runner, 1, None, None).await?;
create_test_model(&runner, 2, Some(3), None).await?;
// Increment
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "increment", "10").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "increment", "10").await?,
@r###"{"optInt":13}"###
);
// Decrement
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "decrement", "10").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "decrement", "10").await?,
@r###"{"optInt":3}"###
);
// Multiply
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "multiply", "2").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "multiply", "2").await?,
@r###"{"optInt":6}"###
);
// Divide
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "divide", "3").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "divide", "3").await?,
@r###"{"optInt":2}"###
);
// Set
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "set", "5").await?,
@r###"{"optInt":5}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "set", "5").await?,
@r###"{"optInt":5}"###
);
// Set null
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "set", "null").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "set", "null").await?,
@r###"{"optInt":null}"###
);
Ok(())
}
// CockroachDB does not support the "divide" operator as is.
// See https://github.com/cockroachdb/cockroach/issues/41448.
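    // Because of that, the Cockroach-specific variant below mirrors nested_update_int_ops
    // but intentionally leaves out the "divide" assertions.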
#[connector_test(schema(schema_3), only(Cockroach))]
async fn nested_update_int_ops_cockroach(runner: Runner) -> TestResult<()> {
create_test_model(&runner, 1, None, None).await?;
create_test_model(&runner, 2, Some(3), None).await?;
// Increment
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "increment", "10").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "increment", "10").await?,
@r###"{"optInt":13}"###
);
// Decrement
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "decrement", "10").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "decrement", "10").await?,
@r###"{"optInt":3}"###
);
// Multiply
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "multiply", "2").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "multiply", "2").await?,
@r###"{"optInt":6}"###
);
// Set
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "set", "5").await?,
@r###"{"optInt":5}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "set", "5").await?,
@r###"{"optInt":5}"###
);
// Set null
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optInt", "set", "null").await?,
@r###"{"optInt":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optInt", "set", "null").await?,
@r###"{"optInt":null}"###
);
Ok(())
}
// "A nested updateOne mutation" should "correctly apply all number operations for Int"
#[connector_test(schema(schema_3), exclude(MongoDb))]
async fn nested_update_float_ops(runner: Runner) -> TestResult<()> {
create_test_model(&runner, 1, None, None).await?;
create_test_model(&runner, 2, None, Some("5.5")).await?;
// Increment
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "increment", "4.6").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "increment", "4.6").await?,
@r###"{"optFloat":10.1}"###
);
// Decrement
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "decrement", "4.6").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "decrement", "4.6").await?,
@r###"{"optFloat":5.5}"###
);
// Multiply
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "multiply", "2").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "multiply", "2").await?,
@r###"{"optFloat":11.0}"###
);
// Divide
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "divide", "2").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "divide", "2").await?,
@r###"{"optFloat":5.5}"###
);
// Set
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "set", "5.1").await?,
@r###"{"optFloat":5.1}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "set", "5.1").await?,
@r###"{"optFloat":5.1}"###
);
// Set null
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "set", "null").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "set", "null").await?,
@r###"{"optFloat":null}"###
);
Ok(())
}
// TODO(mongo, precision): Suffers from precision issues on Float
// These precision issues should be gone once the floating point fixes effort is done
// Note: These precision issues are created within Prisma's MongoDB connector, not within MongoDB.
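    // The drift is visible in the snapshots below, e.g. a decrement that should land on
    // 5.5 is asserted as 5.500000000000001.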
#[connector_test(schema(schema_3), only(MongoDb))]
async fn nested_update_float_ops_mongo(runner: Runner) -> TestResult<()> {
create_test_model(&runner, 1, None, None).await?;
create_test_model(&runner, 2, None, Some("5.5")).await?;
// Increment
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "increment", "4.6").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "increment", "4.6").await?,
@r###"{"optFloat":10.1}"###
);
// Decrement
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "decrement", "4.6").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "decrement", "4.6").await?,
@r###"{"optFloat":5.500000000000001}"###
);
// Multiply
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "multiply", "2").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "multiply", "2").await?,
@r###"{"optFloat":11.0}"###
);
// Divide
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "divide", "2").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "divide", "2").await?,
@r###"{"optFloat":5.500000000000001}"###
);
// Set
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "set", "5.1").await?,
@r###"{"optFloat":5.100000000000001}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "set", "5.1").await?,
@r###"{"optFloat":5.100000000000001}"###
);
// Set null
insta::assert_snapshot!(
query_nested_number_ops(&runner, 1, "optFloat", "set", "null").await?,
@r###"{"optFloat":null}"###
);
insta::assert_snapshot!(
query_nested_number_ops(&runner, 2, "optFloat", "set", "null").await?,
@r###"{"optFloat":null}"###
);
Ok(())
}
async fn create_test_model(
runner: &Runner,
id: u32,
opt_int: Option<u32>,
opt_float: Option<&str>,
) -> TestResult<()> {
let f = opt_float.unwrap_or("null");
let i = opt_int.map(|i| i.to_string()).unwrap_or_else(|| "null".to_string());
run_query!(
runner,
format!(
r#"mutation {{
createOneTestModel(
data: {{
id: {id}
rel: {{
create: {{
id: {id}
optInt: {int}
optFloat: {float}
}}
}}
}}
) {{
id
}}
}}"#,
id = id,
int = i,
float = f
)
);
Ok(())
}
async fn query_nested_number_ops(
runner: &Runner,
id: u32,
field: &str,
op: &str,
value: &str,
) -> TestResult<String> {
let res = run_query_json!(
runner,
format!(
r#"mutation {{
updateOneTestModel(
where: {{ id: {id} }}
data: {{ rel: {{ update: {{ {field}: {{ {op}: {value} }}}}}}}}
){{
rel {{
{field}
}}
}}
}}"#,
id = id,
field = field,
op = op,
value = value
),
&["data", "updateOneTestModel", "rel"]
);
Ok(res.to_string())
}
} | optFloat Float?
tm_id Int
tm TestModel @relation(fields: [tm_id], references: [id])
}"# |
key.go | package cryptoutil
import (
"math/rand"
"path/filepath"
"strings"
"github.com/Shravan-1908/binod/cli/internal/fsutil"
)
// generateRandomKey generates a random 32-byte signing key for all encryption-decryption tasks.
func | () []byte {
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890")
var b strings.Builder
for i := 0; i < 32; i++ {
b.WriteRune(letters[rand.Intn(len(letters))])
}
return []byte(b.String())
}
// getKey fetches the signing key from the local file system, and if it doesn't find one, generates one.
func getKey() []byte {
rootDir := fsutil.GetBinodRootDir()
keyLocation := filepath.Join(rootDir, "data", "key.dat")
var key []byte
	// key doesn't exist
if !fsutil.Exists(keyLocation) {
key = generateRandomKey()
fsutil.WriteToFile(string(key), keyLocation)
} else { // key exists
key = []byte(fsutil.ReadFile(keyLocation))
}
return key
}
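// A minimal usage sketch (hypothetical caller, not part of this file): the 32-byte
// key returned by getKey can back a symmetric cipher from the standard library, e.g.
//
//	block, err := aes.NewCipher(getKey()) // a 32-byte key selects AES-256
//
// where aes is crypto/aes.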
| generateRandomKey |
html_filter_body.rs | use crate::filter::html_body_action::HtmlBodyVisitor;
use crate::html;
use std::collections::HashSet;
#[derive(Debug)]
struct BufferLink {
buffer: String,
tag_name: String,
previous: Option<Box<BufferLink>>,
}
#[derive(Debug)]
pub struct HtmlFilterBodyAction {
enter: Option<String>,
leave: Option<String>,
visitor: HtmlBodyVisitor,
current_buffer: Option<Box<BufferLink>>,
last_buffer: String,
}
lazy_static! {
pub static ref VOID_ELEMENTS: HashSet<&'static str> = {
let mut set = HashSet::new();
set.insert("area");
set.insert("base");
set.insert("br");
set.insert("col");
set.insert("embed");
set.insert("hr");
set.insert("img");
set.insert("input");
set.insert("meta");
set.insert("param");
set.insert("source");
set.insert("track");
set.insert("wbr");
set
};
}
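// Void elements never have a closing tag, so the StartTagToken branch in `filter`
// immediately runs the end-tag handling for any tag name found in this set.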
impl HtmlFilterBodyAction {
pub fn new(visitor: HtmlBodyVisitor) -> Self |
pub fn filter(&mut self, input: String) -> String {
let mut data = self.last_buffer.clone();
data.push_str(input.as_str());
let buffer = &mut data.as_bytes() as &mut dyn std::io::Read;
let mut tokenizer = html::Tokenizer::new(buffer);
let mut to_return = "".to_string();
loop {
let mut token_type = tokenizer.next();
if token_type == html::TokenType::ErrorToken {
self.last_buffer = tokenizer.raw();
self.last_buffer.push_str(tokenizer.buffered().as_str());
break;
}
let mut token_data = tokenizer.raw().clone();
while token_type == html::TokenType::TextToken && (token_data.contains('<') || token_data.contains("</")) {
token_type = tokenizer.next();
if token_type == html::TokenType::ErrorToken {
self.last_buffer = token_data;
self.last_buffer.push_str(tokenizer.raw().as_str());
self.last_buffer.push_str(tokenizer.buffered().as_str());
return to_return;
}
if self.current_buffer.is_some() {
self.current_buffer.as_mut().unwrap().buffer.push_str(token_data.as_str());
} else {
to_return.push_str(token_data.as_str());
}
token_data = tokenizer.raw();
}
match token_type {
html::TokenType::StartTagToken => {
let (tag_name, _) = tokenizer.tag_name();
let tag_name_str = tag_name.unwrap_or_else(|| "".to_string());
let (new_buffer_link, new_token_data) = self.on_start_tag_token(tag_name_str.clone(), token_data);
self.current_buffer = new_buffer_link;
token_data = new_token_data;
if VOID_ELEMENTS.contains(tag_name_str.as_str()) {
let (new_buffer_link, new_token_data) = self.on_end_tag_token(tag_name_str.clone(), token_data);
self.current_buffer = new_buffer_link;
token_data = new_token_data;
}
}
html::TokenType::EndTagToken => {
let (tag_name, _) = tokenizer.tag_name();
let (new_buffer_link, new_token_data) = self.on_end_tag_token(tag_name.unwrap(), token_data);
self.current_buffer = new_buffer_link;
token_data = new_token_data;
}
html::TokenType::SelfClosingTagToken => {
let (tag_name, _) = tokenizer.tag_name();
let (new_buffer_link, new_token_data) = self.on_start_tag_token(tag_name.as_ref().unwrap().clone(), token_data);
self.current_buffer = new_buffer_link;
token_data = new_token_data;
let (new_buffer_link, new_token_data) = self.on_end_tag_token(tag_name.unwrap(), token_data);
self.current_buffer = new_buffer_link;
token_data = new_token_data;
}
_ => {}
}
if self.current_buffer.is_some() {
self.current_buffer.as_mut().unwrap().buffer.push_str(token_data.as_str());
} else {
to_return.push_str(token_data.as_str());
}
}
to_return
}
pub fn end(&mut self) -> String {
let mut to_return = self.last_buffer.clone();
let mut buffer = self.current_buffer.as_ref();
while buffer.is_some() {
to_return.push_str(buffer.unwrap().buffer.as_str());
buffer = buffer.unwrap().previous.as_ref();
}
to_return
}
fn on_start_tag_token(&mut self, tag_name: String, data: String) -> (Option<Box<BufferLink>>, String) {
let mut buffer = data;
let mut buffer_link_actions = 0;
if self.enter.is_some() && self.enter.as_ref().unwrap() == tag_name.as_str() {
let (next_enter, next_leave, start_buffer, new_buffer) = self.visitor.enter(buffer);
buffer = new_buffer;
self.enter = next_enter;
self.leave = next_leave;
if start_buffer {
buffer_link_actions += 1;
}
}
if buffer_link_actions > 0 {
let new_current_buffer = BufferLink {
tag_name,
previous: self.current_buffer.take(),
buffer: "".to_string(),
};
self.current_buffer = Some(Box::new(new_current_buffer));
}
(self.current_buffer.take(), buffer)
}
fn on_end_tag_token(&mut self, tag_name: String, data: String) -> (Option<Box<BufferLink>>, String) {
let mut buffer: String;
if self.current_buffer.is_some() && self.current_buffer.as_ref().unwrap().tag_name == tag_name {
buffer = self.current_buffer.as_ref().unwrap().buffer.clone();
buffer.push_str(data.as_str());
} else {
buffer = data;
}
if self.leave.is_some() && self.leave.as_ref().unwrap() == tag_name.as_str() {
let (next_enter, next_leave, new_buffer) = self.visitor.leave(buffer);
buffer = new_buffer;
self.enter = next_enter;
self.leave = next_leave;
}
if self.current_buffer.is_some() && self.current_buffer.as_ref().unwrap().tag_name == tag_name {
return (self.current_buffer.as_mut().unwrap().previous.take(), buffer);
}
(self.current_buffer.take(), buffer)
}
}
| {
Self {
enter: Some(visitor.first()),
leave: None,
last_buffer: "".to_string(),
current_buffer: None,
visitor,
}
} |
gezags_verhouding.py | from dataclasses import dataclass
from openpersonen.api.enum import IndicatieGezagMinderjarigeChoices
from .in_onderzoek import GezagsVerhoudingInOnderzoek
@dataclass
class GezagsVerhouding:
indicatieCurateleRegister: bool
indicatieGezagMinderjarige: str
inOnderzoek: GezagsVerhoudingInOnderzoek
def | (self):
return IndicatieGezagMinderjarigeChoices.values[self.indicatieGezagMinderjarige]
| get_indicatieGezagMinderjarige_display |
application_type.go | package graph
import (
"strings"
"errors"
)
// Provides operations to manage the deviceManagement singleton.
type ApplicationType int
const (
UNIVERSAL_APPLICATIONTYPE ApplicationType = iota
DESKTOP_APPLICATIONTYPE
)
func (i ApplicationType) String() string {
return []string{"UNIVERSAL", "DESKTOP"}[i]
}
func ParseApplicationType(v string) (interface{}, error) |
func SerializeApplicationType(values []ApplicationType) []string {
result := make([]string, len(values))
for i, v := range values {
result[i] = v.String()
}
return result
}
| {
result := UNIVERSAL_APPLICATIONTYPE
switch strings.ToUpper(v) {
case "UNIVERSAL":
result = UNIVERSAL_APPLICATIONTYPE
case "DESKTOP":
result = DESKTOP_APPLICATIONTYPE
default:
return 0, errors.New("Unknown ApplicationType value: " + v)
}
return &result, nil
} |
api.js | const fetch = require('node-fetch')
const api_url = 'https://api.linksb.me'
async function get(username) {
let res = await fetch(`${api_url}/user/${username}`)
let data = await res.json()
return data
}
async function | (username) {
let res = await fetch(`${api_url}/qrcode/${username}`)
let data = await res.json()
return data
}
module.exports = {
'get': get,
'get_qrcode': get_qrcode
} | get_qrcode |
0038_rename_de_to_lang2.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-01-29 16:22
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
| dependencies = [
('questions', '0037_rename_en_to_lang1'),
]
operations = [
migrations.RenameField(
model_name='catalog',
old_name='title_de',
new_name='title_lang2',
),
migrations.RenameField(
model_name='question',
old_name='help_de',
new_name='help_lang2',
),
migrations.RenameField(
model_name='question',
old_name='text_de',
new_name='text_lang2',
),
migrations.RenameField(
model_name='question',
old_name='verbose_name_de',
new_name='verbose_name_lang2',
),
migrations.RenameField(
model_name='question',
old_name='verbose_name_plural_de',
new_name='verbose_name_plural_lang2',
),
migrations.RenameField(
model_name='questionset',
old_name='help_de',
new_name='help_lang2',
),
migrations.RenameField(
model_name='questionset',
old_name='title_de',
new_name='title_lang2',
),
migrations.RenameField(
model_name='questionset',
old_name='verbose_name_de',
new_name='verbose_name_lang2',
),
migrations.RenameField(
model_name='questionset',
old_name='verbose_name_plural_de',
new_name='verbose_name_plural_lang2',
),
migrations.RenameField(
model_name='section',
old_name='title_de',
new_name='title_lang2',
),
] |
|
test_images.py | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from nova import exception
from nova.openstack.common import processutils
from nova import test
from nova import utils
from nova.virt import images
class QemuTestCase(test.NoDBTestCase):
def test_qemu_info_with_bad_path(self):
self.assertRaises(exception.InvalidDiskInfo,
images.qemu_img_info,
'/path/that/does/not/exist')
@mock.patch.object(os.path, 'exists', return_value=True)
def test_qemu_info_with_errors(self, path_exists):
self.assertRaises(processutils.ProcessExecutionError,
images.qemu_img_info,
'/fake/path')
@mock.patch.object(os.path, 'exists', return_value=True)
@mock.patch.object(utils, 'execute',
return_value=('stdout', None))
def test_qemu_info_with_no_errors(self, path_exists,
utils_execute):
| image_info = images.qemu_img_info('/fake/path')
self.assertTrue(image_info)
self.assertTrue(str(image_info)) |
|
hikvision.py | import threading
import traceback
from camera.sdk_gige_hikvision.GrabImage import MVS_Cam # read the stream via the industrial camera SDK
class hikCamera(threading.Thread):
def __init__(sel | me):
threading.Thread.__init__(self)
self.ip_name = ip_name
        # Initialize the camera
self.device_camera = MVS_Cam(self.ip_name)
def run(self):
i = 0
while i < 100:
try:
                # Grab a frame
                # This read mode does not buffer, so the frame obtained is always the latest one
frame = self.device_camera.Get_Frame()
except:
print(traceback.format_exc())
else:
if type(frame) != type(None):
print(type(frame), self.ip_name, frame.shape[:2])
i += 1
        # Close the camera
self.device_camera.Close_Cam() | f, ip_na |
_vtst.py | """ Run and Read the scans from VTST calculations
"""
import automol
import autofile
from routines.es._routines import sp
from routines.es._routines import _wfn as wfn
from routines.es._routines import _scan as scan
from lib import filesys
from lib.submission import qchem_params
from lib.reaction import grid as rxngrid
def radrad_scan(ts_zma, ts_info, hs_info,
ts_formula, high_mul, active_space,
rct_info, rct_ichs, rcts_cnf_fs, rcts_gra,
grid1, grid2, coord_name, frm_bnd_keys,
mod_var_scn_thy_info,
mod_var_sp1_thy_info, # Need an unmodifie
var_sp1_thy_info,
var_sp2_thy_info,
hs_var_sp1_thy_info,
hs_var_sp2_thy_info,
mod_thy_info,
vscnlvl_thy_save_fs,
vscnlvl_ts_save_fs,
scn_run_fs, scn_save_fs,
pot_thresh,
overwrite, update_guess,
constraint_dct=None,
zma_locs=(0,)):
""" Run the scan for VTST calculations
"""
# Set up the casscf options
ref_zma = automol.zmatrix.set_values(ts_zma, {coord_name: grid1[0]})
cas_kwargs = wfn.build_wfn(ref_zma, ts_info, ts_formula, high_mul,
rct_ichs, rct_info,
active_space, mod_var_scn_thy_info)
# Run the scan along the reaction coordinate
scan.multiref_rscan(
ts_zma=ts_zma,
ts_info=ts_info,
grid1=grid1,
grid2=grid2,
coord_name=coord_name,
mod_var_scn_thy_info=mod_var_scn_thy_info,
vscnlvl_thy_save_fs=vscnlvl_thy_save_fs,
scn_run_fs=scn_run_fs,
scn_save_fs=scn_save_fs,
overwrite=overwrite,
update_guess=update_guess,
constraint_dct=constraint_dct,
**cas_kwargs
)
# Assess the potentials to see if there is a saddle point zma
print('above vtst max')
sadpt_zma = rxngrid.vtst_max(
list(grid1)+list(grid2), coord_name, scn_save_fs,
mod_var_scn_thy_info, constraint_dct,
ethresh=pot_thresh)
print('sadpt_zma', sadpt_zma)
if sadpt_zma is None:
# Calculate and the energies needed for inf sep ene
far_locs = [[coord_name], [grid1[0]]]
ts_zma = scn_save_fs[-1].file.zmatrix.read(far_locs)
geo = scn_save_fs[-1].file.geometry.read(far_locs)
geo_run_path = scn_run_fs[-1].path(far_locs)
geo_save_path = scn_save_fs[-1].path(far_locs)
_ = scan.radrad_inf_sep_ene(
hs_info, ts_zma,
rct_info, rcts_cnf_fs,
var_sp1_thy_info, var_sp2_thy_info,
hs_var_sp1_thy_info, hs_var_sp2_thy_info,
geo, geo_run_path, geo_save_path,
scn_save_fs, far_locs,
overwrite=overwrite,
**cas_kwargs)
# Save the vmatrix for use in reading
_save_traj(ts_zma, frm_bnd_keys, rcts_gra,
vscnlvl_ts_save_fs, zma_locs=zma_locs)
print('\nRunning Hessians and energies...')
_vtst_hess_ene(ts_info, coord_name,
mod_var_scn_thy_info, mod_var_sp1_thy_info,
scn_save_fs, scn_run_fs,
overwrite, **cas_kwargs)
def molrad_scan(ts_zma, ts_info,
rct_info, rcts_cnf_fs, rcts_gra,
grid1, grid2, coord_name, frm_bnd_keys,
thy_info, vsp1_thy_info,
thy_save_fs,
ts_save_fs,
scn_run_fs, scn_save_fs,
overwrite, update_guess, retryfail,
zma_locs=(0,)):
""" Run the scan for VTST calculations
"""
# Set the thy info objects appropriately
if vsp1_thy_info is not None:
inf_thy_info = vsp1_thy_info
else:
inf_thy_info = thy_info
mod_thy_info = filesys.inf.modify_orb_restrict(ts_info, thy_info)
mod_vsp1_thy_info = filesys.inf.modify_orb_restrict(ts_info, vsp1_thy_info)
# Set script
_, opt_script_str, _, opt_kwargs = qchem_params(
*mod_thy_info[0:2])
# Setup and run the first part of the scan to shorte
scan.run_two_way_scan(
ts_zma, ts_info, mod_thy_info,
grid1, grid2, coord_name,
thy_save_fs,
scn_run_fs, scn_save_fs,
opt_script_str, overwrite,
update_guess=update_guess,
reverse_sweep=False,
saddle=False, # opts along scan are min, not sadpt opts
constraint_dct=None,
retryfail=retryfail,
**opt_kwargs
)
# Infinite seperation energy calculation
print('\nCalculating infinite separation energy...')
print('inf_thy_info', inf_thy_info)
_ = scan.molrad_inf_sep_ene(
rct_info, rcts_cnf_fs,
inf_thy_info, overwrite)
# Save the vmatrix for use in reading | _vtst_hess_ene(ts_info, coord_name,
mod_thy_info, mod_vsp1_thy_info,
scn_save_fs, scn_run_fs,
overwrite, **{})
def _vtst_hess_ene(ts_info, coord_name,
mod_thy_info, mod_vsp1_thy_info,
scn_save_fs, scn_run_fs,
overwrite, **cas_kwargs):
""" VTST Hessians and Energies
"""
scn_locs = filesys.build.scn_locs_from_fs(
scn_save_fs, [coord_name], constraint_dct=None)
print('\n Running Hessians and Gradients...')
hess_script_str, _, hess_kwargs, _ = qchem_params(
*mod_thy_info[0:2])
hess_kwargs.update(cas_kwargs)
for locs in scn_locs:
geo_run_path = scn_run_fs[-1].path(locs)
geo_save_path = scn_save_fs[-1].path(locs)
scn_run_fs[-1].create(locs)
zma, geo = filesys.inf.cnf_fs_zma_geo(scn_save_fs, locs)
sp.run_hessian(zma, geo, ts_info, mod_thy_info,
scn_save_fs, geo_run_path, geo_save_path, locs,
hess_script_str, overwrite, **hess_kwargs)
sp.run_gradient(zma, geo, ts_info, mod_thy_info,
scn_save_fs, geo_run_path, geo_save_path, locs,
hess_script_str, overwrite, **hess_kwargs)
print('\n Running Energies...')
script_str, _, ene_kwargs, _ = qchem_params(
*mod_vsp1_thy_info[0:2])
ene_kwargs.update(cas_kwargs)
for locs in scn_locs:
geo_run_path = scn_run_fs[-1].path(locs)
geo_save_path = scn_save_fs[-1].path(locs)
scn_run_fs[-1].create(locs)
zma, geo = filesys.inf.cnf_fs_zma_geo(scn_save_fs, locs)
sp.run_energy(zma, geo, ts_info, mod_vsp1_thy_info,
scn_save_fs, geo_run_path, geo_save_path, locs,
script_str, overwrite, **ene_kwargs)
def _save_traj(ts_zma, frm_bnd_keys, rcts_gra, ts_save_fs, zma_locs=(0,)):
""" save trajectory and zma stuff
"""
print('\nSaving the V-Matrix into the filesystem...')
ts_fs, _ = ts_save_fs
ts_path = ts_fs[-1].path()
zma_fs = autofile.fs.zmatrix(ts_path)
zma_fs[-1].create(zma_locs)
zma_fs[-1].file.vmatrix.write(automol.zmatrix.var_(ts_zma), zma_locs)
print('\nSaving the trajectory into the filesystem...')
tra = (frozenset({frm_bnd_keys}),
frozenset({}))
zma_fs[-1].file.transformation.write(tra, zma_locs)
zma_fs[-1].file.reactant_graph.write(rcts_gra, zma_locs) | _save_traj(ts_zma, frm_bnd_keys, rcts_gra,
ts_save_fs, zma_locs=zma_locs)
print('\nRunning Hessians and energies...') |
DescribeTablev2.go | // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
// SPDX - License - Identifier: Apache - 2.0
// snippet-start:[dynamodb.gov2.DescribeTable]
package main
import (
"context"
"flag"
"fmt"
"github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/dynamodb"
)
// DynamoDBDescribeTableAPI defines the interface for the DescribeTable function.
// We use this interface to enable unit testing.
type DynamoDBDescribeTableAPI interface {
DescribeTable(ctx context.Context,
params *dynamodb.DescribeTableInput,
optFns ...func(*dynamodb.Options)) (*dynamodb.DescribeTableOutput, error)
}
// GetTableInfo retrieves information about the table.
func GetTableInfo(c context.Context, api DynamoDBDescribeTableAPI, input *dynamodb.DescribeTableInput) (*dynamodb.DescribeTableOutput, error) {
return api.DescribeTable(c, input)
}
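// mockDescribeTableAPI is an illustrative stub (an assumption, not part of the original
// sample) showing how the DynamoDBDescribeTableAPI interface enables unit testing:
// a canned implementation can stand in for the live DynamoDB client.
type mockDescribeTableAPI struct {
	out *dynamodb.DescribeTableOutput
	err error
}

func (m mockDescribeTableAPI) DescribeTable(ctx context.Context,
	params *dynamodb.DescribeTableInput,
	optFns ...func(*dynamodb.Options)) (*dynamodb.DescribeTableOutput, error) {
	// Return canned values instead of calling the service.
	return m.out, m.err
}

// A test could then call, for example:
//   GetTableInfo(context.TODO(), mockDescribeTableAPI{out: &dynamodb.DescribeTableOutput{}}, &dynamodb.DescribeTableInput{})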
func main() |
// snippet-end:[dynamodb.gov2.DescribeTable]
| {
table := flag.String("t", "", "The name of the table")
flag.Parse()
if *table == "" {
fmt.Println("You must specify a table name (-t TABLE)")
return
}
// Use the SDK's default configuration.
cfg, err := config.LoadDefaultConfig(context.TODO())
if err != nil {
panic("unable to load SDK config, " + err.Error())
}
// Create an Amazon DynamoDB client.
client := dynamodb.NewFromConfig(cfg)
// Build the input parameters for the request.
input := &dynamodb.DescribeTableInput{
TableName: table,
}
resp, err := GetTableInfo(context.TODO(), client, input)
if err != nil {
panic("failed to describe table, " + err.Error())
}
fmt.Println("Info about " + *table + ":")
fmt.Println(" #items: ", resp.Table.ItemCount)
fmt.Println(" Size (bytes)", resp.Table.TableSizeBytes)
fmt.Println(" Status: ", string(resp.Table.TableStatus))
} |
eval_tiny_one_image.py | # -*- coding: utf-8 -*-
"""
Created on Mon Dec 10 15:49:15 2018
@author: fei.wu
"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tiny_face_model
import util
import cv2
import numpy as np
import matplotlib.pyplot as plt
import pickle
import pylab as pl
from scipy.special import expit
MAX_INPUT_DIM = 5000.0
def overlay_bounding_boxes(raw_img, refined_bboxes, lw):
|
def evaluate(weight_file_path, frame, prob_thresh=0.5, nms_thresh=0.1, lw=3, display=False):
"""Detect faces in images.
Args:
prob_thresh:
The threshold of detection confidence.
nms_thresh:
The overlap threshold of non maximum suppression
weight_file_path:
A pretrained weight file in the pickle format
generated by matconvnet_hr101_to_tf.py.
data_dir:
A directory which contains images.
output_dir:
A directory into which images with detected faces are output.
lw:
Line width of bounding boxes. If zero specified,
this is determined based on confidence of each detection.
display:
Display tiny face images on window.
Returns:
None.
"""
# placeholder of input images. Currently batch size of one is supported.
x = tf.placeholder(tf.float32, [1, None, None, 3]) # n, h, w, c
# Create the tiny face model which weights are loaded from a pretrained model.
model = tiny_face_model.Model(weight_file_path)
score_final = model.tiny_face(x)
# Load an average image and clusters(reference boxes of templates).
with open(weight_file_path, "rb") as f:
_, mat_params_dict = pickle.load(f)
average_image = model.get_data_by_key("average_image")
clusters = model.get_data_by_key("clusters")
clusters_h = clusters[:, 3] - clusters[:, 1] + 1
clusters_w = clusters[:, 2] - clusters[:, 0] + 1
normal_idx = np.where(clusters[:, 4] == 1)
# main
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
raw_img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
raw_img_f = raw_img.astype(np.float32)
def _calc_scales():
raw_h, raw_w = raw_img.shape[0], raw_img.shape[1]
min_scale = min(np.floor(np.log2(np.max(clusters_w[normal_idx] / raw_w))),
np.floor(np.log2(np.max(clusters_h[normal_idx] / raw_h))))
max_scale = min(1.0, -np.log2(max(raw_h, raw_w) / MAX_INPUT_DIM))
scales_down = pl.frange(min_scale, 0, 1.)
scales_up = pl.frange(0.5, max_scale, 0.5)
scales_pow = np.hstack((scales_down, scales_up))
scales = np.power(2.0, scales_pow)
return scales
scales = _calc_scales()
# initialize output
bboxes = np.empty(shape=(0, 5))
# process input at different scales
for s in scales:
img = cv2.resize(raw_img_f, (0, 0), fx=s, fy=s, interpolation=cv2.INTER_LINEAR)
img = img - average_image
img = img[np.newaxis, :]
            # we don't run every template on every scale; ids of templates to ignore:
tids = list(range(4, 12)) + ([] if s <= 1.0 else list(range(18, 25)))
ignoredTids = list(set(range(0, clusters.shape[0])) - set(tids))
# run through the net
score_final_tf = sess.run(score_final, feed_dict={x: img})
# collect scores
score_cls_tf, score_reg_tf = score_final_tf[:, :, :, :25], score_final_tf[:, :, :, 25:125]
prob_cls_tf = expit(score_cls_tf)
prob_cls_tf[0, :, :, ignoredTids] = 0.0
def _calc_bounding_boxes():
# threshold for detection
_, fy, fx, fc = np.where(prob_cls_tf > prob_thresh)
# interpret heatmap into bounding boxes
cy = fy * 8 - 1
cx = fx * 8 - 1
ch = clusters[fc, 3] - clusters[fc, 1] + 1
cw = clusters[fc, 2] - clusters[fc, 0] + 1
# extract bounding box refinement
Nt = clusters.shape[0]
tx = score_reg_tf[0, :, :, 0:Nt]
ty = score_reg_tf[0, :, :, Nt:2*Nt]
tw = score_reg_tf[0, :, :, 2*Nt:3*Nt]
th = score_reg_tf[0, :, :, 3*Nt:4*Nt]
# refine bounding boxes
dcx = cw * tx[fy, fx, fc]
dcy = ch * ty[fy, fx, fc]
rcx = cx + dcx
rcy = cy + dcy
rcw = cw * np.exp(tw[fy, fx, fc])
rch = ch * np.exp(th[fy, fx, fc])
scores = score_cls_tf[0, fy, fx, fc]
tmp_bboxes = np.vstack((rcx - rcw / 2, rcy - rch / 2, rcx + rcw / 2, rcy + rch / 2))
tmp_bboxes = np.vstack((tmp_bboxes / s, scores))
tmp_bboxes = tmp_bboxes.transpose()
return tmp_bboxes
tmp_bboxes = _calc_bounding_boxes()
bboxes = np.vstack((bboxes, tmp_bboxes)) # <class 'tuple'>: (5265, 5)
# non maximum suppression
# refind_idx = util.nms(bboxes, nms_thresh)
refind_idx = tf.image.non_max_suppression(tf.convert_to_tensor(bboxes[:, :4], dtype=tf.float32),
tf.convert_to_tensor(bboxes[:, 4], dtype=tf.float32),
max_output_size=bboxes.shape[0], iou_threshold=nms_thresh)
refind_idx = sess.run(refind_idx)
refined_bboxes = bboxes[refind_idx]
overlay_bounding_boxes(raw_img, refined_bboxes, lw)
if display:
# plt.axis('off')
plt.imshow(raw_img)
plt.show()
return refined_bboxes
def main(frame):
print("Searching faces...")
with tf.Graph().as_default():
faces = evaluate(
weight_file_path= "weights.pckl", frame = frame,
prob_thresh=0.7, nms_thresh=0.1, #non max suppression threshold,
lw=2, display= False)
return faces
| """Overlay bounding boxes of face on images.
Args:
raw_img:
A target image.
refined_bboxes:
Bounding boxes of detected faces.
lw:
Line width of bounding boxes. If zero specified,
this is determined based on confidence of each detection.
Returns:
None.
"""
# Overlay bounding boxes on an image with the color based on the confidence.
for r in refined_bboxes:
_score = expit(r[4])
cm_idx = int(np.ceil(_score * 255))
rect_color = [int(np.ceil(x * 255)) for x in util.cm_data[cm_idx]] # parula
_lw = lw
if lw == 0: # line width of each bounding box is adaptively determined.
            bw, bh = r[2] - r[0] + 1, r[3] - r[1] + 1
_lw = 1 if min(bw, bh) <= 20 else max(2, min(3, min(bh / 20, bw / 20)))
_lw = int(np.ceil(_lw * _score))
_r = [int(x) for x in r[:4]]
cv2.rectangle(raw_img, (_r[0], _r[1]), (_r[2], _r[3]), rect_color, _lw) |
delete_result_psql.py | from django.core.management.base import BaseCommand, CommandError
from results.models import ResultStage, ResultCheck
def count_result(model_arg):
if model_arg == "resultcheck":
|
else:
result_count = ResultStage.objects.all().count()
return result_count
def delete_result(model_arg):
if model_arg == "resultcheck":
ResultCheck.objects.all().delete()
else:
ResultStage.objects.all().delete()
def count_delete_result(model_arg):
count = str(count_result(model_arg))
delete_result(model_arg)
message = '%s objects deleted:\t%s' % (model_arg, count)
print "\n" + message + "\n"
class Command(BaseCommand):
help = 'Delete all items in ResultStage or ResultCheck model.'
def add_arguments(self, parser):
## positional requred arguments
parser.add_argument('model',
# action='store_true',
# dest='model',
# default='result',
help='Specify the model (resultstage or resultcheck) you want to delete'
)
def handle(self, *args, **options):
model_arg = options['model']
count_delete_result(model_arg) | result_count = ResultCheck.objects.all().count() |
views.py | import logging
import os
import json
import shutil
import threading
from typing import Any, List
from django.contrib.auth import login
from django.forms.models import BaseModelForm
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from django.views.generic import ListView, DetailView, CreateView
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.urls import reverse_lazy
from django.views.generic.edit import DeleteView
from django.shortcuts import redirect, render
from django.urls import reverse
from django.utils import timezone
from datetime import datetime
from django.contrib.auth.mixins import LoginRequiredMixin
from core.utils import build_zip_json, create_venv, extract_zip, get_python_choices, write_log
from core.models import Plugin, PluginRun
from core.forms import NewUserForm, PluginFormSet, PluginSourceForm
from core.enums.log_type_enum import LogType
logging.basicConfig(level=logging.DEBUG,
format='[%(levelname)s] (%(threadName)-9s) %(message)s',)
def register_request(request: HttpRequest):
if request.method == "POST":
form = NewUserForm(request.POST)
if form.is_valid():
user = form.save()
login(request, user)
return redirect(reverse("core:index"))
form = NewUserForm()
return render(request=request, template_name="registration/register.html", context={"register_form":form})
class PluginIndexView(LoginRequiredMixin, ListView):
model = Plugin
template_name = 'core/index.html'
context_object_name = 'plugins'
paginate_by = 5
def get_context_data(self, **kwargs):
context = super(PluginIndexView, self).get_context_data(**kwargs)
plugins = self.get_queryset()
page = self.request.GET.get('page')
paginator = Paginator(plugins, self.paginate_by)
try:
plugins = paginator.page(page)
except PageNotAnInteger:
plugins = paginator.page(1)
except EmptyPage:
plugins = paginator.page(paginator.num_pages)
context['plugins'] = plugins
return context
class PluginDetailView(LoginRequiredMixin, DetailView):
|
class PluginCreateView(LoginRequiredMixin, CreateView):
form_class = PluginSourceForm
template_name = 'core/plugin_create_form.html'
success_url = reverse_lazy('core:index')
def get_context_data(self, **kwargs):
context = super(PluginCreateView, self).get_context_data(**kwargs)
context['plugin_formset'] = PluginFormSet()
return context
def post(self, request, *args, **kwargs):
self.object = None
form_class = self.get_form_class()
form = self.get_form(form_class)
plugin_formset = PluginFormSet(self.request.POST)
if form.is_valid() and plugin_formset.is_valid():
return self.form_valid(form, plugin_formset, request.user)
else:
return self.form_invalid(form, plugin_formset)
def form_valid(self, form: BaseModelForm, plugin_formset: PluginFormSet, user):
# save PluginSource
self.object = form.save(commit=False)
self.object.source_dest = form.cleaned_data['source_dest']
self.object.source_hash = form.cleaned_data['source_hash']
self.object.upload_time = form.cleaned_data['upload_time']
self.object.upload_user = user
self.object.save()
build_hash_thread = threading.Thread(
target=build_zip_json, args=(form.cleaned_data['plugin_zip_file'].file, self.object))
build_hash_thread.start()
log_json: dict = {
'log_datetime': datetime.timestamp(timezone.now()),
'source_dest': self.object.source_dest,
'source_hash': self.object.source_hash,
'upload_time': self.object.upload_time.strftime("%m/%d/%Y, %H:%M:%S"),
'upload_user_username': self.object.upload_user.username,
'upload_user_email': self.object.upload_user.email,
}
write_log(LogType.CREATE, self.object, log_json)
# save Plugin
plugin: List[Plugin] = plugin_formset.save(commit=False)
plugin[0].plugin_source = self.object
plugin[0].python_version = plugin_formset.cleaned_data[0]['python_version']
plugin[0].plugin_dest = 'core' + os.sep + \
'plugin' + os.sep + self.object.source_hash + '_' + \
str(datetime.timestamp(self.object.upload_time))
extract_zip_thread = threading.Thread(target=extract_zip, args=(
form.cleaned_data['plugin_zip_file'], plugin[0].plugin_dest))
extract_zip_thread.start()
plugin[0].save()
extract_zip_thread.join()
venv_thread = threading.Thread(target=create_venv, args=(plugin[0], ))
venv_thread.start()
return redirect(reverse("core:index"))
def form_invalid(self, form, plugin_formset):
return self.render_to_response(
self.get_context_data(form=form,
product_meta_formset=plugin_formset
)
)
class PluginDeleteView(LoginRequiredMixin, DeleteView):
model = Plugin
template_name = 'core/plugin_delete.html'
success_url = reverse_lazy('core:index')
def delete(self, request: HttpRequest, *args: str, **kwargs: Any) -> HttpResponse:
object: Plugin = self.get_object()
user = request.user
source_dest = object.plugin_source.source_dest
shutil.rmtree(object.plugin_dest)
deleted_time = timezone.now()
deleted_dest = 'core' + os.sep + 'source' + os.sep + 'deleted_' + object.plugin_source.source_hash + \
'_' + str(datetime.timestamp(object.plugin_source.upload_time))
log_json: dict = {
'log_datetime': datetime.timestamp(deleted_time),
'source_dest': object.plugin_source.source_dest,
'source_hash': object.plugin_source.source_hash,
'upload_time': object.plugin_source.upload_time.strftime("%m/%d/%Y, %H:%M:%S"),
'upload_user_username': object.plugin_source.upload_user.username,
'upload_user_email': object.plugin_source.upload_user.email,
'source_file_hash': json.loads(object.plugin_source.source_file_hash),
'username': user.username,
'user_email': user.email,
'deleted_dest': deleted_dest
}
write_log(LogType.DELETE, object.plugin_source, log_json)
shutil.move(source_dest, deleted_dest)
object.plugin_source.source_hash = 'deleted_' + object.plugin_source.source_hash
object.plugin_source.source_dest = deleted_dest
object.plugin_source.save()
return super().delete(request, *args, **kwargs)
| model = Plugin
template_name = 'core/plugin_detail.html'
context_object_name = 'plugin'
paginate_by = 5
def get_context_data(self, **kwargs):
context = super(PluginDetailView, self).get_context_data(**kwargs)
plugin_runs = PluginRun.objects.filter(plugin=self.kwargs['pk'])
page = self.request.GET.get('page')
paginator = Paginator(plugin_runs, self.paginate_by)
try:
plugin_runs = paginator.page(page)
except PageNotAnInteger:
plugin_runs = paginator.page(1)
except EmptyPage:
plugin_runs = paginator.page(paginator.num_pages)
context['plugin_runs'] = plugin_runs
return context |
traits.rs | // Compile with:
// rustc traits.rs
trait Stringer {
fn string(&self) -> &'static str;
}
fn stringify<T : Stringer>(s: Vec<T>) -> String {
let mut ret = String::new();
for x in s.iter() {
ret.push_str(x.string());
}
ret
}
struct MyT {
}
impl Stringer for MyT {
fn string(&self) -> &'static str {
"X"
}
}
fn | () {
let v = vec![MyT{}, MyT{}, MyT{}];
println!("{}", stringify(v));
}
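// Running this prints "XXX": stringify appends one "X" per MyT element in the vector.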
| main |
main.py | # Recomendação : Use apenas se seu computador/celular for bom.
# Autor : Kiny
# Pix : (61) 9603-5417
# Github : https://github.com/Kiny-Kiny
# WhatsApp : http://wa.me/552179180533
# Telegram : @K_iny
# Instagram : @parziovanni
# Twitter : @KinyBruno
############################################
'''Modules'''
from itertools import product;
from sys import argv,stdout;
from time import sleep;
from os import system;
############################################
'''Colors'''
global R,B,C,G
R='\033[1;31m';
B='\033[1;34m';
C='\033[1;37m';
G='\033[1;32m';
############################################
'''Functions'''
def slow(msg):
for i in msg: stdout.write(i);sleep(0.007);stdout.flush();
def clear(): system('cls||clear');
############################################
'''Banner'''
logo=B+''' __ __ __ __ __ __ __
/\ \/ / /\ \ /\ "-.\ \ /\ \_\ \
\ \ _"-. \ \ \ \ \ \-. \ \ \____ \
\ \_\ \_\ \ \_\ \ \_\\"\_\ \/\_____\
\/_/\/_/ \/_/ \/_/ \/_/ \/_____/ \n'''+C
############################################
'''Wordlist creation section'''
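# For illustration: itertools.product('ab', repeat=2) yields ('a','a'), ('a','b'),
# ('b','a'), ('b','b'); each tuple below is joined into a single candidate string.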
def wordlis | sg='';res = product('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_1234567890', repeat=i);
for g in res:
senha=''
for i in g: senha+=i
msg+=f'{senha}\n'
return msg
def main(min,max):
lis=[]
slow(
        f'[{G}!{C}] Creating the wordlist...\n'
)
for i in range(int(min),int(max)): lis.append(str(wordlist(i)));
msg='';
for i in lis: msg+=i
file=open('KingCrimson.txt','w+');
file.write(msg);
file.close();
clear();
slow(
        f'{logo}\n[{G}Wordlist Created!{C}] The wordlist was created and saved to the file KingCrimson.txt\n'
);
############################################
if int(len(argv)) < 3:
slow(
        str(logo) + f'\n{G}- {C}Usage{G} : {C}python3 '+ str(argv[0]) + G+' {'+C+'Minimum length'+G+'} {' +C+'Maximum length'+G+'}\n'+C
);exit();
try: int(argv[1]);int(argv[2]);
except: slow(
    f'{logo}\n[{R}Error{C}] Use whole numbers only! (e.g. 7)\n'
);exit();
if __name__=='__main__':
clear()
if int(argv[1]) == int(argv[2]):
slow(
            f'{logo}\n[{R}Error{C}] The minimum length cannot be equal to the maximum length.\n'
);
elif int(argv[1]) > int(argv[2]):
slow(
            f'{logo}\n[{R}Error{C}] The minimum length cannot be greater than the maximum length.\n'
);
else:
try:
main(int(argv[1]),int(argv[2]));
except:
clear();
slow(
                f'{logo}[{R}Error{C}] Unknown error.\n'
);
| t(i):
m |
RequestTi.go | // Code generated by msgraph.go/gen DO NOT EDIT.
package msgraph
import (
"context"
"fmt"
"io/ioutil"
"net/http"
"github.com/yaegashi/msgraph.go/jsonx"
)
// TiIndicatorRequestBuilder is request builder for TiIndicator
type TiIndicatorRequestBuilder struct{ BaseRequestBuilder }
// Request returns TiIndicatorRequest
func (b *TiIndicatorRequestBuilder) Request() *TiIndicatorRequest {
return &TiIndicatorRequest{
BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client},
}
}
// TiIndicatorRequest is request for TiIndicator
type TiIndicatorRequest struct{ BaseRequest }
// Get performs GET request for TiIndicator
func (r *TiIndicatorRequest) Get(ctx context.Context) (resObj *TiIndicator, err error) {
var query string
if r.query != nil {
query = "?" + r.query.Encode()
}
err = r.JSONRequest(ctx, "GET", query, nil, &resObj)
return
}
// Update performs PATCH request for TiIndicator
func (r *TiIndicatorRequest) Update(ctx context.Context, reqObj *TiIndicator) error {
return r.JSONRequest(ctx, "PATCH", "", reqObj, nil)
}
// Delete performs DELETE request for TiIndicator
func (r *TiIndicatorRequest) Delete(ctx context.Context) error {
return r.JSONRequest(ctx, "DELETE", "", nil, nil)
}
//
type TiIndicatorCollectionSubmitTiIndicatorsRequestBuilder struct{ BaseRequestBuilder }
// SubmitTiIndicators action undocumented
func (b *SecurityTiIndicatorsCollectionRequestBuilder) SubmitTiIndicators(reqObj *TiIndicatorCollectionSubmitTiIndicatorsRequestParameter) *TiIndicatorCollectionSubmitTiIndicatorsRequestBuilder {
bb := &TiIndicatorCollectionSubmitTiIndicatorsRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}
bb.BaseRequestBuilder.baseURL += "/submitTiIndicators"
bb.BaseRequestBuilder.requestObject = reqObj
return bb
}
//
type TiIndicatorCollectionSubmitTiIndicatorsRequest struct{ BaseRequest }
//
func (b *TiIndicatorCollectionSubmitTiIndicatorsRequestBuilder) Request() *TiIndicatorCollectionSubmitTiIndicatorsRequest {
return &TiIndicatorCollectionSubmitTiIndicatorsRequest{
BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client, requestObject: b.requestObject},
}
}
//
func (r *TiIndicatorCollectionSubmitTiIndicatorsRequest) Paging(ctx context.Context, method, path string, obj interface{}, n int) ([]TiIndicator, error) {
req, err := r.NewJSONRequest(method, path, obj)
if err != nil {
return nil, err
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := r.client.Do(req)
if err != nil {
return nil, err
}
var values []TiIndicator
for {
if res.StatusCode != http.StatusOK {
b, _ := ioutil.ReadAll(res.Body)
res.Body.Close()
errRes := &ErrorResponse{Response: res}
err := jsonx.Unmarshal(b, errRes)
if err != nil {
return nil, fmt.Errorf("%s: %s", res.Status, string(b))
}
return nil, errRes
}
var (
paging Paging
value []TiIndicator
)
err := jsonx.NewDecoder(res.Body).Decode(&paging)
res.Body.Close()
if err != nil {
return nil, err
}
err = jsonx.Unmarshal(paging.Value, &value)
if err != nil {
return nil, err
}
values = append(values, value...)
if n >= 0 {
n--
}
if n == 0 || len(paging.NextLink) == 0 {
return values, nil
}
req, err = http.NewRequest("GET", paging.NextLink, nil)
if ctx != nil {
req = req.WithContext(ctx)
}
res, err = r.client.Do(req)
if err != nil {
return nil, err
}
}
}
//
func (r *TiIndicatorCollectionSubmitTiIndicatorsRequest) PostN(ctx context.Context, n int) ([]TiIndicator, error) {
return r.Paging(ctx, "POST", "", r.requestObject, n)
}
//
func (r *TiIndicatorCollectionSubmitTiIndicatorsRequest) Post(ctx context.Context) ([]TiIndicator, error) {
return r.Paging(ctx, "POST", "", r.requestObject, 0)
}
//
type TiIndicatorCollectionUpdateTiIndicatorsRequestBuilder struct{ BaseRequestBuilder }
// UpdateTiIndicators action undocumented
func (b *SecurityTiIndicatorsCollectionRequestBuilder) UpdateTiIndicators(reqObj *TiIndicatorCollectionUpdateTiIndicatorsRequestParameter) *TiIndicatorCollectionUpdateTiIndicatorsRequestBuilder {
bb := &TiIndicatorCollectionUpdateTiIndicatorsRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}
bb.BaseRequestBuilder.baseURL += "/updateTiIndicators"
bb.BaseRequestBuilder.requestObject = reqObj
return bb
}
//
type TiIndicatorCollectionUpdateTiIndicatorsRequest struct{ BaseRequest }
//
func (b *TiIndicatorCollectionUpdateTiIndicatorsRequestBuilder) Request() *TiIndicatorCollectionUpdateTiIndicatorsRequest {
return &TiIndicatorCollectionUpdateTiIndicatorsRequest{
BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client, requestObject: b.requestObject},
}
}
//
func (r *TiIndicatorCollectionUpdateTiIndicatorsRequest) Paging(ctx context.Context, method, path string, obj interface{}, n int) ([]TiIndicator, error) {
req, err := r.NewJSONRequest(method, path, obj)
if err != nil {
return nil, err
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := r.client.Do(req)
if err != nil {
return nil, err
}
var values []TiIndicator
for {
if res.StatusCode != http.StatusOK {
b, _ := ioutil.ReadAll(res.Body)
res.Body.Close()
errRes := &ErrorResponse{Response: res}
err := jsonx.Unmarshal(b, errRes)
if err != nil {
return nil, fmt.Errorf("%s: %s", res.Status, string(b))
}
return nil, errRes
}
var (
paging Paging
value []TiIndicator
)
err := jsonx.NewDecoder(res.Body).Decode(&paging)
res.Body.Close()
if err != nil {
return nil, err
}
err = jsonx.Unmarshal(paging.Value, &value)
if err != nil {
return nil, err
}
values = append(values, value...)
if n >= 0 {
n--
}
if n == 0 || len(paging.NextLink) == 0 {
return values, nil
}
req, err = http.NewRequest("GET", paging.NextLink, nil)
if ctx != nil {
req = req.WithContext(ctx)
}
res, err = r.client.Do(req)
if err != nil {
return nil, err
}
}
}
//
func (r *TiIndicatorCollectionUpdateTiIndicatorsRequest) PostN(ctx context.Context, n int) ([]TiIndicator, error) {
return r.Paging(ctx, "POST", "", r.requestObject, n)
}
//
func (r *TiIndicatorCollectionUpdateTiIndicatorsRequest) Post(ctx context.Context) ([]TiIndicator, error) {
return r.Paging(ctx, "POST", "", r.requestObject, 0)
}
//
type TiIndicatorCollectionDeleteTiIndicatorsRequestBuilder struct{ BaseRequestBuilder }
// DeleteTiIndicators action undocumented
func (b *SecurityTiIndicatorsCollectionRequestBuilder) DeleteTiIndicators(reqObj *TiIndicatorCollectionDeleteTiIndicatorsRequestParameter) *TiIndicatorCollectionDeleteTiIndicatorsRequestBuilder {
bb := &TiIndicatorCollectionDeleteTiIndicatorsRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}
bb.BaseRequestBuilder.baseURL += "/deleteTiIndicators"
bb.BaseRequestBuilder.requestObject = reqObj
return bb
}
//
type TiIndicatorCollectionDeleteTiIndicatorsRequest struct{ BaseRequest }
//
func (b *TiIndicatorCollectionDeleteTiIndicatorsRequestBuilder) Request() *TiIndicatorCollectionDeleteTiIndicatorsRequest {
return &TiIndicatorCollectionDeleteTiIndicatorsRequest{
BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client, requestObject: b.requestObject},
}
}
//
func (r *TiIndicatorCollectionDeleteTiIndicatorsRequest) Paging(ctx context.Context, method, path string, obj interface{}, n int) ([]ResultInfo, error) {
req, err := r.NewJSONRequest(method, path, obj)
if err != nil {
return nil, err
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := r.client.Do(req)
if err != nil {
return nil, err
}
var values []ResultInfo
for {
if res.StatusCode != http.StatusOK {
b, _ := ioutil.ReadAll(res.Body)
res.Body.Close()
errRes := &ErrorResponse{Response: res}
err := jsonx.Unmarshal(b, errRes)
if err != nil {
return nil, fmt.Errorf("%s: %s", res.Status, string(b))
}
return nil, errRes
}
var (
paging Paging
value []ResultInfo
)
err := jsonx.NewDecoder(res.Body).Decode(&paging)
res.Body.Close()
if err != nil {
return nil, err
}
err = jsonx.Unmarshal(paging.Value, &value)
if err != nil {
return nil, err
}
values = append(values, value...)
if n >= 0 {
n--
}
if n == 0 || len(paging.NextLink) == 0 {
return values, nil
}
req, err = http.NewRequest("GET", paging.NextLink, nil)
if ctx != nil {
req = req.WithContext(ctx)
}
res, err = r.client.Do(req)
if err != nil {
return nil, err
}
}
}
//
func (r *TiIndicatorCollectionDeleteTiIndicatorsRequest) PostN(ctx context.Context, n int) ([]ResultInfo, error) {
return r.Paging(ctx, "POST", "", r.requestObject, n)
}
//
func (r *TiIndicatorCollectionDeleteTiIndicatorsRequest) Post(ctx context.Context) ([]ResultInfo, error) {
return r.Paging(ctx, "POST", "", r.requestObject, 0)
}
//
type TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequestBuilder struct{ BaseRequestBuilder }
// DeleteTiIndicatorsByExternalID action undocumented
func (b *SecurityTiIndicatorsCollectionRequestBuilder) DeleteTiIndicatorsByExternalID(reqObj *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequestParameter) *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequestBuilder {
bb := &TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequestBuilder{BaseRequestBuilder: b.BaseRequestBuilder}
bb.BaseRequestBuilder.baseURL += "/deleteTiIndicatorsByExternalId"
bb.BaseRequestBuilder.requestObject = reqObj
return bb
}
//
type TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest struct{ BaseRequest }
//
func (b *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequestBuilder) Request() *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest {
return &TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest{
BaseRequest: BaseRequest{baseURL: b.baseURL, client: b.client, requestObject: b.requestObject},
}
}
//
func (r *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest) Paging(ctx context.Context, method, path string, obj interface{}, n int) ([]ResultInfo, error) {
req, err := r.NewJSONRequest(method, path, obj)
if err != nil {
return nil, err
}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err := r.client.Do(req)
if err != nil {
return nil, err
}
var values []ResultInfo
for {
if res.StatusCode != http.StatusOK {
b, _ := ioutil.ReadAll(res.Body)
res.Body.Close()
errRes := &ErrorResponse{Response: res}
err := jsonx.Unmarshal(b, errRes)
if err != nil {
return nil, fmt.Errorf("%s: %s", res.Status, string(b))
}
return nil, errRes
}
var (
paging Paging
value []ResultInfo
)
err := jsonx.NewDecoder(res.Body).Decode(&paging)
res.Body.Close()
if err != nil {
return nil, err
}
err = jsonx.Unmarshal(paging.Value, &value)
if err != nil {
return nil, err
}
values = append(values, value...)
if n >= 0 {
n--
}
if n == 0 || len(paging.NextLink) == 0 {
return values, nil
}
		req, err = http.NewRequest("GET", paging.NextLink, nil)
		if err != nil {
			return nil, err
		}
if ctx != nil {
req = req.WithContext(ctx)
}
res, err = r.client.Do(req)
if err != nil {
return nil, err
}
}
}
| }
// Post performs a POST request and returns all pages of results.
func (r *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest) Post(ctx context.Context) ([]ResultInfo, error) {
return r.Paging(ctx, "POST", "", r.requestObject, 0)
} | //
func (r *TiIndicatorCollectionDeleteTiIndicatorsByExternalIDRequest) PostN(ctx context.Context, n int) ([]ResultInfo, error) {
return r.Paging(ctx, "POST", "", r.requestObject, n) |
font_request_builder.go | package font
import (
ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9 "github.com/microsoft/kiota/abstractions/go"
i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55 "github.com/microsoft/kiota/abstractions/go/serialization"
i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87 "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph"
)
// FontRequestBuilder builds and executes requests for operations under \workbooks\{driveItem-id}\workbook\worksheets\{workbookWorksheet-id}\charts\{workbookChart-id}\axes\seriesAxis\title\format\font
type FontRequestBuilder struct {
// Path parameters for the request
pathParameters map[string]string;
// The request adapter to use to execute the requests.
requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter;
// Url template to use to build the URL for the current request builder
urlTemplate string;
}
// FontRequestBuilderDeleteOptions options for Delete
type FontRequestBuilderDeleteOptions struct {
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// FontRequestBuilderGetOptions options for Get
type FontRequestBuilderGetOptions struct {
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Request query parameters
Q *FontRequestBuilderGetQueryParameters;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// FontRequestBuilderGetQueryParameters holds the query parameters (select, expand) for reading the font of the chart axis title format. The font itself is read-only.
type FontRequestBuilderGetQueryParameters struct {
// Expand related entities
Expand []string;
// Select properties to be returned
Select_escaped []string;
}
// FontRequestBuilderPatchOptions options for Patch
type FontRequestBuilderPatchOptions struct {
    // Body is the WorkbookChartFont to send as the PATCH request body.
Body *i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.WorkbookChartFont;
// Request headers
H map[string]string;
// Request options
O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
// Response handler to use in place of the default response handling provided by the core service
ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// NewFontRequestBuilderInternal instantiates a new FontRequestBuilder and sets the default values.
func NewFontRequestBuilderInternal(pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*FontRequestBuilder) |
// NewFontRequestBuilder instantiates a new FontRequestBuilder and sets the default values.
func NewFontRequestBuilder(rawUrl string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*FontRequestBuilder) {
urlParams := make(map[string]string)
urlParams["request-raw-url"] = rawUrl
return NewFontRequestBuilderInternal(urlParams, requestAdapter)
}
// CreateDeleteRequestInformation builds the request information for a DELETE on the font of the chart axis title format.
func (m *FontRequestBuilder) CreateDeleteRequestInformation(options *FontRequestBuilderDeleteOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.DELETE
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// CreateGetRequestInformation builds the request information for reading the font attributes (name, size, color, etc.) of the chart axis title. Read-only.
func (m *FontRequestBuilder) CreateGetRequestInformation(options *FontRequestBuilderGetOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.GET
if options != nil && options.Q != nil {
requestInfo.AddQueryParameters(*(options.Q))
}
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// CreatePatchRequestInformation builds the request information for updating the font of the chart axis title format.
func (m *FontRequestBuilder) CreatePatchRequestInformation(options *FontRequestBuilderPatchOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
requestInfo.UrlTemplate = m.urlTemplate
requestInfo.PathParameters = m.pathParameters
requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.PATCH
    if options != nil && options.Body != nil {
        requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", options.Body)
    }
if options != nil && options.H != nil {
requestInfo.Headers = options.H
}
if options != nil && len(options.O) != 0 {
err := requestInfo.AddRequestOptions(options.O...)
if err != nil {
return nil, err
}
}
return requestInfo, nil
}
// Delete sends a DELETE request for the font of the chart axis title format.
func (m *FontRequestBuilder) Delete(options *FontRequestBuilderDeleteOptions)(error) {
requestInfo, err := m.CreateDeleteRequestInformation(options);
if err != nil {
return err
}
err = m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
if err != nil {
return err
}
return nil
}
// Get retrieves the font attributes, such as font name, font size and color, of the chart axis title object. Read-only.
func (m *FontRequestBuilder) Get(options *FontRequestBuilderGetOptions)(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.WorkbookChartFont, error) {
requestInfo, err := m.CreateGetRequestInformation(options);
if err != nil {
return nil, err
}
res, err := m.requestAdapter.SendAsync(*requestInfo, func () i04eb5309aeaafadd28374d79c8471df9b267510b4dc2e3144c378c50f6fd7b55.Parsable { return i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.NewWorkbookChartFont() }, nil)
if err != nil {
return nil, err
}
return res.(*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.WorkbookChartFont), nil
}
// Patch updates the font attributes of the chart axis title object.
func (m *FontRequestBuilder) Patch(options *FontRequestBuilderPatchOptions)(error) {
requestInfo, err := m.CreatePatchRequestInformation(options);
if err != nil {
return err
}
err = m.requestAdapter.SendNoContentAsync(*requestInfo, nil)
if err != nil {
return err
}
return nil
}
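// exampleGetFont is a hypothetical illustration and is not emitted by the Kiota generator; it shows
// how the builder above could be used to read the chart axis title font, assuming the caller already
// holds the path parameters and a configured request adapter.
func exampleGetFont(pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter) (*i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.WorkbookChartFont, error) {
    builder := NewFontRequestBuilderInternal(pathParameters, requestAdapter)
    // nil options: no extra headers, query parameters, request options or custom response handler.
    return builder.Get(nil)
}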
| {
m := &FontRequestBuilder{
}
m.urlTemplate = "{+baseurl}/workbooks/{driveItem_id}/workbook/worksheets/{workbookWorksheet_id}/charts/{workbookChart_id}/axes/seriesAxis/title/format/font{?select,expand}";
urlTplParams := make(map[string]string)
for idx, item := range pathParameters {
urlTplParams[idx] = item
}
m.pathParameters = pathParameters;
m.requestAdapter = requestAdapter;
return m
} |
sing_song.rs | /// https://atcoder.jp/contests/typical90/tasks/typical90_n
fn main() {
let n = io::read_one::<usize>();
let mut an = io::read::<isize>();
let mut bn = io::read::<isize>();
an.sort();
bn.sort();
let ans = (0..n).map(|i| (an[i] - bn[i]).abs()).sum::<isize>();
println!("{}", ans);
}
#[allow(unused)]
mod io {
use std::io;
pub fn read<T>() -> Vec<T>
where
T: std::str::FromStr,
T::Err: std::fmt::Debug,
|
pub fn read_one<T>() -> T
where
T: std::str::FromStr,
T::Err: std::fmt::Debug,
{
let mut buf = String::new();
io::stdin().read_line(&mut buf).unwrap();
buf.trim().parse().unwrap()
}
}
| {
let mut buf = String::new();
io::stdin().read_line(&mut buf).unwrap();
buf.split_whitespace()
.map(|s| s.trim().parse().unwrap())
.collect()
} |
a9a36e0e2149dc36ea813b7d08221b9744835751.js | mycallback( {"CONTRIBUTOR OCCUPATION": "Homemaker", "CONTRIBUTION AMOUNT (F3L Bundled)": "100.00", "ELECTION CODE": "G2010", "MEMO CODE": "", "CONTRIBUTOR EMPLOYER": "None", "DONOR CANDIDATE STATE": "", "CONTRIBUTOR STREET 1": "7 Braid Hill Dr", "CONTRIBUTOR MIDDLE NAME": "", "DONOR CANDIDATE FEC ID": "", "DONOR CANDIDATE MIDDLE NAME": "", "CONTRIBUTOR STATE": "KS", "DONOR CANDIDATE FIRST NAME": "", "CONTRIBUTOR FIRST NAME": "Teresa", "BACK REFERENCE SCHED NAME": "", "DONOR CANDIDATE DISTRICT": "", "CONTRIBUTION DATE": "20100915", "DONOR COMMITTEE NAME": "", "MEMO TEXT/DESCRIPTION": "", "Reference to SI or SL system code that identifies the Account": "", "FILER COMMITTEE ID NUMBER": "C00019380", "DONOR CANDIDATE LAST NAME": "", "CONTRIBUTOR LAST NAME": "Krusor", "_record_type": "fec.version.v7_0.SA", "CONDUIT STREET2": "", "CONDUIT STREET1": "", "DONOR COMMITTEE FEC ID": "", "CONTRIBUTION PURPOSE DESCRIP": "", "CONTRIBUTOR ZIP": "671566303", "CONTRIBUTOR STREET 2": "", "CONDUIT CITY": "", "ENTITY TYPE": "IND", "CONTRIBUTOR CITY": "Winfield", "CONTRIBUTOR SUFFIX": "", "TRANSACTION ID": "C2277560", "DONOR CANDIDATE SUFFIX": "", "DONOR CANDIDATE OFFICE": "", "CONTRIBUTION PURPOSE CODE": "", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110504/727387.fec_1.yml", "CONDUIT STATE": "", "CONTRIBUTOR ORGANIZATION NAME": "", "BACK REFERENCE TRAN ID NUMBER": "", "DONOR CANDIDATE PREFIX": "", "CONTRIBUTOR PREFIX": "", "CONDUIT ZIP": "", "CONDUIT NAME": "", "CONTRIBUTION AGGREGATE F3L Semi-annual Bundled": "900.00", "FORM TYPE": "SA11AI"});
mycallback( {"CONTRIBUTOR OCCUPATION": "Homemaker", "CONTRIBUTION AMOUNT (F3L Bundled)": "100.00", "ELECTION CODE": "G2010", "MEMO CODE": "", "CONTRIBUTOR EMPLOYER": "None", "DONOR CANDIDATE STATE": "", "CONTRIBUTOR STREET 1": "7 Braid Hill Dr", "CONTRIBUTOR MIDDLE NAME": "", "DONOR CANDIDATE FEC ID": "", "DONOR CANDIDATE MIDDLE NAME": "", "CONTRIBUTOR STATE": "KS", "DONOR CANDIDATE FIRST NAME": "", "CONTRIBUTOR FIRST NAME": "Teresa", "BACK REFERENCE SCHED NAME": "", "DONOR CANDIDATE DISTRICT": "", "CONTRIBUTION DATE": "20100915", "DONOR COMMITTEE NAME": "", "MEMO TEXT/DESCRIPTION": "", "Reference to SI or SL system code that identifies the Account": "", "FILER COMMITTEE ID NUMBER": "C00019380", "DONOR CANDIDATE LAST NAME": "", "CONTRIBUTOR LAST NAME": "Krusor", "_record_type": "fec.version.v7_0.SA", "CONDUIT STREET2": "", "CONDUIT STREET1": "", "DONOR COMMITTEE FEC ID": "", "CONTRIBUTION PURPOSE DESCRIP": "", "CONTRIBUTOR ZIP": "671566303", "CONTRIBUTOR STREET 2": "", "CONDUIT CITY": "", "ENTITY TYPE": "IND", "CONTRIBUTOR CITY": "Winfield", "CONTRIBUTOR SUFFIX": "", "TRANSACTION ID": "C2277560", "DONOR CANDIDATE SUFFIX": "", "DONOR CANDIDATE OFFICE": "", "CONTRIBUTION PURPOSE CODE": "", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110504/727387.fec_1.yml", "CONDUIT STATE": "", "CONTRIBUTOR ORGANIZATION NAME": "", "BACK REFERENCE TRAN ID NUMBER": "", "DONOR CANDIDATE PREFIX": "", "CONTRIBUTOR PREFIX": "", "CONDUIT ZIP": "", "CONDUIT NAME": "", "CONTRIBUTION AGGREGATE F3L Semi-annual Bundled": "900.00", "FORM TYPE": "SA11AI"}); |
||
lcdm16w.rs | #[doc = "Reader of register LCDM16W"]
pub type R = crate::R<u16, super::LCDM16W>;
#[doc = "Writer for register LCDM16W"]
pub type W = crate::W<u16, super::LCDM16W>;
#[doc = "Register LCDM16W `reset()`'s with value 0"]
impl crate::ResetValue for super::LCDM16W {
type Type = u16;
#[inline(always)]
fn reset_value() -> Self::Type { | impl R {}
impl W {} | 0
}
} |
updater.go | package update
import (
"context"
"fmt"
"os"
"os/exec"
"path/filepath"
"github.com/kbrew-dev/kbrew/pkg/util"
"github.com/kbrew-dev/kbrew/pkg/version" |
"github.com/pkg/errors"
)
const (
upgradeCmd = "curl -sfL https://raw.githubusercontent.com/kbrew-dev/kbrew/main/install.sh | sh"
)
func getBinDir() (string, error) {
path, err := os.Executable()
if err != nil {
return "", err
}
return filepath.Dir(path), nil
}
// IsAvailable checks if a new version of GitHub release available
func IsAvailable(ctx context.Context) (string, error) {
release, err := util.GetLatestVersion(ctx)
if err != nil {
return "", errors.Wrap(err, "failed to check for kbrew updates")
}
if version.Version != *release.TagName {
return *release.TagName, nil
}
return "", nil
}
// CheckRelease checks for the latest release
func CheckRelease(ctx context.Context) error {
release, err := IsAvailable(ctx)
if err != nil {
return errors.Wrap(err, "failed to check for kbrew updates")
}
if release == "" {
return nil
}
fmt.Printf("kbrew %s is available, upgrading...\n", release)
return upgradeKbrew(ctx)
}
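// exampleAutoUpdate is a hypothetical illustration, not part of the package API: a CLI entry point
// could run the update check like this, assuming it already has a context it is willing to block on.
func exampleAutoUpdate(ctx context.Context) {
	if err := CheckRelease(ctx); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}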
func upgradeKbrew(ctx context.Context) error {
dir, err := getBinDir()
if err != nil {
return errors.Wrap(err, "failed to get executable dir")
}
os.Setenv("BINDIR", dir)
defer os.Unsetenv("BINDIR")
return execCommand(ctx, upgradeCmd)
}
func execCommand(ctx context.Context, cmd string) error {
c := exec.CommandContext(ctx, "sh", "-c", cmd)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
return c.Run()
} | |
subscription.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package apimanagement
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// Subscription details.
// API Version: 2019-12-01.
type Subscription struct {
pulumi.CustomResourceState
// Determines whether tracing is enabled
AllowTracing pulumi.BoolPtrOutput `pulumi:"allowTracing"`
// Subscription creation date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
CreatedDate pulumi.StringOutput `pulumi:"createdDate"`
// The name of the subscription, or null if the subscription has no name.
DisplayName pulumi.StringPtrOutput `pulumi:"displayName"`
// Date when subscription was cancelled or expired. The setting is for audit purposes only and the subscription is not automatically cancelled. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
EndDate pulumi.StringPtrOutput `pulumi:"endDate"`
// Subscription expiration date. The setting is for audit purposes only and the subscription is not automatically expired. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
ExpirationDate pulumi.StringPtrOutput `pulumi:"expirationDate"`
// Resource name.
Name pulumi.StringOutput `pulumi:"name"`
// Upcoming subscription expiration notification date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
NotificationDate pulumi.StringPtrOutput `pulumi:"notificationDate"`
// The user resource identifier of the subscription owner. The value is a valid relative URL in the format of /users/{userId} where {userId} is a user identifier.
OwnerId pulumi.StringPtrOutput `pulumi:"ownerId"`
// Subscription primary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
PrimaryKey pulumi.StringPtrOutput `pulumi:"primaryKey"`
// Scope like /products/{productId} or /apis or /apis/{apiId}.
Scope pulumi.StringOutput `pulumi:"scope"`
// Subscription secondary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
SecondaryKey pulumi.StringPtrOutput `pulumi:"secondaryKey"`
// Subscription activation date. The setting is for audit purposes only and the subscription is not automatically activated. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
StartDate pulumi.StringPtrOutput `pulumi:"startDate"`
// Subscription state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
State pulumi.StringOutput `pulumi:"state"`
// Optional subscription comment added by an administrator when the state is changed to the 'rejected'.
StateComment pulumi.StringPtrOutput `pulumi:"stateComment"`
// Resource type for API Management resource.
Type pulumi.StringOutput `pulumi:"type"`
}
// NewSubscription registers a new resource with the given unique name, arguments, and options.
func NewSubscription(ctx *pulumi.Context,
name string, args *SubscriptionArgs, opts ...pulumi.ResourceOption) (*Subscription, error) {
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.DisplayName == nil {
return nil, errors.New("invalid value for required argument 'DisplayName'")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
if args.Scope == nil {
return nil, errors.New("invalid value for required argument 'Scope'")
}
if args.ServiceName == nil {
return nil, errors.New("invalid value for required argument 'ServiceName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:apimanagement/latest:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20160707:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20161010:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20170301:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20180101:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20180601preview:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20190101:Subscription"),
},
{ | Type: pulumi.String("azure-nextgen:apimanagement/v20191201:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20191201preview:Subscription"),
},
{
Type: pulumi.String("azure-nextgen:apimanagement/v20200601preview:Subscription"),
},
})
opts = append(opts, aliases)
var resource Subscription
err := ctx.RegisterResource("azure-nextgen:apimanagement:Subscription", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// GetSubscription gets an existing Subscription resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetSubscription(ctx *pulumi.Context,
name string, id pulumi.IDInput, state *SubscriptionState, opts ...pulumi.ResourceOption) (*Subscription, error) {
var resource Subscription
err := ctx.ReadResource("azure-nextgen:apimanagement:Subscription", name, id, state, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
// Input properties used for looking up and filtering Subscription resources.
type subscriptionState struct {
// Determines whether tracing is enabled
AllowTracing *bool `pulumi:"allowTracing"`
// Subscription creation date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
CreatedDate *string `pulumi:"createdDate"`
// The name of the subscription, or null if the subscription has no name.
DisplayName *string `pulumi:"displayName"`
// Date when subscription was cancelled or expired. The setting is for audit purposes only and the subscription is not automatically cancelled. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
EndDate *string `pulumi:"endDate"`
// Subscription expiration date. The setting is for audit purposes only and the subscription is not automatically expired. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
ExpirationDate *string `pulumi:"expirationDate"`
// Resource name.
Name *string `pulumi:"name"`
// Upcoming subscription expiration notification date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
NotificationDate *string `pulumi:"notificationDate"`
// The user resource identifier of the subscription owner. The value is a valid relative URL in the format of /users/{userId} where {userId} is a user identifier.
OwnerId *string `pulumi:"ownerId"`
// Subscription primary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
PrimaryKey *string `pulumi:"primaryKey"`
// Scope like /products/{productId} or /apis or /apis/{apiId}.
Scope *string `pulumi:"scope"`
// Subscription secondary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
SecondaryKey *string `pulumi:"secondaryKey"`
// Subscription activation date. The setting is for audit purposes only and the subscription is not automatically activated. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
StartDate *string `pulumi:"startDate"`
// Subscription state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
State *string `pulumi:"state"`
// Optional subscription comment added by an administrator when the state is changed to the 'rejected'.
StateComment *string `pulumi:"stateComment"`
// Resource type for API Management resource.
Type *string `pulumi:"type"`
}
type SubscriptionState struct {
// Determines whether tracing is enabled
AllowTracing pulumi.BoolPtrInput
// Subscription creation date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
CreatedDate pulumi.StringPtrInput
// The name of the subscription, or null if the subscription has no name.
DisplayName pulumi.StringPtrInput
// Date when subscription was cancelled or expired. The setting is for audit purposes only and the subscription is not automatically cancelled. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
EndDate pulumi.StringPtrInput
// Subscription expiration date. The setting is for audit purposes only and the subscription is not automatically expired. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
ExpirationDate pulumi.StringPtrInput
// Resource name.
Name pulumi.StringPtrInput
// Upcoming subscription expiration notification date. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
NotificationDate pulumi.StringPtrInput
// The user resource identifier of the subscription owner. The value is a valid relative URL in the format of /users/{userId} where {userId} is a user identifier.
OwnerId pulumi.StringPtrInput
// Subscription primary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
PrimaryKey pulumi.StringPtrInput
// Scope like /products/{productId} or /apis or /apis/{apiId}.
Scope pulumi.StringPtrInput
// Subscription secondary key. This property will not be filled on 'GET' operations! Use '/listSecrets' POST request to get the value.
SecondaryKey pulumi.StringPtrInput
// Subscription activation date. The setting is for audit purposes only and the subscription is not automatically activated. The subscription lifecycle can be managed by using the `state` property. The date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO 8601 standard.
StartDate pulumi.StringPtrInput
// Subscription state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
State pulumi.StringPtrInput
// Optional subscription comment added by an administrator when the state is changed to the 'rejected'.
StateComment pulumi.StringPtrInput
// Resource type for API Management resource.
Type pulumi.StringPtrInput
}
func (SubscriptionState) ElementType() reflect.Type {
return reflect.TypeOf((*subscriptionState)(nil)).Elem()
}
type subscriptionArgs struct {
// Determines whether tracing can be enabled
AllowTracing *bool `pulumi:"allowTracing"`
// Determines the type of application which send the create user request. Default is legacy publisher portal.
AppType *string `pulumi:"appType"`
// Subscription name.
DisplayName string `pulumi:"displayName"`
// Notify change in Subscription State.
// - If false, do not send any email notification for change of state of subscription
// - If true, send email notification of change of state of subscription
Notify *bool `pulumi:"notify"`
// User (user id path) for whom subscription is being created in form /users/{userId}
OwnerId *string `pulumi:"ownerId"`
// Primary subscription key. If not specified during request key will be generated automatically.
PrimaryKey *string `pulumi:"primaryKey"`
// The name of the resource group.
ResourceGroupName string `pulumi:"resourceGroupName"`
// Scope like /products/{productId} or /apis or /apis/{apiId}.
Scope string `pulumi:"scope"`
// Secondary subscription key. If not specified during request key will be generated automatically.
SecondaryKey *string `pulumi:"secondaryKey"`
// The name of the API Management service.
ServiceName string `pulumi:"serviceName"`
// Subscription entity Identifier. The entity represents the association between a user and a product in API Management.
Sid *string `pulumi:"sid"`
// Initial subscription state. If no value is specified, subscription is created with Submitted state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
State *string `pulumi:"state"`
}
// The set of arguments for constructing a Subscription resource.
type SubscriptionArgs struct {
// Determines whether tracing can be enabled
AllowTracing pulumi.BoolPtrInput
// Determines the type of application which send the create user request. Default is legacy publisher portal.
AppType pulumi.StringPtrInput
// Subscription name.
DisplayName pulumi.StringInput
// Notify change in Subscription State.
// - If false, do not send any email notification for change of state of subscription
// - If true, send email notification of change of state of subscription
Notify pulumi.BoolPtrInput
// User (user id path) for whom subscription is being created in form /users/{userId}
OwnerId pulumi.StringPtrInput
// Primary subscription key. If not specified during request key will be generated automatically.
PrimaryKey pulumi.StringPtrInput
// The name of the resource group.
ResourceGroupName pulumi.StringInput
// Scope like /products/{productId} or /apis or /apis/{apiId}.
Scope pulumi.StringInput
// Secondary subscription key. If not specified during request key will be generated automatically.
SecondaryKey pulumi.StringPtrInput
// The name of the API Management service.
ServiceName pulumi.StringInput
// Subscription entity Identifier. The entity represents the association between a user and a product in API Management.
Sid pulumi.StringPtrInput
// Initial subscription state. If no value is specified, subscription is created with Submitted state. Possible states are * active – the subscription is active, * suspended – the subscription is blocked, and the subscriber cannot call any APIs of the product, * submitted – the subscription request has been made by the developer, but has not yet been approved or rejected, * rejected – the subscription request has been denied by an administrator, * cancelled – the subscription has been cancelled by the developer or administrator, * expired – the subscription reached its expiration date and was deactivated.
State *SubscriptionStateEnum
}
func (SubscriptionArgs) ElementType() reflect.Type {
return reflect.TypeOf((*subscriptionArgs)(nil)).Elem()
}
type SubscriptionInput interface {
pulumi.Input
ToSubscriptionOutput() SubscriptionOutput
ToSubscriptionOutputWithContext(ctx context.Context) SubscriptionOutput
}
func (*Subscription) ElementType() reflect.Type {
return reflect.TypeOf((*Subscription)(nil))
}
func (i *Subscription) ToSubscriptionOutput() SubscriptionOutput {
return i.ToSubscriptionOutputWithContext(context.Background())
}
func (i *Subscription) ToSubscriptionOutputWithContext(ctx context.Context) SubscriptionOutput {
return pulumi.ToOutputWithContext(ctx, i).(SubscriptionOutput)
}
type SubscriptionOutput struct {
*pulumi.OutputState
}
func (SubscriptionOutput) ElementType() reflect.Type {
return reflect.TypeOf((*Subscription)(nil))
}
func (o SubscriptionOutput) ToSubscriptionOutput() SubscriptionOutput {
return o
}
func (o SubscriptionOutput) ToSubscriptionOutputWithContext(ctx context.Context) SubscriptionOutput {
return o
}
func init() {
pulumi.RegisterOutputType(SubscriptionOutput{})
} | |
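// exampleNewSubscription is a hypothetical illustration and is not emitted by the Pulumi SDK
// generator; it registers a Subscription with the four arguments that NewSubscription above treats
// as required. The names and scope are placeholders.
func exampleNewSubscription(ctx *pulumi.Context) (*Subscription, error) {
	return NewSubscription(ctx, "exampleSubscription", &SubscriptionArgs{
		DisplayName:       pulumi.String("example subscription"),
		ResourceGroupName: pulumi.String("example-rg"),
		ServiceName:       pulumi.String("example-apim"),
		Scope:             pulumi.String("/products/starter"),
	})
}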
autofocus.directive.d.ts | import { ElementRef, OnChanges, OnDestroy } from '@angular/core';
import * as ɵngcc0 from '@angular/core';
export declare class AutofocusDirective implements OnChanges, OnDestroy {
private element;
private document;
isFocusActive: boolean;
private activeElement;
constructor(element: ElementRef, document: any); | static ɵdir: ɵngcc0.ɵɵDirectiveDefWithMeta<AutofocusDirective, "[timepickerAutofocus]", never, { "isFocusActive": "timepickerAutofocus"; }, {}, never>;
}
//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiYXV0b2ZvY3VzLmRpcmVjdGl2ZS5kLnRzIiwic291cmNlcyI6WyJhdXRvZm9jdXMuZGlyZWN0aXZlLmQudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IkFBQUE7O0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDQTs7O0FBQ0EiLCJzb3VyY2VzQ29udGVudCI6WyJpbXBvcnQgeyBFbGVtZW50UmVmLCBPbkNoYW5nZXMsIE9uRGVzdHJveSB9IGZyb20gJ0Bhbmd1bGFyL2NvcmUnO1xuZXhwb3J0IGRlY2xhcmUgY2xhc3MgQXV0b2ZvY3VzRGlyZWN0aXZlIGltcGxlbWVudHMgT25DaGFuZ2VzLCBPbkRlc3Ryb3kge1xuICAgIHByaXZhdGUgZWxlbWVudDtcbiAgICBwcml2YXRlIGRvY3VtZW50O1xuICAgIGlzRm9jdXNBY3RpdmU6IGJvb2xlYW47XG4gICAgcHJpdmF0ZSBhY3RpdmVFbGVtZW50O1xuICAgIGNvbnN0cnVjdG9yKGVsZW1lbnQ6IEVsZW1lbnRSZWYsIGRvY3VtZW50OiBhbnkpO1xuICAgIG5nT25DaGFuZ2VzKCk6IHZvaWQ7XG4gICAgbmdPbkRlc3Ryb3koKTogdm9pZDtcbn1cbiJdfQ== | ngOnChanges(): void;
ngOnDestroy(): void;
static ɵfac: ɵngcc0.ɵɵFactoryDef<AutofocusDirective, [null, { optional: true; }]>; |
SVG-Colored-Textures.js | svgColoredTexture = (function() {
var svgColorFilterIds, svgTexturePatternIds;
function sv | {}
svgColorFilterIds = ["YlGn-1", "YlGn-2", "YlGn-3", "YlGn-4", "YlGn-5", "YlGn-6", "YlGn-7", "YlGn-8", "YlGn-9", "YlGnBu-1", "YlGnBu-2", "YlGnBu-3", "YlGnBu-4", "YlGnBu-5", "YlGnBu-6", "YlGnBu-7", "YlGnBu-8", "YlGnBu-9", "GnBu-1", "GnBu-2", "GnBu-3", "GnBu-4", "GnBu-5", "GnBu-6", "GnBu-7", "GnBu-8", "GnBu-9", "BuGn-1", "BuGn-2", "BuGn-3", "BuGn-4", "BuGn-5", "BuGn-6", "BuGn-7", "BuGn-8", "BuGn-9", "PuBuGn-1", "PuBuGn-2", "PuBuGn-3", "PuBuGn-4", "PuBuGn-5", "PuBuGn-6", "PuBuGn-7", "PuBuGn-8", "PuBuGn-9", "PuBu-1", "PuBu-2", "PuBu-3", "PuBu-4", "PuBu-5", "PuBu-6", "PuBu-7", "PuBu-8", "PuBu-9", "BuPu-1", "BuPu-2", "BuPu-3", "BuPu-4", "BuPu-5", "BuPu-6", "BuPu-7", "BuPu-8", "BuPu-9", "RdPu-1", "RdPu-2", "RdPu-3", "RdPu-4", "RdPu-5", "RdPu-6", "RdPu-7", "RdPu-8", "RdPu-9", "PuRd-1", "PuRd-2", "PuRd-3", "PuRd-4", "PuRd-5", "PuRd-6", "PuRd-7", "PuRd-8", "PuRd-9", "OrRd-1", "OrRd-2", "OrRd-3", "OrRd-4", "OrRd-5", "OrRd-6", "OrRd-7", "OrRd-8", "OrRd-9", "YlOrRd-1", "YlOrRd-2", "YlOrRd-3", "YlOrRd-4", "YlOrRd-5", "YlOrRd-6", "YlOrRd-7", "YlOrRd-8", "YlOrRd-9", "YlOrBr-1", "YlOrBr-2", "YlOrBr-3", "YlOrBr-4", "YlOrBr-5", "YlOrBr-6", "YlOrBr-7", "YlOrBr-8", "YlOrBr-9", "Purples-1", "Purples-2", "Purples-3", "Purples-4", "Purples-5", "Purples-6", "Purples-7", "Purples-8", "Purples-9", "Blues-1", "Blues-2", "Blues-3", "Blues-4", "Blues-5", "Blues-6", "Blues-7", "Blues-8", "Blues-9", "Greens-1", "Greens-2", "Greens-3", "Greens-4", "Greens-5", "Greens-6", "Greens-7", "Greens-8", "Greens-9", "Oranges-1", "Oranges-2", "Oranges-3", "Oranges-4", "Oranges-5", "Oranges-6", "Oranges-7", "Oranges-8", "Oranges-9", "Reds-1", "Reds-2", "Reds-3", "Reds-4", "Reds-5", "Reds-6", "Reds-7", "Reds-8", "Reds-9"];
svgTexturePatternIds = ["texture-scoreboard-2", "texture-scoreboard-1", "texture-tiny-x-1", "texture-tiny-x-2", "texture-tiny-x-3", "texture-ziggy", "texture-square", "texture-tverlapping-thread-1", "texture-tverlapping-thread-2", "texture-jeans-1", "texture-jeans-2", "texture-diagonal-stripes", "texture-construction-paper", "texture-zags-glow", "texture-cross-stripes", "texture-daenerys", "texture-triangles", "texture-cubes"];
svgColoredTexture.prototype.getRandomColorFilterId = function() {
return "url(#" + svgColorFilterIds[Math.floor(Math.random() * svgColorFilterIds.length)] + ")";
};
svgColoredTexture.prototype.getTexturePatternId = function() {
return "url(#" + svgTexturePatternIds[Math.floor(Math.random() * svgTexturePatternIds.length)] + ")";
};
return svgColoredTexture;
})(); | gColoredTexture() |
test_serialization.py | # Copyright 2020 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from rclpy.serialization import deserialize_message
from rclpy.serialization import serialize_message
from test_msgs.message_fixtures import get_test_msg
from test_msgs.msg import Arrays
from test_msgs.msg import BasicTypes
from test_msgs.msg import BoundedSequences
from test_msgs.msg import Builtins
from test_msgs.msg import Constants
from test_msgs.msg import Defaults
from test_msgs.msg import Empty
from test_msgs.msg import MultiNested
from test_msgs.msg import Nested
from test_msgs.msg import Strings
from test_msgs.msg import UnboundedSequences
from test_msgs.msg import WStrings
test_msgs = [
(get_test_msg('Arrays'), Arrays),
(get_test_msg('BasicTypes'), BasicTypes),
(get_test_msg('BoundedSequences'), BoundedSequences),
(get_test_msg('Builtins'), Builtins),
(get_test_msg('Constants'), Constants),
(get_test_msg('Defaults'), Defaults),
(get_test_msg('Empty'), Empty),
(get_test_msg('MultiNested'), MultiNested),
(get_test_msg('Nested'), Nested),
(get_test_msg('Strings'), Strings),
(get_test_msg('UnboundedSequences'), UnboundedSequences),
(get_test_msg('WStrings'), WStrings),
]
@pytest.mark.parametrize('msgs,msg_type', test_msgs)
def | (msgs, msg_type):
"""Test message serialization/deserialization."""
for msg in msgs:
msg_serialized = serialize_message(msg)
msg_deserialized = deserialize_message(msg_serialized, msg_type)
assert msg == msg_deserialized
def test_set_float32():
"""Test message serialization/deserialization of float32 type."""
# During (de)serialization we convert to a C float before converting to a PyObject.
# This can result in a loss of precision
msg = BasicTypes()
msg.float32_value = 1.125 # can be represented without rounding
msg_serialized = serialize_message(msg)
msg_deserialized = deserialize_message(msg_serialized, BasicTypes)
assert msg.float32_value == msg_deserialized.float32_value
msg = BasicTypes()
msg.float32_value = 3.14 # can NOT be represented without rounding
msg_serialized = serialize_message(msg)
msg_deserialized = deserialize_message(msg_serialized, BasicTypes)
assert msg.float32_value == round(msg_deserialized.float32_value, 2)
| test_serialize_deserialize |
solar_client.py | # -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.solar.v20181011 import solar_client as solar_client_v20181011
from tencentcloud.solar.v20181011 import models as models_v20181011
def doDescribeSubProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeSubProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeSubProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProjectStock(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProjectStockRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProjectStock(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourceTemplateHeaders(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeResourceTemplateHeadersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeResourceTemplateHeaders(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCustomer(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCustomerRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCustomer(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doSendWxTouchTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.SendWxTouchTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.SendWxTouchTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProject(args, parsed_globals): |
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplenishProjectStock(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ReplenishProjectStockRequest()
model.from_json_string(json.dumps(args))
rsp = client.ReplenishProjectStock(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProjects(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProjectsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProjects(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doOffLineProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.OffLineProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.OffLineProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doExpireFlow(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ExpireFlowRequest()
model.from_json_string(json.dumps(args))
rsp = client.ExpireFlow(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCheckStaffChUser(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CheckStaffChUserRequest()
model.from_json_string(json.dumps(args))
rsp = client.CheckStaffChUser(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeCustomers(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeCustomersRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeCustomers(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doModifyProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ModifyProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.ModifyProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateSubProject(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateSubProjectRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateSubProject(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCopyActivityChannel(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.SolarClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CopyActivityChannelRequest()
model.from_json_string(json.dumps(args))
rsp = client.CopyActivityChannel(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
CLIENT_MAP = {
"v20181011": solar_client_v20181011,
}
MODELS_MAP = {
"v20181011": models_v20181011,
}
ACTION_MAP = {
"DescribeSubProject": doDescribeSubProject,
"DescribeProjectStock": doDescribeProjectStock,
"DescribeProject": doDescribeProject,
"DescribeResourceTemplateHeaders": doDescribeResourceTemplateHeaders,
"DescribeCustomer": doDescribeCustomer,
"SendWxTouchTask": doSendWxTouchTask,
"CreateProject": doCreateProject,
"ReplenishProjectStock": doReplenishProjectStock,
"DescribeProjects": doDescribeProjects,
"OffLineProject": doOffLineProject,
"ExpireFlow": doExpireFlow,
"DeleteProject": doDeleteProject,
"CheckStaffChUser": doCheckStaffChUser,
"DescribeCustomers": doDescribeCustomers,
"ModifyProject": doModifyProject,
"CreateSubProject": doCreateSubProject,
"CopyActivityChannel": doCopyActivityChannel,
}
AVAILABLE_VERSION_LIST = [
"v20181011",
]
def action_caller():
return ACTION_MAP
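# Illustrative sketch of how the ACTION_MAP returned by action_caller() could be used
# to dispatch a parsed command. The request payload and the parsed_globals dict below
# are hypothetical stand-ins for what the tccli argument parser would normally build:
#
#     args = {"ProjectId": "proj-id"}              # request body (assumed shape)
#     parsed_globals = {"profile": "default"}      # global CLI options (assumed shape)
#     handler = action_caller()["DescribeProject"]
#     handler(args, parsed_globals)                # ends up calling doDescribeProject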
def parse_global_arg(parsed_globals):
g_param = parsed_globals
is_exist_profile = True
if not parsed_globals["profile"]:
is_exist_profile = False
g_param["profile"] = "default"
configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
conf = {}
cred = {}
if is_conf_exist:
conf = Utils.load_json_msg(conf_path)
if is_cred_exist:
cred = Utils.load_json_msg(cred_path)
if not (isinstance(conf, dict) and isinstance(cred, dict)):
raise ConfigurationError(
"file: %s or %s is not json format"
% (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
if OptionsDefine.Token not in cred:
cred[OptionsDefine.Token] = None
if not is_exist_profile:
if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
if os.environ.get(OptionsDefine.ENV_REGION):
conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
for param in g_param.keys():
if g_param[param] is None:
if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
if param in cred:
g_param[param] = cred[param]
else:
raise ConfigurationError("%s is invalid" % param)
elif param in [OptionsDefine.Region, OptionsDefine.Output]:
if param in conf:
g_param[param] = conf[param]
else:
raise ConfigurationError("%s is invalid" % param)
try:
if g_param[OptionsDefine.ServiceVersion]:
g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
else:
version = conf["solar"][OptionsDefine.Version]
g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
if g_param[OptionsDefine.Endpoint] is None:
g_param[OptionsDefine.Endpoint] = conf["solar"][OptionsDefine.Endpoint]
except Exception as err:
raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
return g_param
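# Configuration sketch for parse_global_arg() above: when options are not passed on the
# command line it falls back to JSON files under ~/.tccli (key names below are
# illustrative, not verified against OptionsDefine):
#
#   ~/.tccli/default.credential  ->  {"secretId": "...", "secretKey": "...", "token": null}
#   ~/.tccli/default.configure   ->  {"region": "ap-guangzhou", "output": "json",
#                                     "solar": {"version": "2018-10-11",
#                                               "endpoint": "solar.tencentcloudapi.com"}}
#
# The only hard requirements in the code are that the credential file supplies the
# SecretId/SecretKey entries and that the configure file has a "solar" section with a
# version (e.g. "2018-10-11" -> "v20181011") and an endpoint.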
clustering.py | """
Copied and modified from the dev branch of:
https://github.com/genepattern/HierarchicalClustering
on 2018-01-31
"""
import sys
import numpy as np
from statistics import mode
from sklearn.metrics import pairwise
from sklearn import metrics
from scipy.cluster.hierarchy import dendrogram
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import itertools
from sklearn.cluster import AgglomerativeClustering
import scipy
import itertools
from collections import defaultdict
from .elemental import *
from .information import *
# check if these are repeated:
import os
import sys
tasklib_path = os.path.dirname(os.path.realpath(sys.argv[0]))
# sys.path.append(tasklib_path + "/ccalnoir")
# 2018-02-06 Maybe uncomment these next two
# import matplotlib as mpl
# mpl.use('Agg')
# This is for printing the hyperlink
from IPython.core.display import display, HTML
# import pandas as pd
# import numpy as np
import scipy
import seaborn as sns
from matplotlib import pyplot as plt
from matplotlib import gridspec
from sklearn.cluster import AgglomerativeClustering
# from time import time
# import cuzcatlan as cusca
sns.set_style("white")
import matplotlib as mpl
mpl.rcParams['ytick.labelsize'] = 16
mpl.rcParams['xtick.labelsize'] = 16
mpl.rcParams['axes.titlesize'] = 24
mpl.rcParams['axes.labelsize'] = 20
SIGNIFICANT_DIGITS = 7
input_col_distance_dict = {
# These are the values I expect
"No column clustering": "No_column_clustering",
"Uncentered correlation": "uncentered_pearson",
"Pearson correlation": "pearson",
"Uncentered correlation, absolute value": "absolute_uncentered_pearson",
"Pearson correlation, absolute value": "absolute_pearson",
"Spearman's rank correlation": "spearman",
"Kendall's tau": "kendall",
"Euclidean distance": "euclidean",
"City-block distance": "manhattan",
"No_column_clustering": "No_column_clustering",
# These are the values the GpUnit tests give
"0": "No_column_clustering",
"1": "uncentered_pearson",
"2": "pearson",
"3": "absolute_uncentered_pearson",
"4": "absolute_pearson",
"5": "spearman",
"6": "kendall",
"7": "euclidean",
"8": "manhattan",
"9": "information_coefficient",
# These are the values I expect from the command line
"no_col": "No_column_clustering",
"uncentered_pearson": "uncentered_pearson",
"pearson": "pearson",
"absolute_uncentered_pearson": "absolute_uncentered_pearson",
"absolute_pearson": "absolute_pearson",
"spearman": "spearman",
"kendall": "kendall",
"euclidean": "euclidean",
"manhattan": "manhattan",
"Cosine": "cosine",
"cosine": "cosine",
"ic": "information_coefficient",
"information_coefficient": "information_coefficient",
"Information Coefficient": "information_coefficient",
}
input_row_distance_dict = {
# These are the values I expect
"No row clustering": "No_row_clustering",
"Uncentered correlation": "uncentered_pearson",
"Pearson correlation": "pearson",
"Uncentered correlation, absolute value": "absolute_uncentered_pearson",
"Pearson correlation, absolute value": "absolute_pearson",
"Spearman's rank correlation": "spearman",
"Kendall's tau": "kendall",
"Euclidean distance": "euclidean",
"City-block distance": "manhattan",
"No_row_clustering": "No_row_clustering",
# These are the values the GpUnit tests give
"0": "No_row_clustering",
"1": "uncentered_pearson",
"2": "pearson",
"3": "absolute_uncentered_pearson",
"4": "absolute_pearson",
"5": "spearman",
"6": "kendall",
"7": "euclidean",
"8": "manhattan",
"9": "information_coefficient",
# These are the values I expect from the command line
"no_row": "No_row_clustering",
"uncentered_pearson": "uncentered_pearson",
"pearson": "pearson",
"absolute_uncentered_pearson": "absolute_uncentered_pearson",
"absolute_pearson": "absolute_pearson",
"spearman": "spearman",
"kendall": "kendall",
"euclidean": "euclidean",
"manhattan": "manhattan",
"Cosine": "cosine",
"cosine": "cosine",
"ic": "information_coefficient",
"information_coefficient": "information_coefficient",
"Information Coefficient": "information_coefficient",
}
input_clustering_method = {
# These are the values I expect
'Pairwise complete-linkage': 'complete',
'Pairwise average-linkage': 'average',
'Pairwise ward-linkage': 'ward',
# These are the values the GpUnit test give
'm': 'complete',
'a': 'average', # I think this is the default
}
input_row_centering = {
# These are the values I expect
'No': None,
'Subtract the mean from each row': 'Mean',
'Subtract the median from each row': 'Median',
# These are the values the GpUnit test give
'None': None,
'Median': 'Median',
'Mean': 'Mean',
}
input_row_normalize = {
# These are the values I expect
'No': False,
'Yes': True,
# These are the values the GpUnit test give
'False': False,
'True': True,
}
input_col_centering = {
# These are the values I expect
'No': None,
'Subtract the mean from each column': 'Mean',
'Subtract the median from each column': 'Median',
# These are the values the GpUnit test give
'None': None,
'Median': 'Median',
'Mean': 'Mean',
}
input_col_normalize = {
# These are the values I expect
'No': False,
'Yes': True,
# These are the values the GpUnit test give
'False': False,
'True': True,
}
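# The lookup tables above exist so that the GUI strings, the GpUnit numeric codes, and
# the plain command-line spellings all normalize to one canonical name. For example,
# the three spellings below resolve to the same metric:
#
#     input_col_distance_dict["Pearson correlation"]   # -> "pearson" (GUI value)
#     input_col_distance_dict["2"]                      # -> "pearson" (GpUnit value)
#     input_col_distance_dict["pearson"]                # -> "pearson" (command-line value)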
def parse_inputs(args=sys.argv):
# inp = []
# inp = args
# Error handling:
arg_n = len(args)
if arg_n == 1:
sys.exit("Not enough parameters files were provided. This module needs a GCT file to work.")
elif arg_n == 2:
gct_name = args[1]
col_distance_metric = 'euclidean'
output_distances = False
row_distance_metric = 'No_row_clustering'
clustering_method = 'Pairwise average-linkage'
output_base_name = 'HC_out'
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric = euclidean (default value)")
print("\toutput_distances =", output_distances, "(default: not computing it and creating a file)")
print("\trow_distance_metric =", row_distance_metric, "(default: No row clustering)")
print("\tclustering_method =", clustering_method, "(default: Pairwise average-linkage)")
print("\toutput_base_name =", output_base_name, "(default: HC_out)")
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 3:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = False
row_distance_metric = 'No_row_clustering'
clustering_method = 'Pairwise average-linkage'
output_base_name = 'HC_out'
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", input_col_distance_dict[col_distance_metric])
print("\toutput_distances =", output_distances, "(default: not computing it and creating a file)")
print("\trow_distance_metric =", row_distance_metric, "(default: No row clustering)")
print("\tclustering_method =", clustering_method, "(default: Pairwise average-linkage)")
print("\toutput_base_name =", output_base_name, "(default: HC_out)")
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 4:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = 'No_row_clustering'
clustering_method = 'Pairwise average-linkage'
output_base_name = 'HC_out'
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric, "(default: No row clustering)")
print("\tclustering_method =", clustering_method, "(default: Pairwise average-linkage)")
print("\toutput_base_name =", output_base_name, "(default: HC_out)")
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 5:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = 'Pairwise average-linkage'
# clustering_method = 'Pairwise complete-linkage'
output_base_name = 'HC_out'
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method, "(default: Pairwise average-linkage)")
print("\toutput_base_name =", output_base_name, "(default: HC_out)")
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 6:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if clustering_method not in linkage_dic.keys():
exit("Clustering method chosen not supported. This should not have happened.")
if (linkage_dic[clustering_method] == 'ward') and (col_distance_metric != 'euclidean'):
exit("When choosing 'Pairwise ward-linkage' the distance metric *must* be 'euclidean'")
output_base_name = 'HC_out'
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name, "(default: HC_out)")
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 7:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
output_base_name = args[6]
row_normalization = False
col_normalization = False
row_centering = None
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name)
print("\trow_normalization =", row_normalization, "(default: False)")
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 8:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
output_base_name = args[6]
row_normalization = args[7]
col_normalization = False
row_centering = None
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
row_normalization = input_row_normalize[row_normalization]
# if (row_normalization == 'False') or (row_normalization == 'F') \
# or (row_normalization == 'false') or (row_normalization == 'f'):
# row_normalization = False
# else:
# row_normalization = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name)
print("\trow_normalization =", row_normalization)
print("\tcol_normalization =", col_normalization, "(default: False)")
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 9:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
output_base_name = args[6]
row_normalization = args[7]
col_normalization = args[8]
row_centering = None
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
# Row normalization
row_normalization = input_row_normalize[row_normalization]
# if (row_normalization == 'False') or (row_normalization == 'F') \
# or (row_normalization == 'false') or (row_normalization == 'f'):
# row_normalization = False
# else:
# row_normalization = True
# Column normalization
col_normalization = input_col_normalize[col_normalization]
# if (col_normalization == 'False') or (col_normalization == 'F') \
# or (col_normalization == 'false') or (col_normalization == 'f'):
# col_normalization = False
# else:
# col_normalization = True
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name)
print("\trow_normalization =", row_normalization)
print("\tcol_normalization =", col_normalization)
print("\trow_centering =", row_centering, "(default: None)")
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 10:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
output_base_name = args[6]
row_normalization = args[7]
col_normalization = args[8]
row_centering = args[9]
col_centering = None
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
# Row normalization
row_normalization = input_row_normalize[row_normalization]
# if (row_normalization == 'False') or (row_normalization == 'F') \
# or (row_normalization == 'false') or (row_normalization == 'f'):
# row_normalization = False
# else:
# row_normalization = True
# Column normalization
col_normalization = input_col_normalize[col_normalization]
# if (col_normalization == 'False') or (col_normalization == 'F') \
# or (col_normalization == 'false') or (col_normalization == 'f'):
# col_normalization = False
# else:
# col_normalization = True
# row_centering
row_centering = input_row_centering[row_centering]
if (row_centering == 'None') or (row_centering == 'N') \
or (row_centering == 'none') or (row_centering == 'n'):
row_centering = None
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name)
print("\trow_normalization =", row_normalization)
print("\tcol_normalization =", col_normalization)
print("\trow_centering =", row_centering)
print("\tcol_centering =", col_centering, "(default: None)")
elif arg_n == 11:
gct_name = args[1]
col_distance_metric = args[2]
output_distances = args[3]
row_distance_metric = args[4]
clustering_method = args[5]
output_base_name = args[6]
row_normalization = args[7]
col_normalization = args[8]
row_centering = args[9]
col_centering = args[10]
col_distance_metric = input_col_distance_dict[col_distance_metric]
row_distance_metric = input_row_distance_dict[row_distance_metric]
clustering_method = input_clustering_method[clustering_method]
if (output_distances == 'False') or (output_distances == 'F') \
or (output_distances == 'false') or (output_distances == 'f'):
output_distances = False
else:
output_distances = True
# Row normalization
row_normalization = input_row_normalize[row_normalization]
# if (row_normalization == 'False') or (row_normalization == 'F') \
# or (row_normalization == 'false') or (row_normalization == 'f'):
# row_normalization = False
# else:
# row_normalization = True
# Column normalization
col_normalization = input_col_normalize[col_normalization]
# if (col_normalization == 'False') or (col_normalization == 'F') \
# or (col_normalization == 'false') or (col_normalization == 'f'):
# col_normalization = False
# else:
# col_normalization = True
# row_centering
row_centering = input_row_centering[row_centering]
if (row_centering == 'None') or (row_centering == 'N') \
or (row_centering == 'none') or (row_centering == 'n'):
row_centering = None
# col_centering
col_centering = input_col_centering[col_centering]
if (col_centering == 'None') or (col_centering == 'N') \
or (col_centering == 'none') or (col_centering == 'n'):
col_centering = None
print("Using:")
print("\tgct_name =", gct_name)
print("\tcol_distance_metric =", col_distance_metric)
print("\toutput_distances =", output_distances)
print("\trow_distance_metric =", row_distance_metric)
print("\tclustering_method =", clustering_method)
print("\toutput_base_name =", output_base_name)
print("\trow_normalization =", row_normalization)
print("\tcol_normalization =", col_normalization)
print("\trow_centering =", row_centering)
print("\tcol_centering =", col_centering)
else:
sys.exit("Too many inputs. This module needs only a GCT file to work, "
"plus an optional input choosing between Pearson Correlation or Information Coefficient.")
print(args)
return gct_name, col_distance_metric, output_distances, row_distance_metric, clustering_method, output_base_name, \
row_normalization, col_normalization, row_centering, col_centering
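# Example invocation of parse_inputs() with a hand-built argv list (the file name is
# hypothetical); with eleven entries this exercises the arg_n == 11 branch above:
#
#     parse_inputs(['clustering.py', 'my_data.gct', 'pearson', 'False',
#                   'no_row', 'a', 'HC_out', 'False', 'False', 'None', 'None'])
#
# which yields col_distance_metric='pearson', row_distance_metric='No_row_clustering',
# clustering_method='average', no normalization and no centering.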
def plot_dendrogram(model, data, tree, axis, dist=mydist, clustering_method='average',
title='no_title.png', color_threshold=None, orientation='top', **kwargs):
# plt.clf()
# modified from https://github.com/scikit-learn/scikit-learn/pull/3464/files
# Children of hierarchical clustering
children = model.children_
# Distances between each pair of children
# TODO: Fix this mydist
# distance = dendodist(children, euclidian_similarity)
# distance = dendodist(children, dist)
og_distances = better_dendodist(children, dist, tree, data, axis=axis, clustering_method=clustering_method)
# print(og_distances)
# og_distances = [abs(temp) for temp in og_distances]
# Turn the similarity into a non-negative value; Scipy's dendrogram needs this
if dist in [custom_euclidean_sim, absolute_uncentered_pearson_corr, absolute_pearson_corr]:
# These similarities are already nonnegative [0,inf) or [0,1]
# og_distances = og_distances
pass
else: # all the correlation similarities are in [-1, 1]
og_distances = [temp + 1 for temp in og_distances]
# Now that all similarities are nonnegative, we turn them into a distance for plotting purposes
og_distances = [1 / temp for temp in og_distances]
# print(og_distances)
distance = np.cumsum(og_distances)
# distance = og_distances
# distance = better_dendodist(children, dist, tree, data, axis=axis)
# norm_distances = []
# for value in distance:
# norm_distances.append(1/value)
# norm_distances = distance
list_of_children = list(get_children(tree, leaves_are_self_children=False).values())
no_of_observations = [len(i) for i in list_of_children if i]
no_of_observations.append(len(no_of_observations) + 1)
# print(len(no_of_observations))
# print(children)
# print(list(tree.values()))
# print(norm_distances)
# print(distance)
if all(value == 0 for value in distance):
# If all distances are zero, then use uniform distance
distance = np.arange(len(distance))
# print(distance)
# print(np.cumsum(distance))
# The number of observations contained in each cluster level
# no_of_observations = np.arange(2, children.shape[0]+2)
# print(no_of_observations)
# Create linkage matrix and then plot the dendrogram
# linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)
# linkage_matrix = np.column_stack([children, np.cumsum(distance), no_of_observations]).astype(float)
linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)
# linkage_matrix = np.column_stack([children, norm_distances, no_of_observations]).astype(float)
# print(linkage_matrix)
# Plot the corresponding dendrogram
# print(scipy.cluster.hierarchy.cut_tree(linkage_matrix, n_clusters=5))
# print(color_threshold)
# find what the height at which to cut the dendrogram
if color_threshold is not None:
if color_threshold == 1:
color_threshold = 2
if color_threshold > (len(linkage_matrix) + 1):
color_threshold = (len(linkage_matrix) + 1)
# print('Finding the right cut')
color_threshold = linkage_matrix[-(color_threshold - 1)][2] - np.finfo(float).eps
# color_threshold = linkage_matrix[-(color_threshold - 1)][2] + 10*np.finfo(float).eps # Adding more wiggle room
# print(color_threshold)
R = dendrogram(linkage_matrix, color_threshold=color_threshold, orientation=orientation, **kwargs)
# R = dendrogram(linkage_matrix, **kwargs)
# [label.set_rotation(90) for label in plt.gca().get_xticklabels()]
order_of_columns = R['ivl']
# # print(order_of_columns)
# plt.gca().get_yaxis().set_visible(False)
# plt.savefig(title, dpi=300)
# plt.show()
# n = len(linkage_matrix) + 1
# cache = dict()
# for k in range(len(linkage_matrix)):
# c1, c2 = int(linkage_matrix[k][0]), int(linkage_matrix[k][1])
# c1 = [c1] if c1 < n else cache.pop(c1)
# c2 = [c2] if c2 < n else cache.pop(c2)
# cache[n + k] = c1 + c2
# order_of_columns = cache[2 * len(linkage_matrix)]
# print(order_of_columns)
# print(linkage_matrix)
# print("---")
# print(no_of_observations)
# print("---")
# print(list_of_children)
# print("---")
#
# print(len(order_of_columns))
# print(color_threshold)
# clusters2idxs, idxs2clusters = get_cluster_classes(R)
#
# print(clusters2idxs)
# print(idxs2clusters)
# print("---")
# print(get_children(tree, leaves_are_self_children=False))
# print("---")
# print(get_children(tree, leaves_are_self_children=False, only_leaves_are_children=False))
return order_of_columns, linkage_matrix
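# The linkage_matrix built above follows SciPy's convention: each row is
# [child_1, child_2, height, n_original_observations_in_merged_cluster], with leaves
# numbered 0..n-1 and internal nodes n, n+1, ... in merge order. A minimal sketch of
# the same construction on toy data (assumed values, independent of any model):
#
#     children = np.array([[0, 1], [2, 3], [4, 5]])   # as in sklearn's model.children_
#     heights = np.cumsum([1.0, 1.0, 1.0])            # any non-decreasing sequence works
#     counts = [2, 2, 4]                              # observations per merged cluster
#     lm = np.column_stack([children, heights, counts]).astype(float)
#     dendrogram(lm)                                  # draws the toy dendrogram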
def get_clusters(tree):
return
def get_cluster_classes(den, label='ivl'):
# from http://www.nxn.se/valent/extract-cluster-elements-by-color-in-python
clusters2idxs = defaultdict(list)
idxs2clusters = {}
# for c, pi in zip(den['color_list'], den['icoord']):
# for leg in pi[1:3]:
# i = (leg - 5.0) / 10.0
# if abs(i - int(i)) < 1e-5:
# clusters2idxs[c].append(int(i))
# idxs2clusters[int(i)] = c
# # print(c, i)
# cluster_classes = Clusters()
# for c, l in cluster_idxs.items():
# i_l = [den[label][i] for i in l]
# cluster_classes[c] = i_l
# Trying something new:
print(den.keys())
print(len(den['icoord']))
print(len(den['dcoord']))
print(len(den['ivl']))
print(len(den['leaves']))
print(den['leaves'])
print(len(den['color_list']))
print(den['color_list'])
return clusters2idxs, idxs2clusters
def order_leaves(model, data, tree, labels, axis=0, dist=mydist, reverse=False):
# Adapted from here: https://stackoverflow.com/questions/12572436/calculate-ordering-of-dendrogram-leaves
children = model.children_
# distance = better_dendodist(children, dist, tree, data, axis=axis)
# if all(value == 0 for value in distance):
# distance = np.arange(len(distance))
# list_of_children = list(get_children(tree, leaves_are_self_children=False).values())
# no_of_observations = [len(i) for i in list_of_children if i]
# no_of_observations.append(len(no_of_observations)+1)
# Create linkage matrix and then plot the dendrogram
# linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)
pseudo_linkage_matrix = np.column_stack([children]).astype(float)
n = len(pseudo_linkage_matrix) + 1
# This orders leaves by number of clusters
cache = dict()
for k in range(len(pseudo_linkage_matrix)):
c1, c2 = int(pseudo_linkage_matrix[k][0]), int(pseudo_linkage_matrix[k][1])
c1 = [c1] if c1 < n else cache.pop(c1)
c2 = [c2] if c2 < n else cache.pop(c2)
cache[n + k] = c1 + c2
numeric_order_of_leaves = cache[2 * len(pseudo_linkage_matrix)]
if reverse:
numeric_order_of_leaves = list(reversed(numeric_order_of_leaves))
return [labels[i] for i in numeric_order_of_leaves]
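# Worked example of the cache-based traversal above, on a toy children_ array with
# four leaves (0..3):
#
#     children_ = [[0, 1], [2, 3], [4, 5]]   # node 4 = {0,1}, node 5 = {2,3}, node 6 = root
#     # k=0: cache[4] = [0, 1]
#     # k=1: cache[5] = [2, 3]
#     # k=2: cache[6] = cache.pop(4) + cache.pop(5) = [0, 1, 2, 3]
#     # -> the leaves come out in the order [0, 1, 2, 3] and are then mapped through `labels`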
def two_plot_two_dendrogram(model, dist=mydist, **kwargs):
# modified from https://github.com/scikit-learn/scikit-learn/pull/3464/files
# Children of hierarchical clustering
children = model.children_
# Distances between each pair of children
distance = dendodist(children, dist)
if all(value == 0 for value in distance):
# If all distances are zero, then use uniform distance
distance = np.arange(len(distance))
# The number of observations contained in each cluster level
no_of_observations = np.arange(2, children.shape[0] + 2)
# Create linkage matrix and then plot the dendrogram
linkage_matrix = np.column_stack([children, distance, no_of_observations]).astype(float)
# Plot the corresponding dendrogram
R = dendrogram(linkage_matrix, color_threshold=0, orientation='left', **kwargs)
# [label.set_rotation(90) for label in plt.gca().get_xticklabels()]
order_of_rows = R['ivl']
# print(order_of_columns)
plt.gca().get_xaxis().set_visible(False)
return list(reversed(order_of_rows))
def my_affinity_generic(M, metric):
return np.array([np.array([metric(a, b) for a in M]) for b in M])
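# Quick sanity check of the generic affinity builder (toy data, city-block metric):
#
#     M = np.array([[0, 0], [1, 1], [2, 2]])
#     my_affinity_generic(M, lambda a, b: np.abs(a - b).sum())
#     # -> array([[0, 2, 4],
#     #           [2, 0, 2],
#     #           [4, 2, 0]])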
def my_affinity_i(M):
return np.array([[information_coefficient_dist(a, b) for a in M] for b in M])
def my_affinity_ai(M):
return np.array([[absolute_information_coefficient_dist(a, b) for a in M] for b in M])
def my_affinity_p(M):
return np.array([[custom_pearson_dist(a, b) for a in M] for b in M])
def my_affinity_s(M):
return np.array([[custom_spearman_dist(a, b) for a in M] for b in M])
def my_affinity_k(M):
return np.array([[custom_kendall_tau_dist(a, b) for a in M] for b in M])
def my_affinity_ap(M):
return np.array([[absolute_pearson_dist(a, b) for a in M] for b in M])
def my_affinity_u(M):
return np.array([[uncentered_pearson_dist(a, b) for a in M] for b in M])
def my_affinity_au(M):
return np.array([[absolute_uncentered_pearson_dist(a, b) for a in M] for b in M])
def my_affinity_l1(M):
return np.array([[custom_manhattan_dist(a, b) for a in M] for b in M])
def my_affinity_l2(M):
return np.array([[custom_euclidean_dist(a, b) for a in M] for b in M])
def my_affinity_m(M):
return np.array([[custom_manhattan_dist(a, b) for a in M] for b in M])
def my_affinity_c(M):
return np.array([[custom_cosine_dist(a, b) for a in M] for b in M])
def my_affinity_e(M):
# global dist_matrix
# dist_matrix = np.array([[mydist(a, b) for a in M]for b in M])
# return dist_matrix
return np.array([[custom_euclidean_dist(a, b) for a in M] for b in M])
def count_diff(x):
count = 0
compare = x[0]
for i in x:
if i != compare:
count += 1
return count
def count_mislabels(labels, true_labels):  # NOTE: name assumed; counts elements whose label disagrees with their cluster's majority label
# 2017-08-17: I will make the assumption that clusters have only 2 values.
# clusters = np.unique(true_labels)
# mislabels = 0
# for curr_clust in clusters:
# print("for label", curr_clust)
# print("\t", labels[(true_labels == curr_clust)])
# compare_to = mode(labels[(true_labels == curr_clust)])
# print("\tcompare to:", compare_to, "mislables: ", np.count_nonzero(labels[(true_labels == curr_clust)] != compare_to))
# mislabels += np.count_nonzero(labels[(true_labels == curr_clust)] != compare_to)
set_a = labels[true_labels == 0]
set_b = labels[true_labels == 1]
if len(set_a) <= len(set_b):
shorter = set_a
longer = set_b
else:
shorter = set_b
longer = set_a
long_mode = mode(longer) # this what the label of the longer cluster should be.
short_mode = 1 if long_mode == 0 else 0 # Choose the other value for the label of the shorter cluster
# start with the longer vector:
# print("The long set is", longer, "it has", np.count_nonzero(longer != long_mode), 'mislabels.')
# print("The short set is", shorter, "it has", np.count_nonzero(shorter != short_mode), 'mislabels.')
# np.count_nonzero(longer != long_mode) + np.count_nonzero(shorter != short_mode)
return np.count_nonzero(longer != long_mode) + np.count_nonzero(shorter != short_mode)
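# Worked example for the two-cluster case handled above:
#
#     labels      = np.array([0, 0, 1, 1])
#     true_labels = np.array([0, 0, 0, 1])
#     # longer cluster  = labels[true_labels == 0] = [0, 0, 1], its mode is 0
#     # shorter cluster = labels[true_labels == 1] = [1], expected label is therefore 1
#     # one element disagrees with its cluster's expected label, so the function returns 1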
def plot_heatmap(df, col_order, row_order, top=5, title_text='differentially expressed genes per phenotype'):
if len(col_order) != len(list(df)):
exit("Number of columns in the dataframe does not match the columns provided for ordering.")
if len(row_order) != len(df):
exit("Number of rows in the dataframe does not match the rows provided for ordering.")
# print(list(df), col_order)
df = df[col_order]
df = df.reindex(row_order)
plt.clf()
sns.heatmap(df.iloc[np.r_[0:top, -top:0], :], cmap='viridis')
plt.yticks(rotation=0)
plt.xticks(rotation=90)
plt.title('Top {} {}'.format(top, title_text))
plt.ylabel('Genes')
plt.xlabel('Sample')
plt.savefig('heatmap.png', dpi=300, bbox_inches="tight")
def parse_data(gct_name, row_normalization=False, col_normalization=False, row_centering=None, col_centering=None):
# if validators.url(gct_name):
# urlfile, __ = urllib.request.urlretrieve(gct_name)
# else:
# urlfile = gct_name
# f = open(urlfile)
# f.readline()
# size = f.readline().strip('\n').split('\t')
try:
data_df = pd.read_csv(gct_name, sep='\t', skiprows=2)
except ValueError:
data_df = gct_name
# print(size)
# print(list(data_df))
# exit(data_df.shape)
if data_df.index.name == 'Name':
data_df['Name'] = data_df.index
else:
if 'Name' not in list(data_df):
data_df['Name'] = data_df.iloc[:, 0]
data_df.drop(data_df.columns[0], axis=1, inplace=True)
if 'Description' not in list(data_df):
data_df['Description'] = data_df['Name']
data_df.set_index(data_df['Name'], inplace=True)
og_full_gct = data_df.copy()
og_full_gct.drop(['Name'], axis=1, inplace=True)
data_df.drop(['Name', 'Description'], axis=1, inplace=True)
plot_labels = list(og_full_gct.drop(['Description'], axis=1, inplace=False))
data = data_df.values
row_labels = data_df.index.values
og_data = data.copy()
# if row_centering is not None:
# if row_centering == 'Mean':
# row_means = np.mean(data, axis=1)
# row_means_col_vec = row_means.reshape((data.shape[0], 1))
# data = data - row_means_col_vec
# if row_centering == 'Median':
# row_medians = np.median(data, axis=1)
# row_medians_col_vec = row_medians.reshape((data.shape[0], 1))
# data = data - row_medians_col_vec
#
# if row_normalization:
# row_norm = np.sum(data * data, axis=1)
# row_norm_col_vec = row_norm.reshape((data.shape[0], 1))
# data = data / np.sqrt(row_norm_col_vec)
#
# if col_centering is not None:
# if col_centering == 'Mean':
# col_means = np.mean(data, axis=0)
# data = data - col_means
# if col_centering == 'Median':
# col_medians = np.median(data, axis=0)
# data = data - col_medians
#
# if col_normalization:
# col_norm = np.sum(data*data, axis=0)
# data = data/np.sqrt(col_norm)
data = normalize_dataframe(data_df, log_normalize=None,
row_centering=row_centering, row_normalization=row_normalization,
col_centering=col_centering, col_normalization=col_normalization).values
# print(data_df)
# print(data)
new_data_df = pd.DataFrame(data=data, index=data_df.index, columns=list(data_df))
# print(new_data_df)
# print(og_full_gct)
new_full_gct = new_data_df.copy()
new_full_gct.insert(0, column='Description', value=og_full_gct['Description'])
# print(new_full_gct)
# exit()
return og_data, data_df, data, new_data_df, plot_labels, row_labels, og_full_gct, new_full_gct
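# parse_data() expects the standard tab-separated GCT 1.2 layout, e.g.:
#
#   #1.2
#   <number of genes>\t<number of samples>
#   Name\tDescription\tSample_1\tSample_2\t...
#   GENE_A\tsome annotation\t1.23\t4.56\t...
#
# pd.read_csv(..., skiprows=2) drops the "#1.2" and dimension lines, and the code above
# then makes sure 'Name' and 'Description' columns exist before clustering.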
str2func = {
'custom_euclidean': my_affinity_e,
'uncentered_pearson': my_affinity_u,
'absolute_uncentered_pearson': my_affinity_au,
'information_coefficient': my_affinity_i,
'pearson': my_affinity_p,
'spearman': my_affinity_s,
'kendall': my_affinity_k,
'absolute_pearson': my_affinity_ap,
'l1': 'l1',
'l2': 'l2',
'manhattan': 'manhattan',
'cosine': 'cosine',
'euclidean': 'euclidean',
}
str2affinity_func = {
'custom_euclidean': my_affinity_e,
'uncentered_pearson': my_affinity_u,
'absolute_uncentered_pearson': my_affinity_au,
'information_coefficient': my_affinity_i,
'pearson': my_affinity_p,
'spearman': my_affinity_s,
'kendall': my_affinity_k,
'absolute_pearson': my_affinity_ap,
'l1': my_affinity_l1,
'l2': my_affinity_l2,
'manhattan': my_affinity_m,
'cosine': my_affinity_c,
'euclidean': my_affinity_e,
}
str2dist = {
'custom_euclidean': custom_euclidean_dist,
'uncentered_pearson': uncentered_pearson_dist,
'absolute_uncentered_pearson': absolute_uncentered_pearson_dist,
'information_coefficient': information_coefficient_dist,
'pearson': custom_pearson_dist,
'spearman': custom_spearman_dist,
'kendall': custom_kendall_tau_dist,
'absolute_pearson': absolute_pearson_dist,
'l1': custom_manhattan_dist,
'l2': custom_euclidean_dist,
'manhattan': custom_manhattan_dist,
'cosine': custom_cosine_dist,
'euclidean': custom_euclidean_dist,
}
str2similarity = {
'custom_euclidean': custom_euclidean_sim,
'uncentered_pearson': uncentered_pearson_corr,
'absolute_uncentered_pearson': absolute_uncentered_pearson_corr,
'information_coefficient': information_coefficient,
'pearson': custom_pearson_corr,
'spearman': custom_spearman_corr,
'kendall': custom_kendall_tau_corr,
'absolute_pearson': absolute_pearson_corr,
'l1': custom_manhattan_sim,
'l2': custom_euclidean_sim,
'manhattan': custom_manhattan_sim,
'cosine': custom_cosine_sim,
# 'euclidean': pairwise.paired_euclidean_distances,
'euclidean': custom_euclidean_sim,
# 'euclidean': custom_euclidean_dist,
}
linkage_dic = {
'Pairwise average-linkage': 'average',
'Pairwise complete-linkage': 'complete',
'Pairwise ward-linkage': 'ward',
'average': 'average',
'complete': 'complete',
'ward': 'ward',
}
def make_tree(model, data=None):
"""
Modified from:
https://stackoverflow.com/questions/27386641/how-to-traverse-a-tree-from-sklearn-agglomerativeclustering
import numpy as np
from sklearn.cluster import AgglomerativeClustering
import itertools
X = np.concatenate([np.random.randn(3, 10), np.random.randn(2, 10) + 100])
model = AgglomerativeClustering(linkage="average", affinity="cosine")
model.fit(X)
ii = itertools.count(X.shape[0])
[{'node_id': next(ii), 'left': x[0], 'right':x[1]} for x in model.children_]
---
You can also do dict(enumerate(model.children_, model.n_leaves_))
which will give you a dictionary where the each key is the ID of a node
and the value is the pair of IDs of its children. – user76284
:param model:
:return: a dictionary where the each key is the ID of a node and the value is the pair of IDs of its children.
"""
# ii = itertools.count(data.shape[0]) # Setting the counter at the number of leaves.
# tree = [{'node_id': next(ii), 'left': x[0], 'right':x[1]} for x in model.children_]
# print(tree)
# return tree
return dict(enumerate(model.children_, model.n_leaves_))
# return dict(enumerate(model.children_, 1))
def make_cdt(data, order_of_columns, order_of_rows, name='test.cdt', atr_companion=True, gtr_companion=False):
# TODO: if order_of_columns == None, then do arange(len(list(data)))
# TODO: if order_of_rows == None, then do arange(len(list(data)))
# exit(data.to_csv())
data.index.name = "ID"
data.rename(columns={'Description': 'Name'}, inplace=True)
temp = np.ones(len(data))
data.insert(loc=1, column='GWEIGHT', value=temp) # adding an extra column
# These three lines add a row
data.loc['EWEIGHT'] = list(np.ones(len(list(data))))
newIndex = ['EWEIGHT'] + [ind for ind in data.index if ind != 'EWEIGHT']
data = data.reindex(index=newIndex)
if atr_companion:
new_AID = ['', '']
for element in range(len(order_of_columns)):
temp = 'ARRY' + str(element) + 'X'
new_AID.append(temp)
data.loc['AID'] = new_AID
newIndex = ['AID'] + [ind for ind in data.index if ind != 'AID']
data = data.reindex(index=newIndex)
data = data[['Name', 'GWEIGHT'] + order_of_columns]
if gtr_companion:
new_GID = ['']
if atr_companion:
new_GID = ['AID', 'EWEIGHT'] # This is to make sure we fit the CDT format
# for element in np.sort(np.unique(GID)):
# if 'NODE' in element:
# # print(element, 'GTR delete')
# pass
# else:
# new_GID.append(element)
for element in range(len(order_of_rows)):
temp = 'GENE' + str(element) + 'X'
new_GID.append(temp)
data.insert(loc=0, column='GID', value=new_GID) # adding an extra column
data.insert(loc=0, column=data.index.name, value=data.index) # Making the index a column
# reorder to match dendogram
temp = ['AID', 'EWEIGHT'] + order_of_rows
# data = data.loc[temp]
# print(data['GID'])
data = data.reindex(temp)
# print(data['GID'])
# print(list(data.index))
# print(data['GID'])
# print(data['Name'])
# Making the 'GID' the index -- for printing purposes
data.index = data['GID']
data.index.name = 'GID'
data.drop(['GID'], axis=1, inplace=True)
# print(list(data.index))
# The first three lines need to be written separately due to a quirk in the CDT file format:
# print(data.to_csv(sep='\t', index=True, header=True))
f = open(name, 'w')
f.write(data.to_csv(sep='\t', index=True, header=True))
# f.write(data.to_csv(sep='\t', index=True, header=True))
f.close()
# pd.options.display.float_format = '{:3.3f}'.format
data = data.round(2)
# print(data.to_csv())
# exit()
# exit(data.to_csv(sep=' ', index=True, header=True, float_format='2',))
return
def make_atr(col_tree_dic, data, dist, clustering_method='average', file_name='test.atr'):
max_val = len(col_tree_dic)
# AID = []
# compute distances
distance_dic = {}
for node, children in col_tree_dic.items():
val = centroid_distances(children[0], children[1], tree=col_tree_dic, data=data, axis=1,
distance=dist, clustering_method=clustering_method)
# print(dist, children, val)
# print("Value is", val)
distance_dic[node] = val
# if dist == custom_euclidean_sim:
# print("Euclidean distance is especial, normalizing using this scheme:")
# low_norm = min(distance_dic.values())
# high_norm = max(distance_dic.values())
# for key in distance_dic.keys():
# # distance -= norm
# # distance_dic[key] = distance_dic[key]/high_norm
# # distance_dic[key] = (distance_dic[key]-low_norm)/high_norm
# # distance_dic[key] = distance_dic[key]/high_norm
# # distance_dic[key] = ((1/distance_dic[key])-high_norm)/low_norm
# print(distance_dic[key])
f = open(file_name, 'w')
for node, children in col_tree_dic.items():
elements = [translate_tree(node, max_val, 'atr'), translate_tree(children[0], max_val, 'atr'),
translate_tree(children[1], max_val, 'atr'),
"{num:.{width}f}".format(num=distance_dic[node], width=SIGNIFICANT_DIGITS)]
# print('\t', '\t'.join(elements))
# AID.append(translate_tree(children[0], max_val, 'atr'))
# AID.append(translate_tree(children[1], max_val, 'atr'))
f.write('\t'.join(elements) + '\n')
# print('\t'.join(elements) + '\n')
f.close()
return
def make_gtr(row_tree_dic, data, dist, clustering_method='average', file_name='test.gtr'):
max_val = len(row_tree_dic)
# GID = []
# compute distances
distance_dic = {}
for node, children in row_tree_dic.items():
val = centroid_distances(children[0], children[1], tree=row_tree_dic, data=data, axis=0,
distance=dist, clustering_method=clustering_method)
distance_dic[node] = val
f = open(file_name, 'w')
for node, children in row_tree_dic.items():
elements = [translate_tree(node, max_val, 'gtr'), translate_tree(children[0], max_val, 'gtr'),
translate_tree(children[1], max_val, 'gtr'),
"{num:.{width}f}".format(num=distance_dic[node], width=SIGNIFICANT_DIGITS)]
# GID.append(translate_tree(children[0], max_val, 'gtr'))
# GID.append(translate_tree(children[1], max_val, 'gtr'))
f.write('\t'.join(elements) + '\n')
# val -= 1
f.close()
return
def translate_tree(what, length, g_or_a):
if 'a' in g_or_a:
if what <= length:
translation = 'ARRY' + str(what) + 'X'
else:
translation = 'NODE' + str(what - length) + 'X'
elif 'g' in g_or_a:
if what <= length:
translation = 'GENE' + str(what) + 'X'
else:
translation = 'NODE' + str(what - length) + 'X'
else:
translation = []
print('This function does not support g_or_a=', g_or_a)
return translation
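# Example: with length=5 (five merges, so leaves are numbered 0..5 and internal
# nodes 6..10):
#
#     translate_tree(3, 5, 'atr')   # -> 'ARRY3X'  (3 <= 5, a leaf/array)
#     translate_tree(8, 5, 'atr')   # -> 'NODE3X'  (8 - 5 = 3, an internal node)
#     translate_tree(8, 5, 'gtr')   # -> 'NODE3X'  (gene trees use GENE/NODE instead of ARRY/NODE)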
# def get_children_recursively(k, model, node_dict, leaf_count, n_samples, data, verbose=False, left=None, right=None):
# # print(k)
# i, j = model.children_[k]
#
# if k in node_dict:
# return node_dict[k]['children']
#
# if i < leaf_count:
# # print("i if")
# left = [i]
# else:
# # print("i else")
# # read the AgglomerativeClustering doc. to see why I select i-n_samples
# left, node_dict = get_children_recursively(i - n_samples, model, node_dict,
# leaf_count, n_samples, data, verbose, left, right)
#
# if j < leaf_count:
# # print("j if")
# right = [j]
# else:
# # print("j else")
# right, node_dict = get_children_recursively(j - n_samples, model, node_dict,
# leaf_count, n_samples, data, verbose, left, right)
#
# if verbose:
# print(k, i, j, left, right)
# temp = map(lambda ii: data[ii], left)
# left_pos = np.mean(list(temp), axis=0)
# temp = map(lambda ii: data[ii], right)
# right_pos = np.mean(list(temp), axis=0)
#
# # this assumes that agg_cluster used euclidean distances
# dist = metrics.pairwise_distances([left_pos, right_pos], metric='euclidean')[0, 1]
#
# all_children = [x for y in [left, right] for x in y]
# pos = np.mean(list(map(lambda ii: data[ii], all_children)), axis=0)
#
# # store the results to speed up any additional or recursive evaluations
# node_dict[k] = {'top_child': [i, j], 'children': all_children, 'pos': pos, 'dist': dist,
# 'node_i': k + n_samples}
# return all_children, node_dict
# def recursive_atr
def get_children(tree, leaves_are_self_children=False):
# this is a recursive function
expanded_tree = {}
for node in range(max(tree.keys())):
if node <= len(tree):
if leaves_are_self_children:
expanded_tree[node] = [node]
else:
expanded_tree[node] = []
else:
# expanded_tree[node] = list_children_single_node(node, tree)
expanded_tree[node] = list_children_single_node(node, tree, leaves_are_self_children)
return expanded_tree
def list_children_single_node(node, tree, leaves_are_self_children=False, only_leaves_are_children=True):
# children = []
if node <= len(tree):
if leaves_are_self_children:
children = [node]
else:
children = []
else:
children = list(tree[node])
# Check each child, and add their children to the list
for child in children:
if child <= len(tree):
pass
else:
children += list_children_single_node(child, tree, only_leaves_are_children=True)
if only_leaves_are_children:
# print(sorted(np.unique(i for i in children if i <= len(tree))))
# print()
return [i for i in sorted(np.unique(children)) if i <= len(tree)]
else:
return sorted(np.unique(children))
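# Worked example on a toy tree with four leaves (0..3) and internal nodes 4..6:
#
#     tree = {4: [0, 1], 5: [2, 3], 6: [4, 5]}
#     list_children_single_node(6, tree)                                   # -> [0, 1, 2, 3]
#     list_children_single_node(6, tree, only_leaves_are_children=False)   # -> [0, 1, 2, 3, 4, 5]
#     get_children(tree)    # -> {0: [], 1: [], 2: [], 3: [], 4: [0, 1], 5: [2, 3]}
#     # (the root node 6 is absent because get_children ranges over range(max(tree.keys())))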
def centroid_distances(node_a, node_b, tree, data, axis=0, distance=mydist, clustering_method='average'):
if axis == 0:
pass
elif axis == 1:
data = np.transpose(data)
else:
exit("Variable 'data' does not have that many axises (╯°□°)╯︵ ┻━┻")
children_of_a = list_children_single_node(node_a, tree=tree, leaves_are_self_children=True)
children_of_b = list_children_single_node(node_b, tree=tree, leaves_are_self_children=True)
# if distance == custom_euclidean_sim:
# print("Euclidean distance is especial, normalizing using this scheme:")
# distance = custom_euclidean_dist
distances_list = []
if clustering_method == 'average':
for pair in itertools.product(data[children_of_a], data[children_of_b]):
distances_list.append(distance(pair[0], pair[1]))
return np.average(distances_list)
elif clustering_method == 'complete':
for pair in itertools.product(data[children_of_a], data[children_of_b]):
distances_list.append(distance(pair[0], pair[1]))
return np.min(distances_list)
else:
exit("Ony 'average' and 'complete' clustering methods are accepted at the moment (>_<)")
def euclidian_similarity(x, y):
dist = mydist(x, y)
# return 1/(1+dist)
return 1 / (np.exp(dist))
def better_dendodist(children, distance, tree, data, axis, clustering_method='average'):
distances_list = []
for pair in children:
distances_list.append(centroid_distances(pair[0], pair[1], tree, data, axis, distance=distance,
clustering_method=clustering_method))
# print(distance, pair, distances_list[-1])
return distances_list
def HierarchicalClustering(pwd: "The current directory",
gct_name: "Gene expression data filename (.gct file) or Pandas DataFrame "
"where rows are genes and columns are samples",
col_distance_metric: "The function to be used when comparing the distance/similarity of "
"the columns in the gct_name dataset",
row_distance_metric: "The function to be used when comparing the distance/similarity of "
"the rows in the gct_name dataset",
clustering_method: "Type of linkage to use" = 'average',
output_base_name: "Base name for output file" = 'HC_output',
row_normalization: "Whether to normalize each row (gene) in the data" = False,
col_normalization: "Whether to normalize each column (sample) in the data" = False,
row_centering: "How to center each row (gene) in the data" = 'Mean',
col_centering: "How to center each column (sample) in the data" = 'Mean',
output_distances: "Whether or not output the pair-wise distance matrix. "
"If true, the distance between each column will be called, "
"which can be very computationally intensive. "
"If unsure, leave as False." = False,
custom_plot: "Plot the dendrograms by Genes, Samples, or Both" = 'Both',
clusters_to_highlight: "How many clusters to highlight in the dendrogram" = 2,
show: "Whether to show the plot at the end" = False):
"""
This function performs hierarchical clustering to group samples (columns) with similar phenotypes
and/or genes (rows) with similar expression profiles.
:param pwd: The current directory
:param gct_name: Gene expression data filename (.gct file) or Pandas DataFrame where rows are genes and
columns are samples
:param col_distance_metric: The function to be used when comparing the distance/similarity of
the columns in the gct_name dataset
:param row_distance_metric: The function to be used when comparing the distance/similarity of
the rows in the gct_name dataset
:param clustering_method: Type of linkage to use
:param output_base_name: Base name for output file
:param row_normalization: Whether to normalize each row (gene) in the data
:param col_normalization: Whether to normalize each column (sample) in the data
:param row_centering: How to center each row (gene) in the data
:param col_centering: How to center each column (sample) in the data
:param output_distances: Whether or not output the pair-wise distance matrix.
If true, the distance between each column will be called,
which can be very computationally intensive.
If unsure, leave as False
:param custom_plot: Plot the dendrograms by Genes, Samples, or Both
:param clusters_to_highlight: How many clusters to highlight in the dendrogram
:param show: Whether to show the plot at the end
:return:
"""
# gct_name, col_distance_metric, output_distances, row_distance_metric, clustering_method, output_base_name, \
# row_normalization, col_normalization, row_centering, col_centering = parse_inputs(sys.argv)
if col_distance_metric == "No_column_clustering":
custom_plot = 'Genes'
if row_distance_metric == "No_row_clustering":
custom_plot = 'Samples'
og_data, og_data_df, data, data_df, col_labels, row_labels, og_full_gct, new_full_gct = \
parse_data(gct_name, row_normalization, col_normalization, row_centering, col_centering)
order_of_columns = list(data_df)
order_of_rows = list(data_df.index)
data_transpose = np.transpose(data)
# print(data)
# print(data_df)
atr_companion = False
col_model = None
col_tree = None
gtr_companion = False
row_model = None
row_tree = None
AID = None
GID = None
if col_distance_metric != 'No_column_clustering':
atr_companion = True
col_model = AgglomerativeClustering(linkage=linkage_dic[clustering_method], n_clusters=clusters_to_highlight,
affinity=str2func[col_distance_metric])
col_model.fit(data_transpose)
col_tree = make_tree(col_model)
order_of_columns = order_leaves(col_model, tree=col_tree, data=data_transpose,
dist=str2similarity[col_distance_metric], labels=col_labels, reverse=True)
path_to_atr = output_base_name + '.atr'
make_atr(col_tree, file_name=path_to_atr, data=data,
dist=str2similarity[col_distance_metric], clustering_method=linkage_dic[clustering_method])
if row_distance_metric != 'No_row_clustering':
gtr_companion = True
row_model = AgglomerativeClustering(linkage=linkage_dic[clustering_method], n_clusters=clusters_to_highlight,
affinity=str2func[row_distance_metric])
# y_col = row_model.fit_predict(np.transpose(data))
# print(y_col)
row_model.fit(data)
row_tree = make_tree(row_model)
order_of_rows = order_leaves(row_model, tree=row_tree, data=data,
dist=str2similarity[row_distance_metric], labels=row_labels)
path_to_gtr = output_base_name + '.gtr'
make_gtr(row_tree, data=data, file_name=output_base_name + '.gtr', dist=str2similarity[row_distance_metric])
if output_distances:
# TODO: check which col or row was selected, or both
row_distance_matrix = str2affinity_func[row_distance_metric](data)
# col_distance_matrix = str2affinity_func[col_distance_metric](np.transpose(data))
dist_file = open(output_base_name + '_pairwise_distances.csv', 'w')
dist_file.write('labels,')
dist_file.write(",".join(col_model.labels_.astype(str)) + "\n")
dist_file.write('samples,')
dist_file.write(",".join(list(data_df)) + "\n")
i = 0
for row in row_distance_matrix:
dist_file.write('distances row=' + str(i) + "," + ",".join(row.astype(str)) + "\n")
i += 1
path_to_cdt = output_base_name + '.cdt'
make_cdt(data=new_full_gct, name=path_to_cdt, atr_companion=atr_companion,
gtr_companion=gtr_companion,
order_of_columns=order_of_columns, order_of_rows=order_of_rows)
if custom_plot == 'Samples':
# Plotting the heatmap with dendrogram
plt.clf()
# fig = plt.figure(figsize=(16, 9), dpi=300)
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(2, 1, height_ratios=[1, 5])
gs.update(wspace=0.0, hspace=0.0)
ax0 = plt.subplot(gs[0]) # Doing dendrogram first
ax0.axis('off')
col_order, link = plot_dendrogram(col_model, data, col_tree, axis=1,
dist=str2similarity[col_distance_metric],
clustering_method=clustering_method,
color_threshold=clusters_to_highlight,
title='no_title.png', orientation='top')
col_order = [int(i) for i in col_order]
# print(col_order)
named_col_order = [col_labels[i] for i in col_order]
# print(named_col_order)
# print(col_order)
# print(col_model.labels_)
ax1 = plt.subplot(gs[1])
# Row-normalizing for display purposes only:
data_df = data_df.subtract(data_df.min(axis=1), axis=0)
data_df = data_df.div(data_df.max(axis=1), axis=0)
sns.heatmap(data_df[named_col_order], ax=ax1, cbar=False, cmap='bwr')
# ax1.xaxis.tick_top()
[label.set_rotation(90) for label in ax1.get_xticklabels()]
file_path_plot = output_base_name + '.pdf'
plt.savefig(file_path_plot, bbox_inches='tight')
print("----------------------------------------------------------------------")
print("The PDF of this heatmap can be downloaded here:")
display(HTML('<a href="' + file_path_plot + '" target="_blank">PDF of the heatmap</a>'))
print("----------------------------------------------------------------------")
print("The CDF which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_cdt + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
print("The ATR which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_atr + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
if show:
# plt.show()
pass
# col_order = [int(i) for i in col_order]
# print(col_order)
# named_col_order = [col_labels[i] for i in col_order]
# print(named_col_order)
# print(col_order)
# print(idxs2clusters)
cls_list = col_model.labels_
# for i in range(len(col_order)):
# cls_list.append(idxs2clusters[i])
# print(cls_list)
# order_by = [col_order.index(i) for i in range(len(col_order))]
# list2intlist(cls_list, custom_order=order_by)
# in_list = np.array(cls_list)
# print(cls_list)
# print(np.array(list2intlist(cls_list, custom_order=order_by)))
list2cls(np.array(list2intlist(cls_list)), name_of_out=output_base_name+'.cls', sep=' ')
if custom_plot == 'Genes':
# Plotting the heatmap with dendrogram
plt.clf()
# fig = plt.figure(figsize=(16, 9), dpi=300)
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(1, 2, width_ratios=[5, 1])
gs.update(wspace=0.0, hspace=0.0)
ax0 = plt.subplot(gs[1]) # Doing dendrogram first
ax0.axis('off')
row_order, link = plot_dendrogram(row_model, data_transpose, row_tree, axis=1,
dist=str2similarity[row_distance_metric],
clustering_method=clustering_method,
color_threshold=clusters_to_highlight,
orientation='right', title='no_title.png')
# row_order = [int(i) for i in row_order]
# named_row_order = [row_labels[i] for i in row_order]
ax1 = plt.subplot(gs[0])
# Min-max scaling each row for display purposes only:
data_df = data_df.subtract(data_df.min(axis=1), axis=0)
data_df = data_df.div(data_df.max(axis=1), axis=0)
sns.heatmap(data_df.iloc[row_order], ax=ax1, cbar=False, cmap='bwr')
# ax1.xaxis.tick_top()
[label.set_rotation(90) for label in ax1.get_xticklabels()]
file_path_plot = output_base_name + '.pdf'
plt.savefig(file_path_plot, bbox_inches='tight')
print("----------------------------------------------------------------------")
print("The PDF of this heatmap can be downloaded here:")
display(HTML('<a href="' + file_path_plot + '" target="_blank">PDF of the heatmap</a>'))
print("----------------------------------------------------------------------")
print("The CDF which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_cdt + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
print("The GTR which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_gtr + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
if show:
plt.show()
if custom_plot == 'Both':
# Plotting the heatmap with dendrogram
plt.clf()
# fig = plt.figure(figsize=(16, 9), dpi=300)
fig = plt.figure(figsize=(16, 9))
gs = gridspec.GridSpec(2, 2, width_ratios=[5, 1], height_ratios=[1, 5])
gs.update(wspace=0.0, hspace=0.0)
# Doing TOP dendrogram first
ax0 = plt.subplot(gs[0])
ax0.axis('off')
col_order, link = plot_dendrogram(col_model, data, col_tree, axis=1,
dist=str2similarity[col_distance_metric],
clustering_method=clustering_method,
color_threshold=clusters_to_highlight,
title='no_title.png', orientation='top')
col_order = [int(i) for i in col_order]
named_col_order = [col_labels[i] for i in col_order]
# Doing RIGHT dendrogram
ax3 = plt.subplot(gs[3])
ax3.axis('off')
row_order, link = plot_dendrogram(row_model, data_transpose, row_tree, axis=1,
dist=str2similarity[row_distance_metric],
clustering_method=clustering_method,
color_threshold=clusters_to_highlight,
orientation='right', title='no_title.png')
# Plotting the heatmap now
ax1 = plt.subplot(gs[2])
# Min-max scaling each row for display purposes only:
data_df = data_df.subtract(data_df.min(axis=1), axis=0)
data_df = data_df.div(data_df.max(axis=1), axis=0)
sns.heatmap(data_df[named_col_order].iloc[row_order], ax=ax1, cbar=False, cmap='bwr')
# ax1.xaxis.tick_top()
[label.set_rotation(90) for label in ax1.get_xticklabels()]
file_path_plot = output_base_name + '.pdf'
plt.savefig(file_path_plot, bbox_inches='tight')
print("----------------------------------------------------------------------")
print("The PDF of this heatmap can be downloaded here:")
display(HTML('<a href="' + file_path_plot + '" target="_blank">PDF of the heatmap</a>'))
print("----------------------------------------------------------------------")
print("The CDF which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_cdt + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
print("The GTR which is compatible with HierarchicalClusteringViewer is here:")
display(HTML('<a href="' + path_to_gtr + '" target="_blank">TXT containing the output data</a>'))
print("----------------------------------------------------------------------")
if show:
plt.show()
return col_model, row_model
def hc_samples(
input_gene_expression: "gene expression data filename (.gct file) where rows are genes and columns are samples",
clustering_type: "single or consensus -- Only single is suported at the moment",
distance_metric: "the function to be used when comparing the distance/similarity of the columns in the "
"input_gene_expression dataset",
file_basename: "the name to use when naming output files" = 'HC_out',
clusters_to_highlight: "how many clusters to highlight in the dendrogram" = None):
"""
Perform hierarchical clustering to group samples with similar phenotypes.
:param input_gene_expression: str; gene expression data filename (.gct file)
where rows are genes and columns are samples
:param clustering_type: str; single or consensus
:param distance_metric: str; the function to be used when comparing the distance/similarity of the columns
in the input_gene_expression dataset
:param file_basename: str; the name to use when naming output files
:param clusters_to_highlight: int; how many clusters to highlight in the dendrogram
:return: object; Sklearn's AgglomerativeClustering fitted model
"""
print("Currenty clustering_type is being ignored, only 'single' is supported.")
pwd = '.'
gct_name = input_gene_expression
col_distance_metric = distance_metric
output_distances = False
row_distance_metric = 'No_row_clustering'
clustering_method = 'average'
output_base_name = file_basename
row_normalization = False
col_normalization = False
row_centering = 'Mean'
col_centering = 'Mean'
custom_plot = 'Samples'
show = True
# print("This are the parameters to be used (for debugging purposes)")
# print("""
# pwd = '.'
# gct_name = {gct_name}
# col_distance_metric = {col_distance_metric}
# output_distances = {output_distances}
# row_distance_metric = {row_distance_metric}
# clustering_method = {clustering_method}
# output_base_name = {output_base_name}
# row_normalization = {row_normalization}
# col_normalization = {col_normalization}
# row_centering = {row_centering}
# col_centering = {col_centering}
# """.format(
# gct_name=gct_name, col_distance_metric=col_distance_metric,
# output_distances=str(output_distances),
# row_distance_metric=row_distance_metric, clustering_method=clustering_method,
# output_base_name=output_base_name,
# row_normalization=str(row_normalization), col_normalization=str(col_normalization),
# row_centering=row_centering, col_centering=col_centering
# )
# )
print("Now we will start performing hierarchical clustering, this may take a little while.")
col_model, row_model = HierarchicalClustering(pwd,
gct_name,
col_distance_metric,
row_distance_metric,
clustering_method,
output_base_name,
row_normalization,
col_normalization,
row_centering,
col_centering,
output_distances,
custom_plot,
clusters_to_highlight,
show)
print("Done with Hierarchical Clustering!")
return col_model
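# A minimal usage sketch for hc_samples (not called anywhere): the .gct path, metric name,
# and cluster count below are hypothetical placeholders, not values taken from this module.
def _example_hc_samples():
    return hc_samples(input_gene_expression='my_dataset.gct',
                      clustering_type='single',
                      distance_metric='euclidean',
                      file_basename='HC_samples_out',
                      clusters_to_highlight=3)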
def hc_genes(
input_gene_expression: "gene expression data filename (.gct file) where rows are genes and columns are samples",
clustering_type: "single or consensus -- Only single is suported at the moment",
distance_metric: "the function to be used when comparing the distance/similarity of the rows in the "
"input_gene_expression dataset",
file_basename: "the name to use when naming output files" = 'HC_out',
clusters_to_highlight: "how many clusters to highlight in the dendrogram" = None):
"""
Perform hierarchical clustering to group genes with similar expression profile.
:param input_gene_expression: str; gene expression data filename (.gct file)
where rows are genes and columns are samples
:param clustering_type: str; single or consensus
:param distance_metric: str; the function to be used when comparing the distance/similarity of the rows
in the input_gene_expression dataset
:param file_basename: str; the name to use when naming output files
:param clusters_to_highlight: int; how many clusters to highlight in the dendrogram
:return: object; Sklearn's AgglomerativeClustering fitted model
"""
print("Currenty clustering_type is being ignored, only 'single' is supported.")
pwd = '.'
gct_name = input_gene_expression
col_distance_metric = 'No_column_clustering'
output_distances = False
row_distance_metric = distance_metric
clustering_method = 'average'
output_base_name = file_basename
row_normalization = False
col_normalization = False
row_centering = 'Mean'
col_centering = 'Mean'
custom_plot = 'Genes'
show = True
# print("This are the parameters to be used (for debugging purposes)")
# print("""
# pwd = '.'
# gct_name = {gct_name}
# col_distance_metric = {col_distance_metric}
# output_distances = {output_distances}
# row_distance_metric = {row_distance_metric}
# clustering_method = {clustering_method}
# output_base_name = {output_base_name}
# row_normalization = {row_normalization}
# col_normalization = {col_normalization}
# row_centering = {row_centering}
# col_centering = {col_centering}
# """.format(
# gct_name=gct_name, col_distance_metric=col_distance_metric,
# output_distances=str(output_distances),
# row_distance_metric=row_distance_metric, clustering_method=clustering_method,
# output_base_name=output_base_name,
# row_normalization=str(row_normalization), col_normalization=str(col_normalization),
# row_centering=row_centering, col_centering=col_centering
# )
# )
print("Now we will start performing hierarchical clustering, this may take a little while.")
col_model, row_model = HierarchicalClustering(pwd,
gct_name,
col_distance_metric,
row_distance_metric,
clustering_method,
output_base_name,
row_normalization,
col_normalization,
row_centering,
col_centering,
output_distances,
custom_plot,
clusters_to_highlight,
show)
print("Done with Hierarchical Clustering!")
return row_model
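# hc_genes follows the same calling convention as hc_samples above; a sketch with
# placeholder arguments (not called anywhere, and the metric name is hypothetical).
def _example_hc_genes():
    return hc_genes(input_gene_expression='my_dataset.gct',
                    clustering_type='single',
                    distance_metric='euclidean',
                    file_basename='HC_genes_out',
                    clusters_to_highlight=3)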
def normalize_dataframe(df, log_normalize=None,
row_centering='Mean', row_normalization=True,
col_centering='Mean', col_normalization=True):
"""
This function takes in a DataFrame and some flags and normalizes the data it contains. The order of operations is:
1- Log-normalize
2- Row (gene) center
3- Row (gene) normalize
4- Column (sample) center
5- Column (sample) normalize
:param df: (pandas DataFrame) the DataFrame to be normalized
:param log_normalize: (float or None) base of the logarithm to use for log-normalization; None skips it (currently not implemented in this function)
:param row_centering: ('Mean', 'Median', or 'No') which statistic to subtract from every element of each row
:param row_normalization: (bool) whether to rescale each row to unit (Euclidean) norm
:param col_centering: ('Mean', 'Median', or 'No') which statistic to subtract from every element of each column
:param col_normalization: (bool) whether to rescale each column to unit (Euclidean) norm
:return: (pandas DataFrame) the normalized DataFrame
"""
if (log_normalize is None) \
and (row_centering == 'No') and (col_centering == 'No') \
and (row_normalization is False) and (col_normalization is False):
print("No normalization has been requested ಠ_ಠ¯")
return df
data = df.values  # df.as_matrix() is deprecated in newer pandas versions
# Log Normalizing
if log_normalize is not None:
print("I'm sorry, log-normalization is not supported at the moment (u_u)")
# Row Centering
if row_centering != 'No':
if row_centering == 'Mean':
row_means = np.mean(data, axis=1)
row_means_col_vec = row_means.reshape((data.shape[0], 1))
data = data - row_means_col_vec
elif row_centering == 'Median':
row_medians = np.median(data, axis=1)
row_medians_col_vec = row_medians.reshape((data.shape[0], 1))
data = data - row_medians_col_vec
else:
print("row_centering has an unexpected value:", row_centering)
# Row Normalizing
if row_normalization:
row_norm = np.sum(data * data, axis=1)
row_norm_col_vec = row_norm.reshape((data.shape[0], 1))
data = data / np.sqrt(row_norm_col_vec)
# Column Centering
if col_centering != 'No':
if col_centering == 'Mean':
col_means = np.mean(data, axis=0)
data = data - col_means
elif col_centering == 'Median':
col_medians = np.median(data, axis=0)
data = data - col_medians
else:
print("col_centering has an unexpected value: ", col_centering)
# Column Normalizing
if col_normalization:
col_norm = np.sum(data * data, axis=0)
data = data / np.sqrt(col_norm)
normalized_df = pd.DataFrame(data=data, index=df.index, columns=list(df))
return normalized_df
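# Illustrative sketch (not called anywhere) of the normalization order documented above,
# using a tiny made-up DataFrame; the values and labels are arbitrary, and it assumes
# pandas is imported as pd at the top of this module (as used elsewhere in the file).
def _example_normalize_dataframe():
    df = pd.DataFrame([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
                      index=['gene_a', 'gene_b'], columns=['s1', 's2', 's3'])
    return normalize_dataframe(df, row_centering='Mean', row_normalization=True,
                               col_centering='No', col_normalization=False)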
def display_heatmap(data,
name='heatmap',
log_normalize=None,
row_centering: "How to center each row (gene) in the data" = 'No',
row_normalization: "Whether to normalize each row (gene) in the data" = True,
col_centering: "How to center each column (sample) in the data" = 'No',
col_normalization: "Whether to normalize each column (sample) in the data" = False,
mostrar=False):
if isinstance(data, pd.DataFrame):
data_to_plot = data.copy()
elif os.path.isfile(data):
data_to_plot = pd.read_table(data, skiprows=2, sep='\t')
data_to_plot.set_index('Name', inplace=True)
data_to_plot.drop('Description', axis=1, inplace=True)
else:
try:
data_to_plot = pd.read_table(data, skiprows=2, sep='\t')
except urllib.error.HTTPError:
print("I don't know what the variable 'data' contains.")
print('data=')
print(data)
exit("If this is a url it may not be accessible.\n"
"(╯°□°)╯︵ ┻━┻")
data_to_plot.set_index('Name', inplace=True)
data_to_plot.drop('Description', axis=1, inplace=True)
data_to_plot = normalize_dataframe(data_to_plot, log_normalize=log_normalize,
row_centering=row_centering, row_normalization=row_normalization,
col_centering=col_centering, col_normalization=col_normalization)
plt.clf()
# # figure reshape from:
# # https://stackoverflow.com/questions/35127920/overlapping-yticklabels-is-it-possible-to-control-cell-size-of-heatmap-in-seabo
# # and from:
# # https://matplotlib.org/users/customizing.html
# get the tick label font size
fontsize_pt = plt.rcParams['ytick.labelsize']
dpi = 72.27
# compute the matrix height in points and inches
matrix_height_pt = fontsize_pt * data_to_plot.shape[0]
matrix_height_in = (matrix_height_pt / dpi) * 1.2
# compute the required figure height
top_margin = 0.01  # as a fraction of the figure height
bottom_margin = 0.01  # as a fraction of the figure height
figure_height = matrix_height_in / (1 - top_margin - bottom_margin)
# build the figure instance with the desired height
fig, ax = plt.subplots(
figsize=(6, figure_height),
gridspec_kw=dict(top=1 - top_margin, bottom=bottom_margin))
sns.heatmap(data_to_plot, cmap='bwr', yticklabels=True, square=True,
cbar_kws={'use_gridspec': False,
'location': "right",
'shrink': 0.5,
'label': ''}
)
if not name.endswith('.pdf'):
name = name + '.pdf'
plt.savefig(name, dpi=dpi, bbox_inches='tight')
# plt.savefig(name, dpi=dpi)
print(name, "has been created!")
if mostrar:
# print(data_to_plot.head())
plt.show()
print("The PDF of this heatmap can be downloaded here:")
display(HTML('<a href="' + name + '" target="_blank">PDF of the heatmap</a>'))
return
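# Usage sketch for display_heatmap (not called anywhere); 'example.gct' is a hypothetical
# placeholder path and the flags simply mirror the defaults documented in the signature above.
def _example_display_heatmap():
    display_heatmap('example.gct', name='example_heatmap',
                    row_centering='No', row_normalization=True,
                    col_centering='No', col_normalization=False, mostrar=False)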
| count_mislabels |
ticker.go | package ticker
import (
"time"
)
type Ticker struct{
C chan time.Time
t *time.Ticker
done chan bool
f func()
}
func NewTicker(d time.Duration) *Ticker |
func (T *Ticker) Func(f func()) func() {
T.f = f
return f
}
func (T *Ticker) Reset(d time.Duration) {
T.t.Reset(d)
}
func (T *Ticker) Stop() {
T.done <- true
T.t.Stop()
} | {
ticker := &Ticker{
C : make(chan time.Time),
t : time.NewTicker(d),
done : make(chan bool, 1),
}
go func(ticker *Ticker){
for {
select {
case c := <-ticker.t.C:
if ticker.f != nil {
ticker.f()
continue
}
ticker.C <- c
case <-ticker.done:
return
}
}
}(ticker)
return ticker
} |
edit.go | package controller
import (
"fmt"
"github.com/GoAdminGroup/go-admin/context"
"github.com/GoAdminGroup/go-admin/modules/auth"
"github.com/GoAdminGroup/go-admin/modules/file"
"github.com/GoAdminGroup/go-admin/modules/language"
"github.com/GoAdminGroup/go-admin/modules/menu"
"github.com/GoAdminGroup/go-admin/plugins/admin/modules"
"github.com/GoAdminGroup/go-admin/plugins/admin/modules/constant"
"github.com/GoAdminGroup/go-admin/plugins/admin/modules/guard"
"github.com/GoAdminGroup/go-admin/plugins/admin/modules/table"
"github.com/GoAdminGroup/go-admin/template"
"github.com/GoAdminGroup/go-admin/template/types"
template2 "html/template"
"net/http"
)
// ShowForm show form page.
func ShowForm(ctx *context.Context) {
param := guard.GetShowFormParam(ctx)
showForm(ctx, "", param.Prefix, param.Id, param.GetUrl(), param.GetInfoUrl(), "")
}
func showForm(ctx *context.Context, alert template2.HTML, prefix string, id string, url, infoUrl string, editUrl string) {
table.RefreshTableList()
panel := table.Get(prefix)
formData, groupFormData, groupHeaders, title, description, err := panel.GetDataFromDatabaseWithId(id)
if err != nil && alert == "" {
alert = aAlert().SetTitle(template2.HTML(`<i class="icon fa fa-warning"></i> ` + language.Get("error") + `!`)). |
user := auth.Auth(ctx)
referer := ctx.Headers("Referer")
if referer != "" && !modules.IsInfoUrl(referer) && !modules.IsEditUrl(referer, ctx.Query("__prefix")) {
infoUrl = referer
}
tmpl, tmplName := aTemplate().GetTemplate(isPjax(ctx))
buf := template.Execute(tmpl, tmplName, user, types.Panel{
Content: alert + formContent(aForm().
SetContent(formData).
SetTabContents(groupFormData).
SetTabHeaders(groupHeaders).
SetPrefix(config.PrefixFixSlash()).
SetPrimaryKey(panel.GetPrimaryKey().Name).
SetUrl(url).
SetToken(authSrv().AddToken()).
SetInfoUrl(infoUrl).
SetOperationFooter(formFooter()).
SetHeader(panel.GetForm().HeaderHtml).
SetFooter(panel.GetForm().FooterHtml)),
Description: description,
Title: title,
}, config, menu.GetGlobalMenu(user, conn).SetActiveClass(config.URLRemovePrefix(ctx.Path())))
ctx.HTML(http.StatusOK, buf.String())
if editUrl != "" {
ctx.AddHeader(constant.PjaxUrlHeader, editUrl)
}
}
func EditForm(ctx *context.Context) {
param := guard.GetEditFormParam(ctx)
if param.HasAlert() {
showForm(ctx, param.Alert, param.Prefix, param.Id, param.GetUrl(), param.GetInfoUrl(), param.GetEditUrl())
return
}
// Process uploaded files; only local storage is supported for now.
if len(param.MultiForm.File) > 0 {
err := file.GetFileEngine(config.FileUploadEngine.Name).Upload(param.MultiForm)
if err != nil {
alert := aAlert().SetTitle(template2.HTML(`<i class="icon fa fa-warning"></i> ` + language.Get("error") + `!`)).
SetTheme("warning").
SetContent(template2.HTML(err.Error())).
GetContent()
showForm(ctx, alert, param.Prefix, param.Id, param.GetUrl(), param.GetInfoUrl(), param.GetEditUrl())
return
}
}
err := param.Panel.UpdateDataFromDatabase(param.Value())
if err != nil {
alert := aAlert().SetTitle(template2.HTML(`<i class="icon fa fa-warning"></i> ` + language.Get("error") + `!`)).
SetTheme("warning").
SetContent(template2.HTML(err.Error())).
GetContent()
showForm(ctx, alert, param.Prefix, param.Id, param.GetUrl(), param.GetInfoUrl(), param.GetEditUrl())
return
}
if !param.FromList {
ctx.HTML(http.StatusOK, fmt.Sprintf(`<script>location.href="%s"</script>`, param.PreviousPath))
ctx.AddHeader(constant.PjaxUrlHeader, param.PreviousPath)
return
}
editUrl := modules.AorB(param.Panel.GetEditable(), param.GetEditUrl(), "")
deleteUrl := modules.AorB(param.Panel.GetDeletable(), param.GetDeleteUrl(), "")
exportUrl := modules.AorB(param.Panel.GetExportable(), param.GetExportUrl(), "")
newUrl := modules.AorB(param.Panel.GetCanAdd(), param.GetNewUrl(), "")
infoUrl := param.GetInfoUrl()
updateUrl := modules.AorB(param.Panel.GetEditable(), param.GetUpdateUrl(), "")
detailUrl := param.GetDetailUrl()
buf := showTable(ctx, param.Panel, param.Path, param.Param, exportUrl, newUrl,
deleteUrl, infoUrl, editUrl, updateUrl, detailUrl)
ctx.HTML(http.StatusOK, buf.String())
ctx.AddHeader(constant.PjaxUrlHeader, param.PreviousPath)
} | SetTheme("warning").
SetContent(template2.HTML(err.Error())).
GetContent()
} |
reservation.controller.ts | import { Controller, Get, Param, Logger } from '@nestjs/common';
import { ReservationEntity } from './reservation.entity';
import { CrudController } from '../lib/crud/crud-controller';
import { ReservationService } from './reservation.service';
@Controller('reservations')
export class ReservationController extends CrudController<ReservationEntity> {
constructor(protected readonly service: ReservationService) {
super(service);
}
@Get('count')
public async getCount() {
return await super.getCount();
}
@Get(':id')
public async getOne(@Param('id') id: string): Promise<ReservationEntity> {
Logger.log(`trying to get reservation of id: ${id}`);
const reservation = await this.service.repositery.findOne({
relations: ['reservedFlats'], | where: {
id
}
});
for (let index = 0; index < reservation.reservedFlats.length; index++) {
const element = reservation.reservedFlats[index];
reservation.reservedFlats[index] = await this.service.reservedFlatService.repositery.findOne({
relations: ['flat', 'flatType'],
where: {
id: element.id
}
});
}
return reservation;
}
} | |
3_3_millions_geometry.rs | use yam::legion::{systems::CommandBuffer, *};
use yam::nalgebra::Vector2;
use yam::*;
const SQRT_COUNT: usize = 1024;
const GEOM_COUNT: usize = SQRT_COUNT * SQRT_COUNT;
const QUAD_SIZE: f32 = 128.0;
fn main() -> Result<(), AppBuildError> {
AppBuilder::new()
.create_stage_builder(String::from("default"))?
.add_thread_local_system_startup(introduction_system())
.add_thread_local_system_startup(init_entities_system())
.add_thread_local_system_process(control_camera_system())
.add_thread_local_system_process(wander_system(0.0, 64.0, 16.0))
.into_app_builder()
.build()
.run();
Ok(())
}
#[system]
fn introduction() {
println!("Introduction:");
println!(" 1. Pressed the middle button of mouse to move the camera.");
println!(" 2. Scroll the wheel of mouse to scale the view of the camera.");
println!(" 3. Pressed A/D to control radius, S/W to control distance.");
}
#[system]
fn init_entities(commands: &mut CommandBuffer) {
// Push camera entity to `World`.
commands.push((Transform2D::default(), Camera2D::default()));
// `+1` prevents doubling the capacity of the vec when pushing elements into it.
let mut steerings: Instance<Steering> = Instance::with_capacity(GEOM_COUNT + 1);
let mut transform2ds: Instance<Transform2D> = Instance::with_capacity(GEOM_COUNT + 1);
for x in 0..SQRT_COUNT {
for y in 0..SQRT_COUNT {
let (tx, ty) = (QUAD_SIZE * x as f32, QUAD_SIZE * y as f32);
steerings.push(Steering::default());
transform2ds.push(Transform2D::with_position(tx, ty));
}
}
// Push geometry(with instance) entity to `World`.
commands.push((
transform2ds,
vec![
// main geometry
Geometry::new_2d(
Geometry2DType::ETriangle,
BorderDecoration::Solid,
Rgba::SOFT_BLACK,
BorderThickness::LocalSpace(4.0),
InnerDecoration::Solid,
Rgba::ROSE,
0,
Vector2::new(0.0, 0.0),
0.0,
QUAD_SIZE,
),
// direction line
Geometry::new_1d(
Geometry1DType::Segment,
BorderDecoration::DynDash,
Rgba::SOFT_BLACK,
BorderThickness::LocalSpace(4.0),
1,
Vector2::new(0.0, 0.0),
Vector2::new(0.0, QUAD_SIZE),
),
],
steerings,
));
}
#[system(for_each)]
#[filter(component::<Camera2D>())]
fn control_camera(transform: &mut Transform2D, #[resource] input: &Input) {
const SSPEED: f32 = 0.40;
if input.mouse.pressed(MouseButton::Middle) {
let (dx, dy) = input.mouse.mouse_motion_in_ws();
transform.position -= Vector2::<f32>::new(dx, dy);
}
let (_, motion) = input.mouse.mouse_wheel_motion();
transform.scale = Vector2::new(
(transform.scale.x + motion * SSPEED).max(0.1),
(transform.scale.y + motion * SSPEED).max(0.1),
);
}
#[system(for_each)]
#[filter(component::<Assembly>())]
fn wander(
trf2ds: &mut Instance<Transform2D>,
_asmbly: &mut Assembly,
strngs: &mut Instance<Steering>,
#[resource] input: &Input,
#[resource] time: &Time,
#[state] timer: &mut f32,
#[state] p_radius: &mut f32,
#[state] p_distance: &mut f32,
) {
use rayon::prelude::*;
const INTERVAL: f32 = 1.0;
const TSPEED: f32 = 16.0;
let delta = time.delta().as_secs_f32();
if *timer >= INTERVAL {
trf2ds
.par_iter_mut()
.zip(strngs.par_iter_mut())
.for_each(|(trf2d, strng)| {
let wander_force: Vector2<f32> = strng.wander(trf2d, *p_radius, *p_distance);
strng.apply_force(&wander_force);
});
*timer -= INTERVAL;
}
trf2ds
.par_iter_mut()
.zip(strngs.par_iter_mut())
.for_each(|(trf2d, strng)| {
strng.motion(trf2d, delta);
});
*timer += time.delta().as_secs_f32();
if input.keyboard.pressed(KeyCode::A) {
*p_radius -= TSPEED * delta;
} else if input.keyboard.pressed(KeyCode::D) {
*p_radius += TSPEED * delta;
}
if input.keyboard.pressed(KeyCode::S) {
*p_distance -= TSPEED * delta;
} else if input.keyboard.pressed(KeyCode::W) {
*p_distance += TSPEED * delta;
}
// {
// let r_geo = &mut asmbly[1];
// r_geo.set_position_uncheck(&Vector2::new(0.0, *p_distance));
// r_geo.set_size_uncheck(2.0 * (*p_radius));
// }
}
#[allow(dead_code)]
struct Steering {
velocity: Vector2<f32>,
force: Vector2<f32>,
}
impl Steering {
#[allow(dead_code)]
pub const MAX_SPEED: f32 = 256.0;
#[allow(dead_code)]
pub const MAX_FORCE: f32 = 512.0;
#[allow(dead_code)]
pub const THRESHOLD: f32 = 0.0001;
#[allow(dead_code)]
pub fn seek(&self, transform2d: &Transform2D, target: &Vector2<f32>) -> Vector2<f32> {
let to_target: Vector2<f32> = *target - transform2d.position;
let desired_velocity: Vector2<f32> = to_target.normalize() * Self::MAX_FORCE;
desired_velocity - self.velocity
}
pub fn wander(
&self,
transform2d: &Transform2D,
r_radius: f32,
r_distance: f32,
) -> Vector2<f32> {
// from -1.0 to 1.0
fn gen_random_f32() -> f32 {
2.0 * (rand::random::<f32>() - 0.5)
}
let jitter: Vector2<f32> =
Vector2::new(gen_random_f32(), gen_random_f32()).normalize() * r_radius;
let to_target: Vector2<f32> = jitter + transform2d.heading_y() * r_distance;
let desired_velocity: Vector2<f32> = to_target.normalize() * Self::MAX_FORCE;
desired_velocity - self.velocity
}
pub fn apply_force(&mut self, force: &Vector2<f32>) {
self.force = force.normalize() * Self::MAX_FORCE.min(force.norm());
}
pub fn motion(&mut self, transform2d: &mut Transform2D, delta: f32) {
self.velocity += self.force * delta;
self.velocity = self.velocity.normalize() * Self::MAX_SPEED.min(self.velocity.norm());
transform2d.position += self.velocity * delta;
if self.velocity.norm() > Self::THRESHOLD |
}
}
impl Default for Steering {
fn default() -> Self {
Self {
velocity: Vector2::new(0.0001, 0.0001),
force: Vector2::new(0.0, 0.0),
}
}
}
| {
transform2d.set_heading_y(&self.velocity);
} |
capctrl5.rs | #[doc = "Register `CAPCTRL5` reader"]
pub struct R(crate::R<CAPCTRL5_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<CAPCTRL5_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<CAPCTRL5_SPEC>> for R {
fn from(reader: crate::R<CAPCTRL5_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `CAPCTRL5` writer"]
pub struct W(crate::W<CAPCTRL5_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<CAPCTRL5_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<CAPCTRL5_SPEC>> for W {
fn from(writer: crate::W<CAPCTRL5_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `CAPCONn_L` reader - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."]
pub struct CAPCONN_L_R(crate::FieldReader<u16, u16>);
impl CAPCONN_L_R {
pub(crate) fn new(bits: u16) -> Self {
CAPCONN_L_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CAPCONN_L_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CAPCONn_L` writer - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."]
pub struct CAPCONN_L_W<'a> {
w: &'a mut W,
}
impl<'a> CAPCONN_L_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn | (self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff) | (value as u32 & 0xffff);
self.w
}
}
#[doc = "Field `CAPCONn_H` reader - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."]
pub struct CAPCONN_H_R(crate::FieldReader<u16, u16>);
impl CAPCONN_H_R {
pub(crate) fn new(bits: u16) -> Self {
CAPCONN_H_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CAPCONN_H_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `CAPCONn_H` writer - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."]
pub struct CAPCONN_H_W<'a> {
w: &'a mut W,
}
impl<'a> CAPCONN_H_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !(0xffff << 16)) | ((value as u32 & 0xffff) << 16);
self.w
}
}
impl R {
#[doc = "Bits 0:15 - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."]
#[inline(always)]
pub fn capconn_l(&self) -> CAPCONN_L_R {
CAPCONN_L_R::new((self.bits & 0xffff) as u16)
}
#[doc = "Bits 16:31 - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."]
#[inline(always)]
pub fn capconn_h(&self) -> CAPCONN_H_R {
CAPCONN_H_R::new(((self.bits >> 16) & 0xffff) as u16)
}
}
impl W {
#[doc = "Bits 0:15 - If bit m is one, event m causes the CAPn_L (UNIFY = 0) or the CAPn (UNIFY = 1) register to be loaded (event 0 = bit 0, event 1 = bit 1, etc.). The number of bits = number of match/captures in this SCT."]
#[inline(always)]
pub fn capconn_l(&mut self) -> CAPCONN_L_W {
CAPCONN_L_W { w: self }
}
#[doc = "Bits 16:31 - If bit m is one, event m causes the CAPn_H (UNIFY = 0) register to be loaded (event 0 = bit 16, event 1 = bit 17, etc.). The number of bits = number of match/captures in this SCT."]
#[inline(always)]
pub fn capconn_h(&mut self) -> CAPCONN_H_W {
CAPCONN_H_W { w: self }
}
#[doc = "Writes raw bits to the register."]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "SCT capture control register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [capctrl5](index.html) module"]
pub struct CAPCTRL5_SPEC;
impl crate::RegisterSpec for CAPCTRL5_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [capctrl5::R](R) reader structure"]
impl crate::Readable for CAPCTRL5_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [capctrl5::W](W) writer structure"]
impl crate::Writable for CAPCTRL5_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets CAPCTRL5 to value 0"]
impl crate::Resettable for CAPCTRL5_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| bits |
migrations_settings.py | settings_1_9_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64, ppc64le, ppc64, armv6, armv7, armv7hf, armv8, sparc, sparcv9, mips, mips64, avr, armv7s, armv7k]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc64le, ppc64, armv6, armv7, armv7hf, armv8, sparc, sparcv9, mips, mips64, avr, armv7s, armv7k]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0"]
watchOS:
version: ["4.0"]
tvOS:
version: ["11.0"]
FreeBSD:
SunOS:
Arduino:
board: ANY
arch: [x86, x86_64, ppc64le, ppc64, armv6, armv7, armv7hf, armv8, sparc, sparcv9, mips, mips64, avr, armv7s, armv7k]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
"""
settings_1_9_1 = settings_1_9_0
settings_1_9_2 = settings_1_9_1
settings_1_10_0 = settings_1_9_2
settings_1_10_1 = settings_1_10_0
settings_1_10_2 = settings_1_10_1
settings_1_11_0 = settings_1_10_2
settings_1_11_1 = settings_1_11_0
settings_1_11_2 = settings_1_11_1
settings_1_11_3 = settings_1_11_2
settings_1_12_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64, ppc32, ppc64le, ppc64, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc32, ppc64le, ppc64, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
Arduino:
board: ANY
arch: [x86, x86_64, ppc32, ppc64le, ppc64, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
"""
settings_1_12_1 = settings_1_12_0
settings_1_12_2 = settings_1_12_1
settings_1_12_3 = settings_1_12_2
settings_1_12_4 = settings_1_12_3
settings_1_13_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
Arduino:
board: ANY
arch: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
"""
settings_1_13_1 = settings_1_13_0
settings_1_13_2 = settings_1_13_1
settings_1_13_3 = settings_1_13_2
settings_1_13_4 = settings_1_13_3
settings_1_14_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
Arduino:
board: ANY
arch: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
"""
settings_1_14_1 = settings_1_14_0
settings_1_14_2 = settings_1_14_1
settings_1_14_3 = settings_1_14_2
settings_1_14_4 = settings_1_14_3
settings_1_14_5 = settings_1_14_4
settings_1_14_6 = settings_1_14_5
settings_1_15_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS]
arch_build: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, Arduino]
arch_target: [x86, x86_64, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
Arduino:
board: ANY
Emscripten:
arch: [x86, x86_64, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2",
"9"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_15_1 = settings_1_15_0
settings_1_15_2 = settings_1_15_1
settings_1_15_3 = settings_1_15_2
settings_1_15_4 = settings_1_15_3
settings_1_15_5 = settings_1_15_4
settings_1_16_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2", "8.3",
"9", "9.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_16_1 = settings_1_16_0
settings_1_17_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2", "8.3",
"9", "9.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0",
"8"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_17_1 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2", "8.3",
"9", "9.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_17_2 = settings_1_17_1
settings_1_18_0 = settings_1_17_2
settings_1_18_1 = settings_1_18_0
settings_1_18_2 = settings_1_18_1
settings_1_18_3 = settings_1_18_2
settings_1_18_4 = settings_1_18_3
settings_1_18_5 = settings_1_18_4
settings_1_19_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_19_1 = settings_1_19_0
settings_1_19_2 = settings_1_19_1
settings_1_19_3 = settings_1_19_2
settings_1_20_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc:
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3", "7.4",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio:
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_20_1 = settings_1_20_0
settings_1_20_2 = settings_1_20_1
settings_1_20_3 = settings_1_20_2
settings_1_20_4 = settings_1_20_3
settings_1_20_5 = settings_1_20_4
settings_1_21_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3", "7.4",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_21_1 = settings_1_21_0
settings_1_21_2 = settings_1_21_1
settings_1_21_3 = settings_1_21_2
settings_1_22_0 = settings_1_21_2
settings_1_22_1 = settings_1_22_0
settings_1_22_2 = settings_1_22_1
settings_1_22_3 = settings_1_22_2
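
# Illustrative sketch only (not part of the recorded settings data): from
# settings_1_21_0 onward the embedded YAML uses anchors and merge keys
# ("gcc: &gcc", "<<: *gcc") so the "intel" compiler reuses the gcc and
# Visual Studio definitions for its "base" sub-setting. A standard YAML
# loader resolves the merge, with locally declared keys overriding the
# merged ones. This assumes PyYAML is available; it is not imported by the
# module itself.
if __name__ == "__main__":
    import yaml  # PyYAML's safe loader resolves '<<' merge keys

    _data = yaml.safe_load(settings_1_21_0)
    _intel_gcc = _data["compiler"]["intel"]["base"]["gcc"]
    # Inherited from the &gcc anchor via the merge key:
    assert "9.2" in _intel_gcc["version"]
    # Locally declared keys override the merged ones (YAML keeps the literal
    # token "None" as a plain string here, it is not a YAML null):
    assert _intel_gcc["threads"] == ["None"]
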
settings_1_23_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3", "7.4",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang:
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_24_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3", "7.4",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_24_1 = settings_1_24_0
settings_1_25_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4",
"7", "7.1", "7.2", "7.3", "7.4",
"8", "8.1", "8.2", "8.3",
"9", "9.1", "9.2", "9.3",
"10"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
        toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
                  v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_25_1 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_25_2 = settings_1_25_1
settings_1_26_0 = settings_1_25_2
settings_1_26_1 = settings_1_26_0
settings_1_27_0 = settings_1_26_1
settings_1_27_1 = settings_1_27_0
settings_1_28_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_28_1 = settings_1_28_0
settings_1_28_2 = settings_1_28_1
settings_1_29_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_29_1 = settings_1_29_0
settings_1_29_2 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_30_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_30_1 = settings_1_30_0
settings_1_30_2 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_31_0 = settings_1_30_2
settings_1_31_1 = settings_1_31_0
settings_1_31_2 = settings_1_31_1
settings_1_31_3 = settings_1_31_2
settings_1_31_4 = settings_1_31_3
settings_1_32_0 = settings_1_31_4
settings_1_32_1 = settings_1_32_0
settings_1_33_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0"]
sdk: [None, "macosx"]
subsystem: [None, "Catalyst"]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_33_1 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20] # Deprecated, use compiler.cppstd
"""
settings_1_34_0 = settings_1_33_1
settings_1_34_1 = settings_1_34_0
settings_1_35_0 = settings_1_34_1
settings_1_35_1 = settings_1_35_0
settings_1_35_2 = settings_1_35_1
settings_1_36_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_37_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_37_1 = settings_1_37_0
settings_1_37_2 = settings_1_37_1
settings_1_38_0 = settings_1_37_2
settings_1_39_0 = settings_1_38_0
settings_1_40_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16", "17"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL, v143]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28", "19.29",
"19.3", "19.30"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20, 23]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12", "13"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
update: [None, ANY]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_40_1 = settings_1_40_0
settings_1_40_2 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16", "17"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL, v143]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28", "19.29",
"19.3", "19.30"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20, 23]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12", "13"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0", "13.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_40_3 = settings_1_40_2
settings_1_40_4 = settings_1_40_3
settings_1_41_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3", "11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4", "13.0"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16", "17"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL, v143]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28", "19.29",
"19.3", "19.30"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20, 23]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12", "13"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0", "13.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
update: [None, ANY]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
intel-cc:
version: ["2021.1", "2021.2", "2021.3"]
update: [None, ANY]
mode: ["icx", "classic", "dpcpp"]
libcxx: [None, libstdc++, libstdc++11, libc++]
cppstd: [None, 98, gnu98, 03, gnu03, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, static, dynamic]
runtime_type: [None, Debug, Release]
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_42_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "12.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3",
"11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4",
"13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6", "13.7",
"14.0", "14.1", "14.2", "14.3", "14.4", "14.5", "14.6", "14.7", "14.8", "15.0", "15.1"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1", "6.2",
"7.0", "7.1", "7.2", "7.3", "7.4", "7.5", "7.6", "8.0", "8.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4",
"13.0", "13.2", "13.3", "13.4", "14.0", "14.2", "14.3", "14.4", "14.5", "14.6", "14.7",
"15.0", "15.1"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16", "17"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL, v143]
cppstd: [None, 14, 17, 20]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28", "19.29",
"19.3", "19.30"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20, 23]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12", "13"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0", "13.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
update: [None, ANY]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
intel-cc:
version: ["2021.1", "2021.2", "2021.3"]
update: [None, ANY]
mode: ["icx", "classic", "dpcpp"]
libcxx: [None, libstdc++, libstdc++11, libc++]
cppstd: [None, 98, gnu98, 03, gnu03, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, static, dynamic]
runtime_type: [None, Debug, Release]
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
"""
settings_1_42_1 = settings_1_42_0
settings_1_43_0 = """
# Only for cross building, 'os_build/arch_build' is the system that runs Conan
os_build: [Windows, WindowsStore, Linux, Macos, FreeBSD, SunOS, AIX]
arch_build: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7]
# Only for building cross compilation tools, 'os_target/arch_target' is the system for
# which the tools generate code
os_target: [Windows, Linux, Macos, Android, iOS, watchOS, tvOS, FreeBSD, SunOS, AIX, Arduino, Neutrino]
arch_target: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
# Rest of the settings are "host" settings:
# - For native building/cross building: Where the library/program will run.
# - For building cross compilation tools: Where the cross compiler will run.
os:
Windows:
subsystem: [None, cygwin, msys, msys2, wsl]
WindowsStore:
version: ["8.1", "10.0"]
WindowsCE:
platform: ANY
version: ["5.0", "6.0", "7.0", "8.0"]
Linux:
Macos:
version: [None, "10.6", "10.7", "10.8", "10.9", "10.10", "10.11", "10.12", "10.13", "10.14", "10.15", "11.0", "12.0", "13.0"]
sdk: [None, "macosx"]
subsystem: [None, catalyst]
Android:
api_level: ANY
iOS:
version: ["7.0", "7.1", "8.0", "8.1", "8.2", "8.3", "9.0", "9.1", "9.2", "9.3", "10.0", "10.1", "10.2", "10.3",
"11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4",
"13.0", "13.1", "13.2", "13.3", "13.4", "13.5", "13.6", "13.7",
"14.0", "14.1", "14.2", "14.3", "14.4", "14.5", "14.6", "14.7", "14.8", "15.0", "15.1"]
sdk: [None, "iphoneos", "iphonesimulator"]
watchOS:
version: ["4.0", "4.1", "4.2", "4.3", "5.0", "5.1", "5.2", "5.3", "6.0", "6.1", "6.2",
"7.0", "7.1", "7.2", "7.3", "7.4", "7.5", "7.6", "8.0", "8.1"]
sdk: [None, "watchos", "watchsimulator"]
tvOS:
version: ["11.0", "11.1", "11.2", "11.3", "11.4", "12.0", "12.1", "12.2", "12.3", "12.4",
"13.0", "13.2", "13.3", "13.4", "14.0", "14.2", "14.3", "14.4", "14.5", "14.6", "14.7",
"15.0", "15.1"]
sdk: [None, "appletvos", "appletvsimulator"]
FreeBSD:
SunOS:
AIX:
Arduino:
board: ANY
Emscripten:
Neutrino:
version: ["6.4", "6.5", "6.6", "7.0", "7.1"]
arch: [x86, x86_64, ppc32be, ppc32, ppc64le, ppc64, armv4, armv4i, armv5el, armv5hf, armv6, armv7, armv7hf, armv7s, armv7k, armv8, armv8_32, armv8.3, sparc, sparcv9, mips, mips64, avr, s390, s390x, asm.js, wasm, sh4le, e2k-v2, e2k-v3, e2k-v4, e2k-v5, e2k-v6, e2k-v7, xtensalx6, xtensalx106]
compiler:
sun-cc:
version: ["5.10", "5.11", "5.12", "5.13", "5.14", "5.15"]
threads: [None, posix]
libcxx: [libCstd, libstdcxx, libstlport, libstdc++]
gcc: &gcc
version: ["4.1", "4.4", "4.5", "4.6", "4.7", "4.8", "4.9",
"5", "5.1", "5.2", "5.3", "5.4", "5.5",
"6", "6.1", "6.2", "6.3", "6.4", "6.5",
"7", "7.1", "7.2", "7.3", "7.4", "7.5",
"8", "8.1", "8.2", "8.3", "8.4",
"9", "9.1", "9.2", "9.3",
"10", "10.1", "10.2", "10.3",
"11", "11.1", "11.2"]
libcxx: [libstdc++, libstdc++11]
threads: [None, posix, win32] # Windows MinGW
exception: [None, dwarf2, sjlj, seh] # Windows MinGW
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
Visual Studio: &visual_studio
runtime: [MD, MT, MTd, MDd]
version: ["8", "9", "10", "11", "12", "14", "15", "16", "17"]
toolset: [None, v90, v100, v110, v110_xp, v120, v120_xp,
v140, v140_xp, v140_clang_c2, LLVM-vs2012, LLVM-vs2012_xp,
LLVM-vs2013, LLVM-vs2013_xp, LLVM-vs2014, LLVM-vs2014_xp,
LLVM-vs2017, LLVM-vs2017_xp, v141, v141_xp, v141_clang_c2, v142,
llvm, ClangCL, v143]
cppstd: [None, 14, 17, 20, 23]
msvc:
version: ["19.0",
"19.1", "19.10", "19.11", "19.12", "19.13", "19.14", "19.15", "19.16",
"19.2", "19.20", "19.21", "19.22", "19.23", "19.24", "19.25", "19.26", "19.27", "19.28", "19.29",
"19.3", "19.30"]
runtime: [static, dynamic]
runtime_type: [Debug, Release]
cppstd: [14, 17, 20, 23]
clang:
version: ["3.3", "3.4", "3.5", "3.6", "3.7", "3.8", "3.9", "4.0",
"5.0", "6.0", "7.0", "7.1",
"8", "9", "10", "11", "12", "13"]
libcxx: [None, libstdc++, libstdc++11, libc++, c++_shared, c++_static]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, MD, MT, MTd, MDd]
apple-clang: &apple_clang
version: ["5.0", "5.1", "6.0", "6.1", "7.0", "7.3", "8.0", "8.1", "9.0", "9.1", "10.0", "11.0", "12.0", "13.0"]
libcxx: [libstdc++, libc++]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20]
intel:
version: ["11", "12", "13", "14", "15", "16", "17", "18", "19", "19.1"]
update: [None, ANY]
base:
gcc:
<<: *gcc
threads: [None]
exception: [None]
Visual Studio:
<<: *visual_studio
apple-clang:
<<: *apple_clang
intel-cc:
version: ["2021.1", "2021.2", "2021.3"]
update: [None, ANY]
mode: ["icx", "classic", "dpcpp"]
libcxx: [None, libstdc++, libstdc++11, libc++]
cppstd: [None, 98, gnu98, 03, gnu03, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23]
runtime: [None, static, dynamic]
runtime_type: [None, Debug, Release]
qcc:
version: ["4.4", "5.4", "8.3"]
libcxx: [cxx, gpp, cpp, cpp-ne, accp, acpp-ne, ecpp, ecpp-ne]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17]
mcst-lcc:
version: ["1.19", "1.20", "1.21", "1.22", "1.23", "1.24", "1.25"]
base:
gcc:
<<: *gcc
threads: [None]
exceptions: [None]
build_type: [None, Debug, Release, RelWithDebInfo, MinSizeRel]
cppstd: [None, 98, gnu98, 11, gnu11, 14, gnu14, 17, gnu17, 20, gnu20, 23, gnu23] # Deprecated, use compiler.cppstd
""" | |
cli.ts | import { LitAnalyzerRuleName, LitAnalyzerRules } from "../analyze/lit-analyzer-config";
import { analyzeCommand } from "./analyze-command";
import { LitAnalyzerCliConfig } from "./lit-analyzer-cli-config";
import { parseCliArguments } from "./parse-cli-arguments";
import { camelToDashCase } from "./util";
const DEFAULT_GLOB = "src/**/*.{js,jsx,ts,tsx}";
const DEFAULT_CONFIG: LitAnalyzerCliConfig = {
noColor: false,
quiet: false,
maxWarnings: 0,
debug: false,
help: false,
failFast: false,
format: "code",
strict: false,
rules: {}
};
/**
* The main function of the cli.
*/
export async function cli() {
const { _: args, ...rest } = parseCliArguments(process.argv.slice(2));
const globs = args.length > 0 ? args : [DEFAULT_GLOB];
const config: LitAnalyzerCliConfig = { ...DEFAULT_CONFIG, ...rest };
if (config.debug) {
console.log("CLI Config", config);
}
// Always convert "rules" to "dash case" because "rules" expects it.
config.rules = Object.entries(config.rules || {}).reduce(
(acc, [k, v]) => {
acc[camelToDashCase(k) as LitAnalyzerRuleName] = v;
return acc;
},
{} as LitAnalyzerRules
);
if (config.help) {
console.log(`
Usage
lit-analyzer [<file|directory|glob>]
Options
--help Print this message.
--format FORMAT Specify output format. The possible options are:
o code Highlight problems in the code (default)
o list Short and precise list of problems
o markdown Markdown format
--noColor Print results without color
--outFile FILE Emit all output to a single file
--maxWarnings NUMBER Fail only when the number of warnings is larger than this number
--quiet Report only errors and not warnings | --failFast Exit the process right after the first problem has been found
 --strict Enable strict mode. This changes the default ruleset.
--rules.___ SEVERITY Enable or disable a rule (example: --rules.no-unknown-tag-name off).
Severity can be: "off" | "warn" | "error". The possible rules are:
o no-unknown-tag-name
o no-missing-import
o no-unclosed-tag
o no-unknown-attribute
o no-unknown-property
o no-unknown-event
o no-unknown-slot
o no-invalid-boolean-binding
o no-expressionless-property-binding
o no-noncallable-event-binding
o no-boolean-in-attribute-binding
o no-complex-attribute-binding
o no-nullable-attribute-binding
o no-incompatible-type-binding
o no-invalid-directive-binding
o no-incompatible-property-type
o no-unknown-property-converter
o no-invalid-attribute-name
o no-invalid-tag-name
o no-invalid-css
Examples
lit-analyzer src
lit-analyzer "src/**/*.{js,ts}"
lit-analyzer my-element.js
`);
return;
}
const success = await analyzeCommand(globs, config);
process.exit(success ? 0 : 1);
} | |
checkupdates.go | package action
import (
"context"
"time"
"github.com/syndesisio/syndesis/install/operator/pkg"
"github.com/syndesisio/syndesis/install/operator/pkg/apis/syndesis/v1beta2"
"github.com/syndesisio/syndesis/install/operator/pkg/syndesis/clienttools"
"sigs.k8s.io/controller-runtime/pkg/manager"
)
// Checks if the syndesis installation should be upgraded and, if so, moves it to the "Upgrading" status.
type checkUpdatesAction struct {
baseAction
operatorVersion string
}
func | (mgr manager.Manager, clientTools *clienttools.ClientTools) SyndesisOperatorAction {
return checkUpdatesAction{
newBaseAction(mgr, clientTools, "check-updates"),
"",
}
}
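// CanExecute reports whether the upgrade check applies: only installations that are
// fully installed, or that failed at startup, are considered.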
func (a checkUpdatesAction) CanExecute(syndesis *v1beta2.Syndesis) bool {
return syndesisPhaseIs(syndesis,
v1beta2.SyndesisPhaseInstalled,
v1beta2.SyndesisPhaseStartupFailed)
}
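// Execute compares the installed version with the operator version and, when they
// differ, moves the resource into the Upgrading phase.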
func (a checkUpdatesAction) Execute(ctx context.Context, syndesis *v1beta2.Syndesis) error {
if a.operatorVersion == "" {
a.operatorVersion = pkg.DefaultOperatorTag
}
if syndesis.Status.Version == a.operatorVersion {
// Everything fine
return nil
} else {
return a.setPhaseToUpgrading(ctx, syndesis)
}
}
/*
 * The following functions sleep after updating the custom resource. This is
 * needed to avoid race conditions where Kubernetes has not yet persisted the
 * update or is still serving the old version of the object.
*/
func (a checkUpdatesAction) setPhaseToUpgrading(ctx context.Context, syndesis *v1beta2.Syndesis) (err error) {
target := syndesis.DeepCopy()
target.Status.Phase = v1beta2.SyndesisPhaseUpgrading
target.Status.TargetVersion = a.operatorVersion
target.Status.Reason = v1beta2.SyndesisStatusReasonMissing
target.Status.Description = "Upgrading from " + syndesis.Status.Version + " to " + a.operatorVersion
target.Status.LastUpgradeFailure = nil
target.Status.UpgradeAttempts = 0
target.Status.ForceUpgrade = false
client, _ := a.clientTools.RuntimeClient()
err = client.Update(ctx, target)
time.Sleep(3 * time.Second)
return
}
| newCheckUpdatesAction |
__init__.py | """:mod:`flask_aiohttp` --- Asynchronous Flask with aiohttp
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Provides Flask extension for asynchronous I/O.
With this extension, we can use `asyncio.coroutine` as Flask's view function.
So, we can add
`asyncio-redis <https://github.com/jonathanslenders/asyncio-redis>`_, or
websocket support to your application.
To make a view asynchronous, simply add the :func:`helper.async` decorator to
your view function ::
@app.route('/foo')
@async
def lazy():
yield from asyncio.sleep(3)
return 'Done'
You have to run your flask application with :class:`AioHTTP` ::
aio = AioHTTP(app)
aio.run(app)
And you can also use gunicorn ::
aio = AioHTTP(flask_app)
app = aio.create_aiohttp_app(flask_app)
# Run gunicorn by
#
# gunicorn your_module:app -k aiohttp.worker.GunicornWebWorker
# -b localhost:8080
You can even use aiohttp's websocket in your Flask application using
:func:`helper.websocket` ::
aio = AioHTTP(flask_app)
@app.route('echo')
@websocket
def echo():
while True:
msg = yield from aio.ws.receive_msg()
if msg.tp == aiohttp.MsgType.text:
aio.ws.send_str(msg.data)
elif msg.tp == aiohttp.MsgType.close:
print('websocket connection closed')
break
elif msg.tp == aiohttp.MsgType.error:
print('ws connection closed with exception %s',
aio.ws.exception())
break
"""
import os
import asyncio
import logging
import flask
import aiohttp.web
from flask import request
from werkzeug.debug import DebuggedApplication
from werkzeug.serving import run_with_reloader
from .helper import async, websocket, has_websocket, wrap_wsgi_middleware
from .handler import WSGIHandlerBase, WSGIWebSocketHandler
__all__ = ['AioHTTP', 'async', 'websocket', 'has_websocket',
'wrap_wsgi_middleware']
class AioHTTP(object):
"""Flask middleware for aiohttp"""
def __init__(self, app: flask.Flask=None, *,
handler_factory=WSGIWebSocketHandler):
"""
:param app:
Flask application
:param handler_factory:
aiohttp request handler factory. Factory should accept a single
flask application.
"""
self.handler_factory = handler_factory
if app is not None:
self.init_app(app)
def init_app(self, app: flask.Flask):
"""Init Flask app
:param app: Flask application
"""
app.aiohttp_app = self.create_aiohttp_app(app)
def create_aiohttp_app(self, app: flask.Flask) -> aiohttp.web.Application:
|
@staticmethod
def run(app: flask.Flask, *,
host='127.0.0.1', port=None, debug=False, loop=None):
"""Run Flask application on aiohttp
:param app: Flask application
:param host: host name or ip
:param port: port (default is 5000)
:param debug: debug?
"""
# Check initialization status of flask app.
if getattr(app, 'aiohttp_app', None) is None:
raise RuntimeError(
"This application is not initialized for Flask-aiohttp. "
"Please initialize the app by `aio.init_app(app)`.")
# Configure args
if port is None:
server_name = app.config['SERVER_NAME']
if server_name and ':' in server_name:
port = int(server_name.rsplit(':', 1)[-1])
else:
port = 5000
loop = loop or asyncio.get_event_loop()
# Define run_server
def run_server():
# run_server can be called in another thread
asyncio.set_event_loop(loop)
coroutine = loop.create_server(
app.aiohttp_app.make_handler(), host, port)
loop.run_until_complete(coroutine)
try:
loop.run_forever()
except KeyboardInterrupt:
pass
# Configure logging
file_handler = logging.StreamHandler()
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
if debug:
# Logging
app.logger.setLevel(logging.DEBUG)
# Wrap WSGI app with werkzeug debugger.
app.wsgi_app = wrap_wsgi_middleware(DebuggedApplication)(
app.wsgi_app)
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
app.logger.info(' * Running on http://{}:{}/'
.format(host, port))
# Run with reloader
run_with_reloader(run_server)
else:
app.logger.info(' * Running on http://{}:{}/'.format(host, port))
run_server()
@property
def ws(self) -> aiohttp.web.WebSocketResponse:
"""Websocket response of aiohttp"""
ws = request.environ.get('wsgi.websocket', None)
if ws is None:
raise RuntimeError('Request context is not a WebSocket context.')
return ws
| """Create aiohttp web application from Flask application
:param app: Flask application
:returns: aiohttp web application
"""
# aiohttp web application instance
aio_app = aiohttp.web.Application()
# WSGI handler for aiohttp
wsgi_handler = self.handler_factory(app)
# aiohttp's router should accept any possible HTTP method of request.
aio_app.router.add_route('*', r'/{path:.*}', wsgi_handler)
return aio_app |
availablePrivateEndpointTypes.ts | /*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT License.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is regenerated.
*/
import * as coreHttp from "@azure/core-http";
import * as Mappers from "../models/mappers";
import * as Parameters from "../models/parameters";
import { NetworkManagementClient } from "../networkManagementClient";
import {
AvailablePrivateEndpointTypesListResponse,
AvailablePrivateEndpointTypesListByResourceGroupResponse,
AvailablePrivateEndpointTypesListNextResponse,
AvailablePrivateEndpointTypesListByResourceGroupNextResponse
} from "../models";
/**
 * Class representing an AvailablePrivateEndpointTypes.
*/
export class | {
private readonly client: NetworkManagementClient;
/**
 * Initialize a new instance of the AvailablePrivateEndpointTypes class.
* @param client Reference to the service client
*/
constructor(client: NetworkManagementClient) {
this.client = client;
}
/**
* Returns all of the resource types that can be linked to a Private Endpoint in this subscription in
* this region.
* @param location The location of the domain name.
* @param options The options parameters.
*/
list(
location: string,
options?: coreHttp.OperationOptions
): Promise<AvailablePrivateEndpointTypesListResponse> {
const operationArguments: coreHttp.OperationArguments = {
location,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(
operationArguments,
listOperationSpec
) as Promise<AvailablePrivateEndpointTypesListResponse>;
}
/**
* Returns all of the resource types that can be linked to a Private Endpoint in this subscription in
* this region.
* @param location The location of the domain name.
* @param resourceGroupName The name of the resource group.
* @param options The options parameters.
*/
listByResourceGroup(
location: string,
resourceGroupName: string,
options?: coreHttp.OperationOptions
): Promise<AvailablePrivateEndpointTypesListByResourceGroupResponse> {
const operationArguments: coreHttp.OperationArguments = {
location,
resourceGroupName,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(
operationArguments,
listByResourceGroupOperationSpec
) as Promise<AvailablePrivateEndpointTypesListByResourceGroupResponse>;
}
/**
* ListNext
* @param location The location of the domain name.
* @param nextLink The nextLink from the previous successful call to the List method.
* @param options The options parameters.
*/
listNext(
location: string,
nextLink: string,
options?: coreHttp.OperationOptions
): Promise<AvailablePrivateEndpointTypesListNextResponse> {
const operationArguments: coreHttp.OperationArguments = {
location,
nextLink,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(
operationArguments,
listNextOperationSpec
) as Promise<AvailablePrivateEndpointTypesListNextResponse>;
}
/**
* ListByResourceGroupNext
* @param location The location of the domain name.
* @param resourceGroupName The name of the resource group.
* @param nextLink The nextLink from the previous successful call to the ListByResourceGroup method.
* @param options The options parameters.
*/
listByResourceGroupNext(
location: string,
resourceGroupName: string,
nextLink: string,
options?: coreHttp.OperationOptions
): Promise<AvailablePrivateEndpointTypesListByResourceGroupNextResponse> {
const operationArguments: coreHttp.OperationArguments = {
location,
resourceGroupName,
nextLink,
options: coreHttp.operationOptionsToRequestOptionsBase(options || {})
};
return this.client.sendOperationRequest(
operationArguments,
listByResourceGroupNextOperationSpec
) as Promise<AvailablePrivateEndpointTypesListByResourceGroupNextResponse>;
}
}
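// A usage sketch (hypothetical; assumes an already authenticated NetworkManagementClient
// named `networkClient` and a location string such as "westus"):
//
//   const availableTypes = new AvailablePrivateEndpointTypes(networkClient);
//   const result = await availableTypes.list("westus");
//   const scoped = await availableTypes.listByResourceGroup("westus", "myResourceGroup");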
// Operation Specifications
const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);
const listOperationSpec: coreHttp.OperationSpec = {
path:
"/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/availablePrivateEndpointTypes",
httpMethod: "GET",
responses: {
200: {
bodyMapper: Mappers.AvailablePrivateEndpointTypesResult
},
default: {
bodyMapper: Mappers.CloudError
}
},
queryParameters: [Parameters.apiVersion],
urlParameters: [
Parameters.$host,
Parameters.subscriptionId,
Parameters.location
],
headerParameters: [Parameters.accept],
serializer
};
const listByResourceGroupOperationSpec: coreHttp.OperationSpec = {
path:
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/locations/{location}/availablePrivateEndpointTypes",
httpMethod: "GET",
responses: {
200: {
bodyMapper: Mappers.AvailablePrivateEndpointTypesResult
},
default: {
bodyMapper: Mappers.CloudError
}
},
queryParameters: [Parameters.apiVersion],
urlParameters: [
Parameters.$host,
Parameters.resourceGroupName,
Parameters.subscriptionId,
Parameters.location
],
headerParameters: [Parameters.accept],
serializer
};
const listNextOperationSpec: coreHttp.OperationSpec = {
path: "{nextLink}",
httpMethod: "GET",
responses: {
200: {
bodyMapper: Mappers.AvailablePrivateEndpointTypesResult
},
default: {
bodyMapper: Mappers.CloudError
}
},
queryParameters: [Parameters.apiVersion],
urlParameters: [
Parameters.$host,
Parameters.subscriptionId,
Parameters.nextLink,
Parameters.location
],
headerParameters: [Parameters.accept],
serializer
};
const listByResourceGroupNextOperationSpec: coreHttp.OperationSpec = {
path: "{nextLink}",
httpMethod: "GET",
responses: {
200: {
bodyMapper: Mappers.AvailablePrivateEndpointTypesResult
},
default: {
bodyMapper: Mappers.CloudError
}
},
queryParameters: [Parameters.apiVersion],
urlParameters: [
Parameters.$host,
Parameters.resourceGroupName,
Parameters.subscriptionId,
Parameters.nextLink,
Parameters.location
],
headerParameters: [Parameters.accept],
serializer
};
| AvailablePrivateEndpointTypes |
lib.rs | //! The top-level documentation resides on the [project README](https://github.com/graphql-rust/graphql-client) at the moment.
//!
//! The main interface to this library is the custom derive that generates modules from a GraphQL query and schema. See the docs for the [`GraphQLQuery`] trait for a full example.
#![deny(missing_docs)]
#![warn(rust_2018_idioms)]
#[cfg(feature = "graphql_query_derive")]
#[allow(unused_imports)]
#[macro_use]
extern crate graphql_query_derive;
#[cfg(feature = "graphql_query_derive")]
#[doc(hidden)]
pub use graphql_query_derive::*;
use serde::*;
#[cfg(feature = "web")]
pub mod web;
use std::collections::HashMap;
use std::fmt::{self, Display};
doc_comment::doctest!("../../README.md");
/// A convenience trait that can be used to build a GraphQL request body.
///
/// This will be implemented for you by codegen in the normal case. It is implemented on the struct you place the derive on.
///
/// Example:
///
/// ```
/// use graphql_client::*;
/// use serde_json::json;
/// use std::error::Error;
///
/// #[derive(GraphQLQuery)]
/// #[graphql(
/// query_path = "../graphql_client_codegen/src/tests/star_wars_query.graphql",
/// schema_path = "../graphql_client_codegen/src/tests/star_wars_schema.graphql"
/// )]
/// struct StarWarsQuery;
///
/// fn main() -> Result<(), Box<dyn Error>> {
/// use graphql_client::GraphQLQuery;
///
/// let variables = star_wars_query::Variables {
/// episode_for_hero: star_wars_query::Episode::NEWHOPE,
/// };
///
/// let expected_body = json!({
/// "operationName": star_wars_query::OPERATION_NAME,
/// "query": star_wars_query::QUERY,
/// "variables": {
/// "episodeForHero": "NEWHOPE"
/// },
/// });
///
/// let actual_body = serde_json::to_value(
/// StarWarsQuery::build_query(variables)
/// )?;
///
/// assert_eq!(actual_body, expected_body);
///
/// Ok(())
/// }
/// ```
pub trait GraphQLQuery {
/// The shape of the variables expected by the query. This should be a generated struct most of the time.
type Variables: serde::Serialize;
/// The top-level shape of the response data (the `data` field in the GraphQL response). In practice this should be generated, since it is hard to write by hand without error.
type ResponseData: for<'de> serde::Deserialize<'de>;
/// Produce a GraphQL query struct that can be JSON serialized and sent to a GraphQL API.
fn build_query(variables: Self::Variables) -> QueryBody<Self::Variables>;
}
/// The form in which queries are sent over HTTP in most implementations. This will be built using the [`GraphQLQuery`] trait normally.
#[derive(Debug, Serialize, Deserialize)]
pub struct QueryBody<Variables> {
/// The values for the variables. They must match those declared in the queries. This should be the `Variables` struct from the generated module corresponding to the query.
pub variables: Variables,
/// The GraphQL query, as a string.
pub query: &'static str,
/// The GraphQL operation name, as a string.
#[serde(rename = "operationName")]
pub operation_name: &'static str,
}
/// Represents a location inside a query string. Used in errors. See [`Error`].
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, PartialEq)]
pub struct | {
/// The line number in the query string where the error originated (starting from 1).
pub line: i32,
/// The column number in the query string where the error originated (starting from 1).
pub column: i32,
}
/// Part of a path in a query. It can be an object key or an array index. See [`Error`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum PathFragment {
/// A key inside an object
Key(String),
/// An index inside an array
Index(i32),
}
impl Display for PathFragment {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
PathFragment::Key(ref key) => write!(f, "{}", key),
PathFragment::Index(ref idx) => write!(f, "{}", idx),
}
}
}
/// An element in the top-level `errors` array of a response body.
///
/// This tries to be as close to the spec as possible.
///
/// [Spec](https://github.com/facebook/graphql/blob/master/spec/Section%207%20--%20Response.md)
///
///
/// ```
/// # use serde_json::json;
/// # use serde::Deserialize;
/// # use graphql_client::GraphQLQuery;
/// # use std::error::Error;
/// #
/// # #[derive(Debug, Deserialize, PartialEq)]
/// # struct ResponseData {
/// # something: i32
/// # }
/// #
/// # fn main() -> Result<(), Box<dyn Error>> {
/// use graphql_client::*;
///
/// let body: Response<ResponseData> = serde_json::from_value(json!({
/// "data": null,
/// "errors": [
/// {
/// "message": "The server crashed. Sorry.",
/// "locations": [{ "line": 1, "column": 1 }]
/// },
/// {
/// "message": "Seismic activity detected",
/// "path": ["underground", 20]
/// },
/// ],
/// }))?;
///
/// let expected: Response<ResponseData> = Response {
/// data: None,
/// errors: Some(vec![
/// Error {
/// message: "The server crashed. Sorry.".to_owned(),
/// locations: Some(vec![
/// Location {
/// line: 1,
/// column: 1,
/// }
/// ]),
/// path: None,
/// extensions: None,
/// },
/// Error {
/// message: "Seismic activity detected".to_owned(),
/// locations: None,
/// path: Some(vec![
/// PathFragment::Key("underground".into()),
/// PathFragment::Index(20),
/// ]),
/// extensions: None,
/// },
/// ]),
/// };
///
/// assert_eq!(body, expected);
///
/// # Ok(())
/// # }
/// ```
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct Error {
/// The human-readable error message. This is the only required field.
pub message: String,
/// Which locations in the query the error applies to.
pub locations: Option<Vec<Location>>,
/// Which path in the query the error applies to, e.g. `["users", 0, "email"]`.
pub path: Option<Vec<PathFragment>>,
    /// Additional error information. Its exact format is defined by the server.
pub extensions: Option<HashMap<String, serde_json::Value>>,
}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Use `/` as a separator like JSON Pointer.
let path = self
.path
.as_ref()
.map(|fragments| {
fragments
.iter()
.fold(String::new(), |mut acc, item| {
acc.push_str(&format!("{}/", item));
acc
})
.trim_end_matches('/')
.to_string()
})
.unwrap_or_else(|| "<query>".to_string());
// Get the location of the error. We'll use just the first location for this.
let loc = self
.locations
.as_ref()
.and_then(|locations| locations.iter().next())
.cloned()
.unwrap_or_else(Location::default);
write!(f, "{}:{}:{}: {}", path, loc.line, loc.column, self.message)
}
}
/// The generic shape taken by the responses of GraphQL APIs.
///
/// This will generally be used with the `ResponseData` struct from a derived module.
///
/// [Spec](https://github.com/facebook/graphql/blob/master/spec/Section%207%20--%20Response.md)
///
/// ```
/// # use serde_json::json;
/// # use serde::Deserialize;
/// # use graphql_client::GraphQLQuery;
/// # use std::error::Error;
/// #
/// # #[derive(Debug, Deserialize, PartialEq)]
/// # struct User {
/// # id: i32,
/// # }
/// #
/// # #[derive(Debug, Deserialize, PartialEq)]
/// # struct Dog {
/// # name: String
/// # }
/// #
/// # #[derive(Debug, Deserialize, PartialEq)]
/// # struct ResponseData {
/// # users: Vec<User>,
/// # dogs: Vec<Dog>,
/// # }
/// #
/// # fn main() -> Result<(), Box<dyn Error>> {
/// use graphql_client::Response;
///
/// let body: Response<ResponseData> = serde_json::from_value(json!({
/// "data": {
/// "users": [{"id": 13}],
/// "dogs": [{"name": "Strelka"}],
/// },
/// "errors": [],
/// }))?;
///
/// let expected: Response<ResponseData> = Response {
/// data: Some(ResponseData {
/// users: vec![User { id: 13 }],
/// dogs: vec![Dog { name: "Strelka".to_owned() }],
/// }),
/// errors: Some(vec![]),
/// };
///
/// assert_eq!(body, expected);
///
/// # Ok(())
/// # }
/// ```
#[derive(Debug, Serialize, Deserialize, PartialEq)]
pub struct Response<Data> {
/// The absent, partial or complete response data.
pub data: Option<Data>,
/// The top-level errors returned by the server.
pub errors: Option<Vec<Error>>,
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn graphql_error_works_with_just_message() {
let err = json!({
"message": "I accidentally your whole query"
});
let deserialized_error: Error = serde_json::from_value(err).unwrap();
assert_eq!(
deserialized_error,
Error {
message: "I accidentally your whole query".to_string(),
locations: None,
path: None,
extensions: None,
}
)
}
#[test]
fn full_graphql_error_deserialization() {
let err = json!({
"message": "I accidentally your whole query",
"locations": [{ "line": 3, "column": 13}, {"line": 56, "column": 1}],
"path": ["home", "alone", 3, "rating"]
});
let deserialized_error: Error = serde_json::from_value(err).unwrap();
assert_eq!(
deserialized_error,
Error {
message: "I accidentally your whole query".to_string(),
locations: Some(vec![
Location {
line: 3,
column: 13,
},
Location {
line: 56,
column: 1,
},
]),
path: Some(vec![
PathFragment::Key("home".to_owned()),
PathFragment::Key("alone".to_owned()),
PathFragment::Index(3),
PathFragment::Key("rating".to_owned()),
]),
extensions: None,
}
)
}
#[test]
fn full_graphql_error_with_extensions_deserialization() {
let err = json!({
"message": "I accidentally your whole query",
"locations": [{ "line": 3, "column": 13}, {"line": 56, "column": 1}],
"path": ["home", "alone", 3, "rating"],
"extensions": {
"code": "CAN_NOT_FETCH_BY_ID",
"timestamp": "Fri Feb 9 14:33:09 UTC 2018"
}
});
let deserialized_error: Error = serde_json::from_value(err).unwrap();
let mut expected_extensions = HashMap::new();
expected_extensions.insert("code".to_owned(), json!("CAN_NOT_FETCH_BY_ID"));
expected_extensions.insert("timestamp".to_owned(), json!("Fri Feb 9 14:33:09 UTC 2018"));
let expected_extensions = Some(expected_extensions);
assert_eq!(
deserialized_error,
Error {
message: "I accidentally your whole query".to_string(),
locations: Some(vec![
Location {
line: 3,
column: 13,
},
Location {
line: 56,
column: 1,
},
]),
path: Some(vec![
PathFragment::Key("home".to_owned()),
PathFragment::Key("alone".to_owned()),
PathFragment::Index(3),
PathFragment::Key("rating".to_owned()),
]),
extensions: expected_extensions,
}
)
}
}
| Location |
Q5.py | d = dict()
l = []
for i in range(int(input())):
x, y = input().split()
d[y] = x
l.append(y)
l.sort()
for i in range(len(l)):
if d[l[i]] == "Percy": | print(i+1)
break |
|
list_postgresql_db_user_paginated_response.py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListPostgresqlDbUserPaginatedResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'users': 'list[PostgresqlUserForList]',
'total_count': 'int'
}
attribute_map = {
'users': 'users',
'total_count': 'total_count'
}
def __init__(self, users=None, total_count=None):
"""ListPostgresqlDbUserPaginatedResponse - a model defined in huaweicloud sdk"""
super(ListPostgresqlDbUserPaginatedResponse, self).__init__()
self._users = None
self._total_count = None
self.discriminator = None
if users is not None:
self.users = users
if total_count is not None:
self.total_count = total_count
@property
def users(self):
"""Gets the users of this ListPostgresqlDbUserPaginatedResponse.
        Each element of the list represents a database user.
:return: The users of this ListPostgresqlDbUserPaginatedResponse.
:rtype: list[PostgresqlUserForList]
"""
return self._users
@users.setter
def users(self, users):
"""Sets the users of this ListPostgresqlDbUserPaginatedResponse.
        Each element of the list represents a database user.
:param users: The users of this ListPostgresqlDbUserPaginatedResponse.
:type: list[PostgresqlUserForList]
"""
self._users = users
@property
def total_count(self):
"""Gets the total_count of this ListPostgresqlDbUserPaginatedResponse.
        The total number of database users.
:return: The total_count of this ListPostgresqlDbUserPaginatedResponse.
:rtype: int
"""
return self._total_count
@total_count.setter
def total_count(self, total_count):
"""Sets the total_count of this ListPostgresqlDbUserPaginatedResponse.
        The total number of database | for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListPostgresqlDbUserPaginatedResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| users.
:param total_count: The total_count of this ListPostgresqlDbUserPaginatedResponse.
:type: int
"""
self._total_count = total_count
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
|
main.go | package main
import (
"flag"
"fmt"
"io/ioutil"
"os"
"github.com/bordnul/golang-drive/authorize"
"github.com/bordnul/golang-drive/controller"
"runtime"
"strings"
"golang.org/x/oauth2/google"
drive "google.golang.org/api/drive/v3"
)
func manualInit(clientPath string, tokenPath string) (*drive.Service, error) {
	//
	// TODO: add a len(os.Args) != 1 check
	//
	// sets trailing slash type depending on operating system
	// Initiates the Google Drive service
c, err := ioutil.ReadFile(clientPath)
if err != nil {
return nil, err
}
creds, err := google.ConfigFromJSON(c, drive.DriveScope)
if err != nil {
return nil, err
}
driveClient, err := authorize.GetClient(creds, tokenPath, clientPath)
if err != nil {
return nil, err
}
driveService, err := drive.New(driveClient)
if err != nil {
return nil, err
}
fmt.Println("service started successfully")
return driveService, nil
}
func | () {
slashType := ""
if runtime.GOOS == "windows" {
slashType = "\\"
} else {
slashType = "/"
}
////////////////////////////
//call to get token and client info
var tokenPath string = "token.json"
var clientPath string = "client.json"
////////////////////////////
//mode
modeF := flag.String("m", "", "modes: localupload, drivedownload (needs -d for target)")
//local target path\file
localF := flag.String("l", "C:\\Users\\game_game\\go\\test\\", "local file target")
//root google folder
googleF := flag.String("g", "shared_golang", "Google root folder")
//target in google folder
googleD := flag.String("d", "", "download target in Google root folder")
flag.Parse()
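	// A hedged usage sketch of the flags defined above (the binary name and the
	// paths/targets are illustrative placeholders, not taken from this repo):
	//   golang-drive -m localupload -l /home/user/file.txt -g shared_golang
	//   golang-drive -m drivedownload -g shared_golang -d remote_file.txt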
callArgs := []string{slashType, *modeF, *localF, *googleF, *googleD}
driveService, err := manualInit(clientPath, tokenPath)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
//trims trailing slash from local target
if strings.HasSuffix(callArgs[2], callArgs[0]) {
callArgs[2] = callArgs[2][:len(callArgs[2])-1]
}
switch {
case *modeF == "localupload" || *modeF == "drivedownload" && *googleD != "":
err := controller.StartAPI(driveService, &callArgs)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
}
}
| main |
fancy_platform_commands.bzl | # buildifier: disable=module-docstring
load("@rules_foreign_cc//tools/build_defs/shell_toolchain/toolchains:function_and_call.bzl", "FunctionAndCall")
_REPLACE_VALUE = "BAZEL_GEN_ROOT"
def os_name():
return "Fancy"
def pwd():
return "$(pwd)"
def echo(text):
return "printf \"{text}\"".format(text = text)
def export_var(name, value):
return "export {name}={value}".format(name = name, value = value)
def local_var(name, value):
return "local {name}={value}".format(name = name, value = value)
def use_var(name):
return "$" + name
def env():
return "env"
def path(expression):
return "export PATH=\"{expression}:$PATH\"".format(expression = expression)
def touch(path):
return "touch " + path
def mkdirs(path):
return "mkdir -p " + path
def if_else(condition, if_text, else_text):
return """
if [ {condition} ]; then
{if_text}
else
{else_text}
fi
""".format(condition = condition, if_text = if_text, else_text = else_text)
# buildifier: disable=function-docstring | lines = []
lines.append("function " + name + "() {")
for line_ in text.splitlines():
lines.append(" " + line_)
lines.append("}")
return "\n".join(lines)
def replace_in_files(dir, from_, to_):
return FunctionAndCall(
text = """if [ -d "$1" ]; then
find -L $1 -print -type f \\( -name "*.pc" -or -name "*.la" -or -name "*-config" -or -name "*.cmake" \\) -exec sed -i 's@'"$2"'@'"$3"'@g' {} ';'
fi
""",
)
def copy_dir_contents_to_dir(source, target):
return """cp -L -r --no-target-directory "{}" "{}" """.format(source, target)
def symlink_contents_to_dir(source, target):
text = """local target="$2"
mkdir -p $target
if [[ -f $1 ]]; then
##symlink_to_dir## $1 $target
return 0
fi
local children=$(find $1 -maxdepth 1 -mindepth 1)
for child in $children; do
##symlink_to_dir## $child $target
done
"""
return FunctionAndCall(text = text)
def symlink_to_dir(source, target):
text = """local target="$2"
mkdir -p ${target}
if [[ -d $1 ]]; then
ln -s -t ${target} $1
elif [[ -f $1 ]]; then
ln -s -t ${target} $1
elif [[ -L $1 ]]; then
cp --no-target-directory $1 ${target}
else
echo "Can not copy $1"
fi
"""
return FunctionAndCall(text = text)
def script_prelude():
return "set -euo pipefail"
def increment_pkg_config_path(source):
text = """local children=$(find $1 -mindepth 1 -name '*.pc')
# assume there is only one directory with pkg config
for child in $children; do
export PKG_CONFIG_PATH="$${PKG_CONFIG_PATH:-}$$:$(dirname $child)"
return
done
"""
return FunctionAndCall(text = text)
def cat(filepath):
return "cat \"{}\"".format(filepath)
def redirect_out_err(from_process, to_file):
return from_process + " &> " + to_file
def assert_script_errors():
return "set -e"
def cleanup_function(on_success, on_failure):
text = "\n".join([
"local ecode=$?",
"if [ $ecode -eq 0 ]; then",
on_success,
"else",
on_failure,
"fi",
])
return FunctionAndCall(text = text, call = "trap \"cleanup_function\" EXIT")
def children_to_path(dir_):
text = """if [ -d {dir_} ]; then
local tools=$(find $EXT_BUILD_DEPS/bin -maxdepth 1 -mindepth 1)
for tool in $tools;
do
if [[ -d \"$tool\" ]] || [[ -L \"$tool\" ]]; then
export PATH=$PATH:$tool
fi
done
fi""".format(dir_ = dir_)
return FunctionAndCall(text = text)
def define_absolute_paths(dir_, abs_path):
return "##replace_in_files## {dir_} {REPLACE_VALUE} {abs_path}".format(
dir_ = dir_,
REPLACE_VALUE = _REPLACE_VALUE,
abs_path = abs_path,
)
def replace_absolute_paths(dir_, abs_path):
return "##replace_in_files## {dir_} {abs_path} {REPLACE_VALUE}".format(
dir_ = dir_,
REPLACE_VALUE = _REPLACE_VALUE,
abs_path = abs_path,
) | def define_function(name, text): |
query.rs | use std::convert::{TryFrom, TryInto};
use super::ChainNodeApp;
use crate::enclave_bridge::EnclaveProxy;
use abci::*;
use chain_core::common::{MerkleTree, Proof as MerkleProof, H256, HASH_SIZE_256};
use chain_core::state::account::StakedStateAddress;
use chain_core::state::tendermint::BlockHeight;
use chain_core::state::ChainState;
use chain_core::tx::data::{txid_hash, TXID_HASH_ID};
use chain_storage::jellyfish::get_with_proof;
use chain_storage::LookupItem;
use parity_scale_codec::{Decode, Encode};
/// Generate generic ABCI ProofOp for the witness
fn get_witness_proof_op(witness: &[u8]) -> ProofOp {
let mut op = ProofOp::new();
op.set_field_type("witness".into());
op.set_key(TXID_HASH_ID.to_vec());
op.set_data(txid_hash(witness).to_vec());
op
}
fn get_key(resp: &mut ResponseQuery, data_key: &[u8]) -> Option<H256> {
if data_key.len() != HASH_SIZE_256 {
resp.log += "invalid txid or app hash length";
resp.code = 4;
None
} else {
let mut key = H256::default();
key.copy_from_slice(&data_key[..]);
Some(key)
}
}
impl<T: EnclaveProxy + 'static> ChainNodeApp<T> {
fn lookup_key(
&self,
resp: &mut ResponseQuery,
item: LookupItem,
key: &H256,
log_message: &str,
) {
let v = self.storage.lookup_item(item, &key);
match v {
Some(uv) => {
resp.value = uv;
}
_ => {
resp.log += log_message;
resp.code = 1;
}
}
}
/// Helper to find a key under a column in KV DB, or log an error (both stored in the response).
fn lookup(
&self,
resp: &mut ResponseQuery,
item: LookupItem,
data_key: &[u8],
log_message: &str,
) -> Option<H256> |
/// Responds to query requests -- note that path is hex-encoded in the original request on the client side
/// e.g. "store" == 0x73746f7265.
pub fn query_handler(&self, _req: &RequestQuery) -> ResponseQuery {
let mut resp = ResponseQuery::new();
// "When Tendermint connects to a peer, it sends two queries to the ABCI application using the following paths, with no additional data:
// * /p2p/filter/addr/<IP:PORT>, where <IP:PORT> denote the IP address and the port of the connection
// * p2p/filter/id/<ID>, where <ID> is the peer node ID (ie. the pubkey.Address() for the peer's PubKey)
// If either of these queries return a non-zero ABCI code, Tendermint will refuse to connect to the peer."
if _req.path.starts_with("/p2p") || _req.path.starts_with("p2p") {
// TODO: peer filtering
return resp;
}
match _req.path.as_ref() {
"txquery" => match &self.tx_query_address {
Some(addr) => {
resp.value = addr.clone().into_bytes();
}
None => {
resp.code = 1;
resp.log += "tx query address not set";
}
},
"store" => {
let key = self.lookup(
&mut resp,
LookupItem::TxBody,
&_req.data[..],
"tx not found",
);
if let (Some(txid), true) = (key, _req.prove) {
let mwitness = self.storage.lookup_item(LookupItem::TxWitness, &txid);
if let Some(witness) = mwitness {
                        // Negative height defaults to 0
let req_height = _req
.height
.try_into()
.unwrap_or_else(|_| BlockHeight::genesis());
let last_height = self
.last_state
.as_ref()
.map_or(BlockHeight::genesis(), |x| x.last_block_height);
let height =
if req_height == BlockHeight::genesis() || req_height > last_height {
last_height
} else {
req_height
};
// note this should not crash if Tendermint delivers all blocks with height in order
// TODO: invariant / sanity check in rust-abci?
let app_hash = self.storage.get_historical_app_hash(height).unwrap();
let data = self
.storage
.lookup_item(LookupItem::TxsMerkle, &app_hash)
.unwrap();
let tree = MerkleTree::decode(&mut data.as_slice()).expect("merkle tree");
// TODO: Change this in future to include individual ops?
let proof_ops = match tree.generate_proof(txid) {
None => vec![get_witness_proof_op(&witness[..])],
Some(merkle_proof) => vec![
into_proof_op(tree.root_hash(), merkle_proof),
get_witness_proof_op(&witness[..]),
],
};
let mut proof = Proof::new();
proof.set_ops(proof_ops.into());
resp.set_proof(proof);
} else {
resp.log += "proof error: witness not found";
resp.code = 2;
}
}
}
"meta" => {
self.lookup(
&mut resp,
LookupItem::TxMetaSpent,
&_req.data[..],
"tx not found",
);
}
"witness" => {
self.lookup(
&mut resp,
LookupItem::TxWitness,
&_req.data[..],
"tx not found",
);
}
"merkle" => {
self.lookup(
&mut resp,
LookupItem::TxsMerkle,
&_req.data[..],
"app state not found",
);
}
"account" => {
let account_address = StakedStateAddress::try_from(_req.data.as_slice());
if let (Some(state), Ok(address)) = (&self.last_state, account_address) {
let (account, _proof) =
get_with_proof(&self.storage, state.staking_version, &address);
match account {
Some(a) => {
resp.value = a.encode();
// TODO: inclusion proof
}
None => {
resp.log += "account lookup failed: account not exists";
resp.code = 1;
}
}
} else {
resp.log += "account lookup failed (either invalid address or node not correctly restored / initialized)";
resp.code = 3;
}
}
"staking" => {
let mversion = if let Ok(height) = _req.height.try_into() {
self.storage.get_historical_staking_version(height)
} else {
self.last_state.as_ref().map(|state| state.staking_version)
};
let account_address = StakedStateAddress::try_from(_req.data.as_slice());
if let (Some(version), Ok(address)) = (mversion, account_address) {
let (mstaking, proof) = get_with_proof(&self.storage, version, &address);
resp.value = mstaking.encode();
if _req.prove {
resp.set_proof(Proof {
ops: vec![ProofOp {
field_type: "staking".to_owned(),
key: address.encode(),
data: proof.encode(),
..Default::default()
}]
.into(),
..Default::default()
});
}
} else {
resp.log += "account lookup failed (either invalid address or node not correctly restored / initialized)";
resp.code = 3;
}
}
"state" => {
if self.tx_query_address.is_none() {
resp.code = 1;
resp.log += "tx query address not set / state is not persisted";
} else {
let value = self.storage.get_historical_state(
_req.height.try_into().expect("Invalid block height"),
);
match value {
Some(value) => {
if let Ok(state) = ChainState::decode(&mut value.to_vec().as_slice()) {
resp.value = serde_json::to_string(&state).unwrap().into_bytes();
} else {
resp.log += "state decode failed";
resp.code = 2;
}
}
_ => {
resp.log += "state not found";
resp.code = 2;
}
}
}
}
"council-nodes" => {
let council_nodes = &self
.last_state
.as_ref()
.expect("Missing last_state: init chain was not called")
.staking_table
.list_council_nodes(&self.staking_getter_committed());
resp.value = serde_json::to_string(&council_nodes)
.expect("Unable to serialize validator metadata into json")
.into_bytes();
}
"sealed" => {
self.lookup(
&mut resp,
LookupItem::TxSealed,
&_req.data[..],
"sealed log not found",
);
}
_ => {
resp.log += "invalid path";
resp.code = 1;
}
}
resp
}
}
fn into_proof_op<T: Encode>(root_hash: H256, proof: MerkleProof<T>) -> ProofOp {
let mut proof_op = ProofOp::new();
proof_op.set_field_type("transaction".into());
proof_op.set_key(root_hash.to_vec());
proof_op.set_data(proof.encode());
proof_op
}
| {
if let Some(key) = get_key(resp, data_key) {
self.lookup_key(resp, item, &key, log_message);
if resp.code == 0 {
return Some(key);
}
}
None
} |
funcmapmaker.go | package funcmapmaker | "html/template"
"io/ioutil"
"net/http"
"github.com/qorpress/qorpress-contrib/flickr/config"
"github.com/qorpress/qorpress-contrib/flickr/models"
"github.com/qorpress/qorpress/core/render"
"github.com/qorpress/qorpress/pkg/utils"
)
func AddFuncMapMaker(view *render.Render) *render.Render {
oldFuncMapMaker := view.FuncMapMaker
view.FuncMapMaker = func(render *render.Render, req *http.Request, w http.ResponseWriter) template.FuncMap {
funcMap := template.FuncMap{}
if oldFuncMapMaker != nil {
funcMap = oldFuncMapMaker(render, req, w)
}
funcMap["get_flickr_images"] = func() (payload models.FlickrPayload) {
var fs models.FlickrSetting
utils.GetDB(req).Find(&fs)
if fs.ApiKey != "" && fs.UserId != "" {
// get flickr images
photostreamUrl := fmt.Sprintf(config.PhotoStreamUrl, fs.ApiKey, fs.UserId, fs.PerPage)
response, err := http.Get(photostreamUrl)
if err != nil {
fmt.Println(err.Error())
return
}
				defer response.Body.Close() // Response.Body is an io.ReadCloser and must be closed when done
body, err := ioutil.ReadAll(response.Body)
if err != nil {
fmt.Println(err.Error())
}
json.Unmarshal(body, &payload)
return payload
}
return
}
funcMap["get_flickr_albums"] = func() []models.FlickrPhotoAlbum {
var fs models.FlickrSetting
utils.GetDB(req).Find(&fs)
if fs.ApiKey != "" && fs.UserId != "" {
// get flickr albums
albumsUrl := fmt.Sprintf(config.AlbumsUrl, fs.ApiKey, fs.UserId)
response, err := http.Get(albumsUrl)
if err != nil {
fmt.Println(err)
return nil
}
defer response.Body.Close()
body, err := ioutil.ReadAll(response.Body)
var payload models.FlickrAlbumPayload
json.Unmarshal(body, &payload)
return payload.PhotoSets.PhotoAlbums
}
return nil
}
funcMap["get_flickr_photos_in_album"] = func(albumId int) (photos []models.FlickrPhotoItem) {
var fs models.FlickrSetting
utils.GetDB(req).Find(&fs)
if fs.ApiKey != "" && fs.UserId != "" {
// get flickr images from album
albumUrl := fmt.Sprintf(config.AlbumsUrl, fs.ApiKey, albumId, fs.UserId)
resp, err := http.Get(albumUrl)
if err != nil {
fmt.Println(err)
return
}
defer resp.Body.Close()
var photoAlbum models.FlickrPhotosPayload
body, err := ioutil.ReadAll(resp.Body)
jsonError := json.Unmarshal(body, &photoAlbum)
if jsonError != nil {
fmt.Println("Json marshal error: ", jsonError)
return
}
return photoAlbum.PhotoSet.PhotoItems
}
return
}
return funcMap
}
return view
} |
import (
"encoding/json"
"fmt" |
conftest.py | import os
import pytest
from sap.aibus.dar.client.data_manager_client import DataManagerClient
from sap.aibus.dar.client.inference_client import InferenceClient
from sap.aibus.dar.client.model_manager_client import ModelManagerClient
from sap.aibus.dar.client.util.credentials import OnlineCredentialsSource
from sap.aibus.dar.client.workflow.model import ModelCreator
@pytest.fixture()
def dar_url():
return os.environ["DAR_URL"]
@pytest.fixture()
def dar_client_id():
return os.environ["DAR_CLIENT_ID"]
@pytest.fixture()
def dar_client_secret():
return os.environ["DAR_CLIENT_SECRET"]
@pytest.fixture()
def dar_uaa_url():
return os.environ["DAR_AUTH_URL"]
# For the following fixtures, the parameters to the functions
# will be provided by existing fixtures of the same name!
@pytest.fixture()
def credentials_source(dar_client_id, dar_client_secret, dar_uaa_url):
return OnlineCredentialsSource(dar_uaa_url, dar_client_id, dar_client_secret)
@pytest.fixture()
def data_manager_client(dar_url, credentials_source):
|
@pytest.fixture()
def model_manager_client(dar_url, credentials_source):
client = ModelManagerClient(dar_url, credentials_source)
return client
@pytest.fixture()
def inference_client(dar_url, credentials_source):
client = InferenceClient(dar_url, credentials_source)
return client
@pytest.fixture()
def model_creator(dar_url, credentials_source):
create_model = ModelCreator(dar_url, credentials_source)
return create_model
| client = DataManagerClient(dar_url, credentials_source)
return client |
utils.rs | use super::Rule;
use crate::Result;
use pest::iterators::{Pair, Pairs};
pub(super) fn next_if_rule<'a>(pairs: &mut Pairs<'a, Rule>, rule: Rule) -> Option<Pair<'a, Rule>> {
if pairs.peek().map_or(false, |pair| pair.as_rule() == rule) {
Some(pairs.next().unwrap())
} else {
None
}
}
pub(super) fn parse_if_rule<'a, T>(
pairs: &mut Pairs<'a, Rule>,
rule: Rule,
f: impl FnOnce(Pair<Rule>) -> Result<T>,
) -> Result<Option<T>> {
next_if_rule(pairs, rule).map(f).transpose()
}
pub(super) fn exactly_one<T>(iter: impl IntoIterator<Item = T>) -> T {
let mut iter = iter.into_iter();
let res = iter.next().unwrap();
debug_assert!(matches!(iter.next(), None));
res
}
pub(super) fn block_string_value(raw: &str) -> String {
// Split the string by either \r\n, \r or \n
let lines: Vec<_> = raw
.split("\r\n")
.flat_map(|s| s.split(['\r', '\n'].as_ref()))
.collect();
// Find the common indent
let common_indent = lines
.iter()
.skip(1)
.copied()
.filter_map(|line| line.find(|c| c != '\t' && c != ' '))
.min()
.unwrap_or(0);
let line_has_content = |line: &str| line.as_bytes().iter().any(|&c| c != b'\t' && c != b' ');
let first_contentful_line = lines
.iter()
.copied()
.position(line_has_content)
.unwrap_or_else(|| lines.len());
let ending_lines_start = lines
.iter()
.copied()
.rposition(line_has_content)
.map_or(0, |i| i + 1);
lines
.iter()
.copied()
.enumerate()
.take(ending_lines_start)
.skip(first_contentful_line)
// Remove the common indent, but not on the first line
.map(|(i, line)| {
if i != 0 && line.len() >= common_indent {
&line[common_indent..]
} else {
line
}
})
// Put a newline between each line
.enumerate()
.flat_map(|(i, line)| {
if i == 0 { [].as_ref() } else { ['\n'].as_ref() }
.iter()
.copied()
.chain(line.chars())
})
.collect()
}
#[test]
fn test_block_string_value() {
assert_eq!(block_string_value(""), "");
assert_eq!(block_string_value("\r\n"), "");
assert_eq!(block_string_value("\r\r\r\r\n\n\r\n\r\r"), "");
assert_eq!(block_string_value("abc"), "abc");
assert_eq!(
block_string_value("line 1\r\n line 2\n line 3\r line 4"),
"line 1\nline 2\n line 3\n line 4"
);
assert_eq!(
block_string_value("\r\r some text\r\n \n \n "),
"some text"
);
assert_eq!(
block_string_value(
r#"
a
b
c
"#
),
"a\nb\n\nc"
);
}
pub(super) fn string_value(s: &str) -> String {
let mut chars = s.chars();
std::iter::from_fn(|| {
Some(match chars.next()? {
'\\' => match chars.next().expect("backslash at end") {
c @ '\"' | c @ '\\' | c @ '/' => c,
'b' => '\x08',
'f' => '\x0C',
'n' => '\n',
'r' => '\r',
't' => '\t',
'u' => std::char::from_u32(
(0..4)
.map(|_| chars.next().unwrap().to_digit(16).unwrap())
.fold(0, |acc, digit| acc * 16 + digit),
)
.unwrap(),
_ => unreachable!(),
},
other => other,
})
})
.collect()
}
#[test]
fn test_string_value() | {
assert_eq!(string_value("abc"), "abc");
assert_eq!(string_value("\\n\\b\\u2a1A"), "\n\x08\u{2A1A}");
assert_eq!(string_value("\\\"\\\\"), "\"\\");
} |
|
manager.go | // Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may
// not use this file except in compliance with the License. A copy of the
// License is located at
//
// http://aws.amazon.com/apache2.0/
//
// or in the "license" file accompanying this file. This file is distributed
// on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
// express or implied. See the License for the specific language governing
// permissions and limitations under the License.
// Code generated by ack-generate. DO NOT EDIT.
package model
| "fmt"
ackerr "github.com/aws/aws-controllers-k8s/pkg/errors"
corev1 "k8s.io/api/core/v1"
ackv1alpha1 "github.com/aws/aws-controllers-k8s/apis/core/v1alpha1"
ackcompare "github.com/aws/aws-controllers-k8s/pkg/compare"
ackcfg "github.com/aws/aws-controllers-k8s/pkg/config"
ackmetrics "github.com/aws/aws-controllers-k8s/pkg/metrics"
acktypes "github.com/aws/aws-controllers-k8s/pkg/types"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/go-logr/logr"
svcsdk "github.com/aws/aws-sdk-go/service/apigatewayv2"
svcsdkapi "github.com/aws/aws-sdk-go/service/apigatewayv2/apigatewayv2iface"
)
// +kubebuilder:rbac:groups=apigatewayv2.services.k8s.aws,resources=models,verbs=get;list;watch;create;update;patch;delete
// +kubebuilder:rbac:groups=apigatewayv2.services.k8s.aws,resources=models/status,verbs=get;update;patch
// +kubebuilder:rbac:groups="",resources=namespaces,verbs=get;list;watch
// +kubebuilder:rbac:groups="",resources=configmaps,verbs=get;list;watch
// resourceManager is responsible for providing a consistent way to perform
// CRUD operations in a backend AWS service API for Model custom resources.
type resourceManager struct {
// cfg is a copy of the ackcfg.Config object passed on start of the service
// controller
cfg ackcfg.Config
// log refers to the logr.Logger object handling logging for the service
// controller
log logr.Logger
// metrics contains a collection of Prometheus metric objects that the
// service controller and its reconcilers track
metrics *ackmetrics.Metrics
// rr is the AWSResourceReconciler which can be used for various utility
// functions such as querying for Secret values given a SecretReference
rr acktypes.AWSResourceReconciler
// awsAccountID is the AWS account identifier that contains the resources
// managed by this resource manager
awsAccountID ackv1alpha1.AWSAccountID
// The AWS Region that this resource manager targets
awsRegion ackv1alpha1.AWSRegion
// sess is the AWS SDK Session object used to communicate with the backend
// AWS service API
sess *session.Session
// sdk is a pointer to the AWS service API interface exposed by the
// aws-sdk-go/services/{alias}/{alias}iface package.
sdkapi svcsdkapi.ApiGatewayV2API
}
// concreteResource returns a pointer to a resource from the supplied
// generic AWSResource interface
func (rm *resourceManager) concreteResource(
res acktypes.AWSResource,
) *resource {
// cast the generic interface into a pointer type specific to the concrete
// implementing resource type managed by this resource manager
return res.(*resource)
}
// ReadOne returns the currently-observed state of the supplied AWSResource in
// the backend AWS service API.
func (rm *resourceManager) ReadOne(
ctx context.Context,
res acktypes.AWSResource,
) (acktypes.AWSResource, error) {
r := rm.concreteResource(res)
if r.ko == nil {
// Should never happen... if it does, it's buggy code.
panic("resource manager's ReadOne() method received resource with nil CR object")
}
observed, err := rm.sdkFind(ctx, r)
if err != nil {
return rm.onError(r, err)
}
return rm.onSuccess(observed)
}
// Create attempts to create the supplied AWSResource in the backend AWS
// service API, returning an AWSResource representing the newly-created
// resource
func (rm *resourceManager) Create(
ctx context.Context,
res acktypes.AWSResource,
) (acktypes.AWSResource, error) {
r := rm.concreteResource(res)
if r.ko == nil {
// Should never happen... if it does, it's buggy code.
panic("resource manager's Create() method received resource with nil CR object")
}
created, err := rm.sdkCreate(ctx, r)
if err != nil {
return rm.onError(r, err)
}
return rm.onSuccess(created)
}
// Update attempts to mutate the supplied desired AWSResource in the backend AWS
// service API, returning an AWSResource representing the newly-mutated
// resource.
// Note: implementers of specialized logic can check how the latest
// observed resource differs from the supplied desired state. The
// higher-level reconciler determines whether or not the desired state differs
// from the latest observed state and decides whether to call the resource
// manager's Update method.
func (rm *resourceManager) Update(
ctx context.Context,
resDesired acktypes.AWSResource,
resLatest acktypes.AWSResource,
diffReporter *ackcompare.Reporter,
) (acktypes.AWSResource, error) {
desired := rm.concreteResource(resDesired)
latest := rm.concreteResource(resLatest)
if desired.ko == nil || latest.ko == nil {
// Should never happen... if it does, it's buggy code.
panic("resource manager's Update() method received resource with nil CR object")
}
updated, err := rm.sdkUpdate(ctx, desired, latest, diffReporter)
if err != nil {
return rm.onError(latest, err)
}
return rm.onSuccess(updated)
}
// Delete attempts to destroy the supplied AWSResource in the backend AWS
// service API.
func (rm *resourceManager) Delete(
ctx context.Context,
res acktypes.AWSResource,
) error {
r := rm.concreteResource(res)
if r.ko == nil {
// Should never happen... if it does, it's buggy code.
panic("resource manager's Update() method received resource with nil CR object")
}
return rm.sdkDelete(ctx, r)
}
// ARNFromName returns an AWS Resource Name from a given string name. This
// is useful for constructing ARNs for APIs that require ARNs in their
// GetAttributes operations but all we have (for new CRs at least) is a
// name for the resource
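// For example, with an illustrative region, account ID and name (not real values),
// ARNFromName("my-model") would produce something like
// "arn:aws:apigatewayv2:us-west-2:123456789012:my-model".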
func (rm *resourceManager) ARNFromName(name string) string {
return fmt.Sprintf(
"arn:aws:apigatewayv2:%s:%s:%s",
rm.awsRegion,
rm.awsAccountID,
name,
)
}
// newResourceManager returns a new struct implementing
// acktypes.AWSResourceManager
func newResourceManager(
cfg ackcfg.Config,
log logr.Logger,
metrics *ackmetrics.Metrics,
rr acktypes.AWSResourceReconciler,
sess *session.Session,
id ackv1alpha1.AWSAccountID,
region ackv1alpha1.AWSRegion,
) (*resourceManager, error) {
return &resourceManager{
cfg: cfg,
log: log,
metrics: metrics,
rr: rr,
awsAccountID: id,
awsRegion: region,
sess: sess,
sdkapi: svcsdk.New(sess),
}, nil
}
// onError updates resource conditions and returns updated resource
// it returns nil if no condition is updated.
func (rm *resourceManager) onError(
r *resource,
err error,
) (acktypes.AWSResource, error) {
r1, updated := rm.updateConditions(r, err)
if !updated {
return r, err
}
for _, condition := range r1.Conditions() {
if condition.Type == ackv1alpha1.ConditionTypeTerminal &&
condition.Status == corev1.ConditionTrue {
// resource is in Terminal condition
// return Terminal error
return r1, ackerr.Terminal
}
}
return r1, err
}
// onSuccess updates resource conditions and returns updated resource
// it returns the supplied resource if no condition is updated.
func (rm *resourceManager) onSuccess(
r *resource,
) (acktypes.AWSResource, error) {
r1, updated := rm.updateConditions(r, nil)
if !updated {
return r, nil
}
return r1, nil
} | import (
"context" |
borrowck-init-in-called-fn-expr.rs | fn main() {
let j = || -> isize {
let i: isize; | j();
} | i //~ ERROR use of possibly uninitialized variable: `i`
}; |
bwa_index.py | """Create genome index for BWA aligner."""
import shutil
from pathlib import Path
from plumbum import TEE
from resolwe.process import Cmd, DataField, DirField, FileField, Process, StringField
class BWAIndex(Process):
"""Create BWA genome index."""
slug = "bwa-index"
process_type = "data:index:bwa"
name = "BWA genome index"
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {"image": "resolwebio/rnaseq:5.9.0"},
},
"resources": {
"cores": 1,
"memory": 16384,
},
}
category = "Genome index"
data_name = '{{ ref_seq.fasta.file|basename|default("?") }}'
version = "1.0.2"
class Input:
"""Input fields for BWAIndex."""
ref_seq = DataField(
"seq:nucleotide", label="Reference sequence (nucleotide FASTA)"
)
class | :
"""Output fields to process BWAIndex."""
index = DirField(label="BWA index")
fastagz = FileField(label="FASTA file (compressed)")
fasta = FileField(label="FASTA file")
fai = FileField(label="FASTA file index")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
"""Run analysis."""
basename = Path(inputs.ref_seq.output.fasta.path).name
assert basename.endswith(".fasta")
name = basename[:-6]
index_dir = Path("BWA_index")
index_dir.mkdir()
shutil.copy(Path(inputs.ref_seq.output.fasta.path), Path.cwd())
shutil.copy(Path(inputs.ref_seq.output.fastagz.path), Path.cwd())
shutil.copy(Path(inputs.ref_seq.output.fai.path), Path.cwd())
args = [
"-p",
index_dir / f"{name}.fasta",
inputs.ref_seq.output.fasta.path,
]
return_code, _, _ = Cmd["bwa"]["index"][args] & TEE(retcode=None)
if return_code:
self.error("Error occurred while preparing the BWA index.")
outputs.index = index_dir.name
outputs.fasta = f"{name}.fasta"
outputs.fastagz = f"{name}.fasta.gz"
outputs.fai = f"{name}.fasta.fai"
outputs.species = inputs.ref_seq.output.species
outputs.build = inputs.ref_seq.output.build
| Output |
TodoList.tsx |
import { TodoListProps } from './TodoList.model';
const TodoList: FunctionComponent<TodoListProps> = ({ todos }) => (
<List>
{
todos.map(({ id, text }) => <ListItem key={id}>{text}</ListItem>)
}
</List>
);
export default TodoList; | import React, { FunctionComponent } from 'react';
import { List, ListItem } from '@material-ui/core'; |
|
HeaderOptionsItem.tsx | export interface HeaderOptionsItemProps {
Icon: (props: React.ComponentProps<"svg">) => JSX.Element;
IconClass?: string;
count?: number;
onClick?: () => void;
} | count = 0,
onClick,
}) => {
return (
<div className="relative hidden md:inline-flex" onClick={onClick}>
<Icon
className={`h-6 cursor-pointer hover:scale-125 transition-all duration-150 ease-out
${IconClass}`}
/>
{count > 0 && (
<div
role="count"
className="absolute -top-2 -right-2 text-xs w-5 h-5
bg-red-500 rounded-full flex items-center justify-center
animate-pulse text-white"
>
{count}
</div>
)}
</div>
);
};
export default HeaderOptionsItem; |
const HeaderOptionsItem: React.FC<HeaderOptionsItemProps> = ({
Icon,
IconClass, |
update.py | import bpy
import math
import mathutils
from os import listdir
from os.path import isfile, join
from . raytracer import sensor_position_for_distance
from . import calc
from . import create
from . import data
# ------------------------------------------------------------------------
# Helper functions
# ------------------------------------------------------------------------
# scans the lens folder for csv files containing lens data. The files are then listed in the objective list selector.
def find_items(self, context):
# check if list was already created
if (not data.objective_list_created):
# get all files in the lenses dir
lensfiles = [f for f in listdir(data.lens_directory) if isfile(join(data.lens_directory, f))]
lensfiles.sort()
result = ()
counter = 0
for lensfile in lensfiles:
# check if file ends with .csv
file_ending = lensfile[-3:]
if file_ending == "csv":
# find "_" which separates lens name and author/company name
separator = lensfile.find("_")
# add objective entry to list
result = result + (('OBJECTIVE_'+str(counter),lensfile[:separator],lensfile),)
counter = counter + 1
data.objective_list_created = True
data.objective_list = result
return data.objective_list
# ------------------------------------------------------------------------
# Update functions
# ------------------------------------------------------------------------
def objective_scale(self, context):
return
def lens_creation_method(self,context):
data.lens_creation_method = bpy.data.scenes[0].camera_generator.prop_lens_creation_method
def sensor(self, context):
cg = bpy.data.scenes[0].camera_generator
# rescale diffusor plane
if 'Diffusor Plane' in bpy.data.objects:
bpy.data.objects['Diffusor Plane'].scale[1] = cg.prop_sensor_width / 1000.0
bpy.data.objects['Diffusor Plane'].scale[2] = cg.prop_sensor_height / 1000.0
# adjust render resolution assuming square pixels
bpy.data.scenes[0].render.resolution_x = cg.prop_sensor_width / cg.prop_pixel_size
bpy.data.scenes[0].render.resolution_y = cg.prop_sensor_height / cg.prop_pixel_size
# rescale orthographic camera
if 'Orthographic Camera' in bpy.data.objects:
bpy.data.cameras['Orthographic Camera'].ortho_scale = max(cg.prop_sensor_width, cg.prop_sensor_height) / 1000.0
# rescale MLA to sensor size
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].scale[1] = cg.prop_sensor_width / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[1])
bpy.data.objects['Two Plane Model'].scale[2] = cg.prop_sensor_height / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[2])
temp_object = bpy.context.active_object
bpy.context.active_object.select_set(False)
bpy.data.objects['Two Plane Model'].select_set(True)
bpy.ops.object.transform_apply(location = False, scale = True, rotation = False)
bpy.data.objects['Two Plane Model'].select_set(False)
temp_object.select_set(True)
if 'MLA Hex Material' in bpy.data.materials:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
if 'MLA Rect Material' in bpy.data.materials:
bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
def sensor_width(self, context):
sensor(self,context)
def sensor_height(self, context):
sensor(self,context)
def pixel_size(self, context):
sensor(self, context)
def sensor_mainlens_distance(self, context):
cg = bpy.data.scenes[0].camera_generator
# move sensor
if 'Sensor' in bpy.data.objects:
bpy.data.objects['Sensor'].location[0] = cg.prop_sensor_mainlens_distance / 1000.0
# move MLA
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].location[0] = cg.prop_sensor_mainlens_distance / 1000.0 - cg.prop_mla_sensor_dist / 1000.0
def aperture_blades(self, context):
if 'Aperture Plane' in bpy.data.objects:
create.aperture()
def aperture_size(self, context):
if 'Opening' in bpy.data.objects:
cg = bpy.data.scenes[0].camera_generator
bpy.data.objects['Opening'].scale[1] = cg.prop_aperture_size / 1000.0
bpy.data.objects['Opening'].scale[2] = cg.prop_aperture_size / 1000.0
data.semi_aperture = cg.prop_aperture_size / 2000.0
def aperture_angle(self, context):
if 'Opening' in bpy.data.objects:
bpy.data.objects['Opening'].rotation_euler[0] = bpy.data.scenes[0].camera_generator.prop_aperture_angle/180.0*math.pi
def wavelength(self,context):
if data.glass_data_known == False:
# reset wavelength since not all glass materials are known
if abs(bpy.data.scenes[0].camera_generator.prop_wavelength - 587.6) > 0.01:
bpy.data.scenes[0].camera_generator.prop_wavelength = 587.6
return
# check whether objective is available
if len(data.objective) == 0:
return
else:
wavelength_um = bpy.data.scenes[0].camera_generator.prop_wavelength/1000.0
iors = []
for lens in data.objective:
if lens['material'] == 'air' or lens['material'] == 'Air':
iors.append(1.0)
else:
new_ior = calc.ior(lens['material'], wavelength_um)
if new_ior == None:
iors.clear()
break
else:
iors.append(new_ior)
if len(iors) > 0:
counter = 0
for lens in data.objective:
lens['ior_wavelength'] = iors[counter]
counter = counter + 1
for i in range(len(data.objective)-1, 0, -1):
data.objective[i]['ior_ratio'] = data.objective[i-1]['ior_wavelength']/data.objective[i]['ior_wavelength']
data.objective[0]['ior_ratio'] = 1.0/data.objective[0]['ior_wavelength']
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
bpy.data.materials[object.material_slots[0].name].node_tree.nodes['IOR'].outputs[0].default_value = lens['ior_ratio']
def fresnel_reflection_enabled(self,context):
# check whether objective is available
if (len(data.objective) == 0) or (not self.prop_fresnel_transmission_enabled):
return
else:
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
material = bpy.data.materials[object.material_slots[0].name]
if self.prop_fresnel_reflection_enabled:
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[0]=1
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[1]=1
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[2]=1
else:
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[0]=0
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[1]=0
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[2]=0
def fresnel_transmission_enabled(self,context):
# check whether objective is available
if len(data.objective) == 0:
return
else:
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
material = bpy.data.materials[object.material_slots[0].name]
if self.prop_fresnel_transmission_enabled:
material.node_tree.links.new(material.node_tree.nodes['Mix Shader'].outputs[0],material.node_tree.nodes['Material Output'].inputs[0])
else:
material.node_tree.links.new(material.node_tree.nodes['Refraction BSDF'].outputs[0],material.node_tree.nodes['Material Output'].inputs[0])
fresnel_reflection_enabled(self,context)
def mla_enabled(self, context):
hide = not bpy.data.scenes[0].camera_generator.prop_mla_enabled
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].hide_render = hide
bpy.data.objects['Two Plane Model'].hide_viewport = hide
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].hide_render = hide
bpy.data.objects['MLA'].hide_viewport = hide
data.use_mla = not hide
if data.use_mla:
sensor(self, context)
def microlens_diam(self, context):
if 'MLA Hex Material' in bpy.data.materials:
# set microlens size
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Microlens Diameter in um'].outputs['Value'].default_value = bpy.data.scenes[0].camera_generator.prop_microlens_diam
bpy.data.materials['MLA Rect Material'].node_tree.nodes['Microlens Diameter in um'].outputs['Value'].default_value = bpy.data.scenes[0].camera_generator.prop_microlens_diam
def mla_sensor_dist(self, context):
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].location[0] = bpy.data.objects['Sensor'].location[0] - bpy.data.scenes[0].camera_generator.prop_mla_sensor_dist / 1000.0
def ml_type_1_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
cg = bpy.data.scenes[0].camera_generator
# get currently active MLA type
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 1 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
if not cg.prop_three_ml_types:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
bpy.data.materials['MLA Rect Material'].node_tree.nodes['Microlens f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def ml_type_2_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
# get currently active MLA type
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla: | else:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
def ml_type_3_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
# get currently active MLA type
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
if cg.prop_three_ml_types:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_3_f
else:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def three_ml_types(self, context):
cg = bpy.data.scenes[0].camera_generator
if not cg.prop_three_ml_types:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
else:
if cg.prop_mla_type == 'RECT':
cg.prop_three_ml_types = False
def mla_type(self, context):
cg = bpy.data.scenes[0].camera_generator
# get currently active MLA type
is_hex_mla = (cg.prop_mla_type == 'HEX')
# set materials
if is_hex_mla:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Hex Material']
else:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Rect Material']
ml_type_1_f(self,context)
cg.prop_three_ml_types = False
three_ml_types(self,context)
def focus_distance(self, context):
if 'MLA' in bpy.data.objects:
cg = bpy.data.scenes[0].camera_generator
# calculate the new sensor distance
sensor_position = sensor_position_for_distance(cg.prop_focus_distance / 100.0)
if sensor_position != -1.0:
cg.prop_sensor_mainlens_distance = sensor_position * 1000.0
sensor_mainlens_distance(self, context)
# set the calibration pattern to new distance
if 'Calibration Pattern' in bpy.data.objects:
calibration_pattern = bpy.data.objects['Calibration Pattern']
translation = mathutils.Vector((-bpy.data.scenes[0].camera_generator.prop_focus_distance / 100.0, 0.0, 0.0))
translation.rotate(calibration_pattern.rotation_euler)
calibration_pattern.location = translation | if cg.prop_three_ml_types:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_2_f
else:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f |
aliddns.py | from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.acs_exception.exceptions import ClientException
from aliyunsdkcore.acs_exception.exceptions import ServerException
from aliyunsdkalidns.request.v20150109.DescribeSubDomainRecordsRequest import DescribeSubDomainRecordsRequest
from aliyunsdkalidns.request.v20150109.DescribeDomainRecordsRequest import DescribeDomainRecordsRequest
import requests
from urllib.request import urlopen
import json
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--access-key-id')
parser.add_argument('--access-key-secret')
parser.add_argument('--domain-name')
parser.add_argument('--host')
args = parser.parse_args()
print(args)
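# A hedged invocation sketch (placeholder values, not real credentials):
#   python aliddns.py --access-key-id <AccessKeyId> --access-key-secret <AccessKeySecret> \
#       --domain-name example.com --host www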
accessKeyId = args.access_key_id
accessSecret = args.access_key_secret
domain = args.domain_name
ipv4_flag = 1
name_ipv4 = args.host
ipv6_flag = 0  # whether to enable IPv6 DDNS resolution: 1 to enable, 0 to disable
name_ipv6 = "ipv6.test"  # subdomain to resolve via IPv6 DDNS
client = AcsClient(accessKeyId, accessSecret, 'cn-hangzhou')
def update(RecordId, RR, Type, Value):  # update an existing domain DNS record
from aliyunsdkalidns.request.v20150109.UpdateDomainRecordRequest import UpdateDomainRecordRequest
request = UpdateDomainRecordRequest()
request.set_accept_format('json')
request.set_RecordId(RecordId)
request.set_RR(RR)
request.set_Type(Type)
request.set_Value(Value)
response = client.do_action_with_exception(request)
def add(DomainName, RR, Type, Value):  # add a new domain DNS record
from aliyunsdkalidns.request.v20150109.AddDomainRecordRequest import AddDomainRe | t.set_accept_format('json')
request.set_DomainName(domain)
request.set_SubDomain(name_ipv4 + '.' + domain)
    response = client.do_action_with_exception(request)  # get the list of DNS records
    domain_list = json.loads(response)  # convert the returned JSON into a Python object
    ip = urlopen('https://api-ipv4.ip.sb/ip').read()  # get the IPv4 address via the IP.SB API
    ipv4 = str(ip, encoding='utf-8')
    print("Current IPv4 address: %s" % ipv4)
    if domain_list['TotalCount'] == 0:
        add(domain, name_ipv4, "A", ipv4)
        print("Created a new DNS record successfully")
    elif domain_list['TotalCount'] == 1:
        if domain_list['DomainRecords']['Record'][0]['Value'].strip() != ipv4.strip():
            update(domain_list['DomainRecords']['Record'][0]['RecordId'], name_ipv4, "A", ipv4)
            print("Updated the DNS record successfully")
        else:  # https://blog.zeruns.tech
            print("IPv4 address unchanged")
elif domain_list['TotalCount'] > 1:
from aliyunsdkalidns.request.v20150109.DeleteSubDomainRecordsRequest import DeleteSubDomainRecordsRequest
request = DeleteSubDomainRecordsRequest()
request.set_accept_format('json')
request.set_DomainName(domain) # https://blog.zeruns.tech
request.set_RR(name_ipv4)
response = client.do_action_with_exception(request)
add(domain, name_ipv4, "A", ipv4)
print("修改域名解析成功")
if ipv6_flag == 1:
request = DescribeSubDomainRecordsRequest()
request.set_accept_format('json')
request.set_DomainName(domain)
request.set_SubDomain(name_ipv6 + '.' + domain)
response = client.do_action_with_exception(request) # fetch the list of DNS records for the subdomain
domain_list = json.loads(response) # parse the returned JSON into a Python dict
ip = urlopen('https://api-ipv6.ip.sb/ip').read() # get the current IPv6 address via the IP.SB API
ipv6 = str(ip, encoding='utf-8')
print("Got IPv6 address: %s" % ipv6)
if domain_list['TotalCount'] == 0:
add(domain, name_ipv6, "AAAA", ipv6)
print("DNS record created successfully")
elif domain_list['TotalCount'] == 1:
if domain_list['DomainRecords']['Record'][0]['Value'].strip() != ipv6.strip():
update(domain_list['DomainRecords']['Record'][0]['RecordId'], name_ipv6, "AAAA", ipv6)
print("DNS record updated successfully")
else: # https://blog.zeruns.tech
print("IPv6 address unchanged")
elif domain_list['TotalCount'] > 1:
from aliyunsdkalidns.request.v20150109.DeleteSubDomainRecordsRequest import DeleteSubDomainRecordsRequest
request = DeleteSubDomainRecordsRequest()
request.set_accept_format('json')
request.set_DomainName(domain)
request.set_RR(name_ipv6) # https://blog.zeruns.tech
response = client.do_action_with_exception(request)
add(domain, name_ipv6, "AAAA", ipv6)
print("修改域名解析成功")
| cordRequest
request = AddDomainRecordRequest()
request.set_accept_format('json')
request.set_DomainName(DomainName)
request.set_RR(RR) # https://blog.zeruns.tech
request.set_Type(Type)
request.set_Value(Value)
response = client.do_action_with_exception(request)
if ipv4_flag == 1:
request = DescribeSubDomainRecordsRequest()
reques |
consistency_losses.py | # coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loss functions that impose RGB and depth motion-consistency across frames."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
from depth_and_motion_learning import resampler
from depth_and_motion_learning import transform_utils
def rgbd_consistency_loss(frame1transformed_depth,
frame1rgb,
frame2depth,
frame2rgb,
validity_mask=None):
"""Computes a loss that penalizes RGBD inconsistencies between frames.
This function computes 3 losses that penalize inconsistencies between two
frames: depth, RGB, and structural similarity. It IS NOT SYMMETRIC with
respect to both frames. In particular, to address occlusions, it only
penalizes depth and RGB inconsistencies at pixels where frame1 is closer to
the camera than frame2 (Why? see https://arxiv.org/abs/1904.04998). Therefore
the intended usage pattern is running it twice - second time with the two
frames swapped.
Args:
frame1transformed_depth: A transform_depth_map.TransformedDepthMap object
representing the depth map of frame 1 after it was motion-transformed to
frame 2, a motion transform that accounts for all camera and object motion
that occurred between frame1 and frame2. The tensors inside
frame1transformed_depth are of shape [B, H, W].
frame1rgb: A tf.Tensor of shape [B, H, W, C] containing the RGB image at
frame1.
frame2depth: A tf.Tensor of shape [B, H, W] containing the depth map at
frame2.
frame2rgb: A tf.Tensor of shape [B, H, W, C] containing the RGB image at
frame2.
validity_mask: a tf.Tensor of a floating point type and a shape of
[B, H, W, 1] containing a validity mask.
Returns:
A dictionary from string to tf.Tensor, with the following entries:
depth_error: A tf scalar, the depth mismatch error between the two frames.
rgb_error: A tf scalar, the rgb mismatch error between the two frames.
ssim_error: A tf scalar, the structural similarity mismatch error between
the two frames.
depth_proximity_weight: A tf.Tensor of shape [B, H, W], representing a
function that peaks (at 1.0) for pixels where there is depth consistency
between the two frames, and is small otherwise.
frame1_closer_to_camera: A tf.Tensor of shape [B, H, W, 1], a mask that is
1.0 when the depth map of frame 1 has smaller depth than frame 2.
"""
frame2rgbd = tf.concat(
[frame2rgb, tf.expand_dims((frame2depth), -1)], axis=-1) | frame1transformed_depth.pixel_y,
safe=False)
frame2rgb_resampled, frame2depth_resampled = tf.split(
frame2rgbd_resampled, [3, 1], axis=-1)
frame2depth_resampled = tf.squeeze(frame2depth_resampled, axis=-1)
# f1td.depth is the predicted depth at [pixel_y, pixel_x] for frame2. Now we
# generate (by interpolation) the actual depth values for frame2's depth, at
# the same locations, so that we can compare the two depths.
# We penalize inconsistencies between the two frames' depth maps only if the
# transformed depth map (of frame 1) falls closer to the camera than the
# actual depth map (of frame 2). This is intended for avoiding penalizing
# points that become occluded because of the transform.
# So what about depth inconsistencies where frame1's depth map is FARTHER from
# the camera than frame2's? These will be handled when we swap the roles of
# frame 1 and 2 (more in https://arxiv.org/abs/1904.04998).
frame1_closer_to_camera = tf.to_float(
tf.logical_and(
frame1transformed_depth.mask,
tf.less(frame1transformed_depth.depth, frame2depth_resampled)))
frames_l1_diff = tf.abs(frame2depth_resampled - frame1transformed_depth.depth)
if validity_mask is not None:
frames_l1_diff = frames_l1_diff * tf.squeeze(validity_mask, axis=[3])
depth_error = tf.reduce_mean(
tf.math.multiply_no_nan(frames_l1_diff, frame1_closer_to_camera))
frames_rgb_l1_diff = tf.abs(frame2rgb_resampled - frame1rgb)
if validity_mask is not None:
frames_rgb_l1_diff = frames_rgb_l1_diff * validity_mask
rgb_error = tf.math.multiply_no_nan(
frames_rgb_l1_diff, tf.expand_dims(frame1_closer_to_camera, -1))
rgb_error = tf.reduce_mean(rgb_error)
# We generate a weight function that peaks (at 1.0) for pixels where the
# depth difference is less than its standard deviation across the frame, and
# falls off to zero otherwise. This function is used later for weighing the
# structural similarity loss term. We only want to demand structural
# similarity for surfaces that are close to one another in the two frames.
depth_error_second_moment = _weighted_average(
tf.square(frame2depth_resampled - frame1transformed_depth.depth),
frame1_closer_to_camera) + 1e-4
depth_proximity_weight = tf.math.multiply_no_nan(
depth_error_second_moment /
(tf.square(frame2depth_resampled - frame1transformed_depth.depth) +
depth_error_second_moment), tf.to_float(frame1transformed_depth.mask))
if validity_mask is not None:
depth_proximity_weight = depth_proximity_weight * tf.squeeze(
validity_mask, axis=[3])
# If we don't stop the gradient, training won't start. The reason is presumably
# that then the network can push the depths apart instead of seeking RGB
# consistency.
depth_proximity_weight = tf.stop_gradient(depth_proximity_weight)
ssim_error, avg_weight = weighted_ssim(
frame2rgb_resampled,
frame1rgb,
depth_proximity_weight,
c1=float('inf'), # These values of c1 and c2 seemed to work better than
c2=9e-6) # defaults. TODO(gariel): Make them parameters rather
# than hard coded.
ssim_error_mean = tf.reduce_mean(
tf.math.multiply_no_nan(ssim_error, avg_weight))
endpoints = {
'depth_error': depth_error,
'rgb_error': rgb_error,
'ssim_error': ssim_error_mean,
'depth_proximity_weight': depth_proximity_weight,
'frame1_closer_to_camera': frame1_closer_to_camera
}
return endpoints
def motion_field_consistency_loss(frame1transformed_pixelx,
frame1transformed_pixely, mask, rotation1,
translation1, rotation2, translation2):
"""Computes a cycle consistency loss between two motion maps.
Given two rotation and translation maps (of two frames), and a mapping from
one frame to the other, this function assists in imposing that the fields at
frame 1 represent the opposite motion of the ones in frame 2.
In other words: At any given pixel on frame 1, if we apply the translation and
rotation designated at that pixel, we land on some pixel in frame 2, and if we
apply the translation and rotation designated there, we land back at the
original pixel at frame 1.
Args:
frame1transformed_pixelx: A tf.Tensor of shape [B, H, W] representing the
motion-transformed x-location of each pixel in frame 1.
frame1transformed_pixely: A tf.Tensor of shape [B, H, W] representing the
motion-transformed y-location of each pixel in frame 1.
mask: A tf.Tensor of shape [B, H, W, 2] expressing the weight of each pixel
in the calculation of the consistency loss.
rotation1: A tf.Tensor of shape [B, 3] representing rotation angles.
translation1: A tf.Tensor of shape [B, H, W, 3] representing translation
vectors.
rotation2: A tf.Tensor of shape [B, 3] representing rotation angles.
translation2: A tf.Tensor of shape [B, H, W, 3] representing translation
vectors.
Returns:
A dictionary from string to tf.Tensor, with the following entries:
rotation_error: A tf scalar, the rotation consistency error.
translation_error: A tf scalar, the translation consistency error.
"""
translation2resampled = resampler.resampler_with_unstacked_warp(
translation2,
tf.stop_gradient(frame1transformed_pixelx),
tf.stop_gradient(frame1transformed_pixely),
safe=False)
rotation1field = tf.broadcast_to(
_expand_dims_twice(rotation1, -2), tf.shape(translation1))
rotation2field = tf.broadcast_to(
_expand_dims_twice(rotation2, -2), tf.shape(translation2))
rotation1matrix = transform_utils.matrix_from_angles(rotation1field)
rotation2matrix = transform_utils.matrix_from_angles(rotation2field)
rot_unit, trans_zero = transform_utils.combine(rotation2matrix,
translation2resampled,
rotation1matrix, translation1)
eye = tf.eye(3, batch_shape=tf.shape(rot_unit)[:-2])
# We normalize the product of rotations by the product of their norms, to make
# the loss agnostic of their magnitudes, only wanting them to be opposite in
# directions. Otherwise the loss has a tendency to drive the rotations to
# zero.
rot_error = tf.reduce_mean(tf.square(rot_unit - eye), axis=(3, 4))
rot1_scale = tf.reduce_mean(tf.square(rotation1matrix - eye), axis=(3, 4))
rot2_scale = tf.reduce_mean(tf.square(rotation2matrix - eye), axis=(3, 4))
rot_error /= (1e-24 + rot1_scale + rot2_scale)
rotation_error = tf.reduce_mean(rot_error)
def norm(x):
return tf.reduce_sum(tf.square(x), axis=-1)
# Here again, we normalize by the magnitudes, for the same reason.
translation_error = tf.reduce_mean(tf.math.multiply_no_nan(
mask, norm(trans_zero) /
(1e-24 + norm(translation1) + norm(translation2resampled))))
return {
'rotation_error': rotation_error,
'translation_error': translation_error
}
def rgbd_and_motion_consistency_loss(frame1transformed_depth,
frame1rgb,
frame2depth,
frame2rgb,
rotation1,
translation1,
rotation2,
translation2,
validity_mask=None):
"""A helper that bundles rgbd and motion consistency losses together."""
endpoints = rgbd_consistency_loss(
frame1transformed_depth,
frame1rgb,
frame2depth,
frame2rgb,
validity_mask=validity_mask)
# We calculate the loss only for when frame1transformed_depth is closer to the
# camera than frame2 (occlusion-awareness). See explanation in
# rgbd_consistency_loss above.
mask = endpoints['frame1_closer_to_camera']
if validity_mask is not None:
mask *= tf.squeeze(validity_mask, axis=3)
endpoints.update(
motion_field_consistency_loss(frame1transformed_depth.pixel_x,
frame1transformed_depth.pixel_y, mask,
rotation1, translation1, rotation2,
translation2))
return endpoints
def weighted_ssim(x, y, weight, c1=0.01**2, c2=0.03**2, weight_epsilon=0.01):
"""Computes a weighted structured image similarity measure.
See https://en.wikipedia.org/wiki/Structural_similarity#Algorithm. The only
difference here is that not all pixels are weighted equally when calculating
the moments - they are weighted by a weight function.
Args:
x: A tf.Tensor representing a batch of images, of shape [B, H, W, C].
y: A tf.Tensor representing a batch of images, of shape [B, H, W, C].
weight: A tf.Tensor of shape [B, H, W], representing the weight of each
pixel in both images when we come to calculate moments (means and
correlations).
c1: A floating point number, regularizes division by zero of the means.
c2: A floating point number, regularizes division by zero of the second
moments.
weight_epsilon: A floating point number, used to regularize division by the
weight.
Returns:
A tuple of two tf.Tensors. The first, of shape [B, H-2, W-2, C], is the scalar
similarity loss per pixel per channel, and the second, of shape
[B, H-2, W-2, 1], is the average pooled `weight`. It is needed so that we
know how much to weigh each pixel in the first tensor. For example, if
`weight` was very small in some area of the images, the first tensor will
still assign a loss to these pixels, but we shouldn't take the result too
seriously.
"""
if c1 == float('inf') and c2 == float('inf'):
raise ValueError('Both c1 and c2 are infinite, SSIM loss is zero. This is '
'likely unintended.')
weight = tf.expand_dims(weight, -1)
average_pooled_weight = _avg_pool3x3(weight)
weight_plus_epsilon = weight + weight_epsilon
inverse_average_pooled_weight = 1.0 / (average_pooled_weight + weight_epsilon)
def weighted_avg_pool3x3(z):
weighted_avg = _avg_pool3x3(z * weight_plus_epsilon)
return weighted_avg * inverse_average_pooled_weight
mu_x = weighted_avg_pool3x3(x)
mu_y = weighted_avg_pool3x3(y)
sigma_x = weighted_avg_pool3x3(x**2) - mu_x**2
sigma_y = weighted_avg_pool3x3(y**2) - mu_y**2
sigma_xy = weighted_avg_pool3x3(x * y) - mu_x * mu_y
if c1 == float('inf'):
ssim_n = (2 * sigma_xy + c2)
ssim_d = (sigma_x + sigma_y + c2)
elif c2 == float('inf'):
ssim_n = 2 * mu_x * mu_y + c1
ssim_d = mu_x**2 + mu_y**2 + c1
else:
ssim_n = (2 * mu_x * mu_y + c1) * (2 * sigma_xy + c2)
ssim_d = (mu_x**2 + mu_y**2 + c1) * (sigma_x + sigma_y + c2)
result = ssim_n / ssim_d
return tf.clip_by_value((1 - result) / 2, 0, 1), average_pooled_weight
def _avg_pool3x3(x):
return tf.nn.avg_pool(x, [1, 3, 3, 1], [1, 1, 1, 1], 'VALID')
def _weighted_average(x, w, epsilon=1.0):
weighted_sum = tf.reduce_sum(x * w, axis=(1, 2), keepdims=True)
sum_of_weights = tf.reduce_sum(w, axis=(1, 2), keepdims=True)
return weighted_sum / (sum_of_weights + epsilon)
def _expand_dims_twice(x, dim):
return tf.expand_dims(tf.expand_dims(x, dim), dim) | frame2rgbd_resampled = resampler.resampler_with_unstacked_warp(
frame2rgbd,
frame1transformed_depth.pixel_x, |
fm_dashboard.py | # -*- coding: utf-8 -*-
# Copyright (c) 2020, 9T9IT and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class FMDashboard(Document):
def make_outstanding_balances(self):
"""
Make outstanding balances for display
:return:
"""
self.outstanding_balances = None
outstanding_balances = _get_outstanding_balances(_get_properties(self.real_estate_property))
for outstanding_balance in outstanding_balances:
self.append('outstanding_balances', {
'property_name': outstanding_balance.get('property_name'),
'sales_invoice': outstanding_balance.get('sales_invoice'),
'outstanding_amount': outstanding_balance.get('outstanding_amount') | return list(map(lambda x: x['name'], frappe.get_all('Property', {'property_location': real_estate_property})))
def _get_outstanding_balances(filter_properties):
def make_data(balance):
property_name = _get_property_name(balance.get('pm_tenant_renting'))
return {
'property_name': property_name,
'sales_invoice': balance.get('name'),
'outstanding_amount': balance.get('outstanding_amount')
}
outstanding = frappe.db.sql("""
SELECT
si.name,
si.pm_tenant_renting,
si.outstanding_amount,
tr.property
FROM `tabSales Invoice` si
LEFT JOIN `tabTenant Renting` tr ON si.pm_tenant_renting = tr.name
WHERE si.docstatus = 1
AND si.outstanding_amount > 0
AND si.pm_tenant_renting != ''
""", as_dict=True)
outstanding = filter(lambda x: x['property'] in filter_properties, outstanding)
return list(map(make_data, outstanding))
def _get_property_name(tenant_renting):
data = frappe.db.sql("""
SELECT p.title
FROM `tabTenant Renting` tr
JOIN `tabProperty` p
ON tr.property = p.name
WHERE tr.name = %s
""", tenant_renting, as_dict=True)
return data[0]['title'] if data else None | })
def _get_properties(real_estate_property): |
cri.go | // Unless explicitly stated otherwise all files in this repository are licensed
// under the Apache License Version 2.0.
// This product includes software developed at Datadog (https://www.datadoghq.com/).
// Copyright 2016-2019 Datadog, Inc.
// +build cri
package containers
import (
yaml "gopkg.in/yaml.v2"
pb "k8s.io/kubernetes/pkg/kubelet/apis/cri/runtime/v1alpha2"
"github.com/DataDog/datadog-agent/pkg/aggregator"
"github.com/DataDog/datadog-agent/pkg/autodiscovery/integration"
"github.com/DataDog/datadog-agent/pkg/collector/check"
core "github.com/DataDog/datadog-agent/pkg/collector/corechecks"
"github.com/DataDog/datadog-agent/pkg/tagger"
"github.com/DataDog/datadog-agent/pkg/tagger/collectors"
"github.com/DataDog/datadog-agent/pkg/util/containers"
"github.com/DataDog/datadog-agent/pkg/util/containers/cri"
"github.com/DataDog/datadog-agent/pkg/util/log" | )
// CRIConfig holds the config of the check
type CRIConfig struct {
Tags []string `yaml:"tags"`
CollectDisk bool `yaml:"collect_disk"`
}
// CRICheck grabs CRI metrics
type CRICheck struct {
core.CheckBase
instance *CRIConfig
}
func init() {
core.RegisterCheck("cri", CRIFactory)
}
// CRIFactory is exported for integration testing
func CRIFactory() check.Check {
return &CRICheck{
CheckBase: core.NewCheckBase(criCheckName),
instance: &CRIConfig{},
}
}
// Parse parses the CRICheck config and set default values
func (c *CRIConfig) Parse(data []byte) error {
// default values
c.CollectDisk = false
if err := yaml.Unmarshal(data, c); err != nil {
return err
}
return nil
}
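// A minimal instance configuration for this check might look like (illustrative):
//
//   instances:
//     - collect_disk: true
//       tags:
//         - "env:sandbox"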
// Configure parses the check configuration and init the check
func (c *CRICheck) Configure(config, initConfig integration.Data) error {
err := c.CommonConfigure(config)
if err != nil {
return err
}
return c.instance.Parse(config)
}
// Run executes the check
func (c *CRICheck) Run() error {
sender, err := aggregator.GetSender(c.ID())
if err != nil {
return err
}
util, err := cri.GetUtil()
if err != nil {
c.Warnf("Error initialising check: %s", err)
return err
}
containerStats, err := util.ListContainerStats()
if err != nil {
c.Warnf("Cannot get containers from the CRI: %s", err)
return err
}
c.processContainerStats(sender, util.Runtime, containerStats)
sender.Commit()
return nil
}
// processContainerStats extracts metrics from the protobuf object
func (c *CRICheck) processContainerStats(sender aggregator.Sender, runtime string, containerStats map[string]*pb.ContainerStats) {
for cid, stats := range containerStats {
entityID := containers.BuildEntityName(runtime, cid)
tags, err := tagger.Tag(entityID, collectors.HighCardinality)
if err != nil {
log.Errorf("Could not collect tags for container %s: %s", cid[:12], err)
}
tags = append(tags, "runtime:"+runtime)
tags = append(tags, c.instance.Tags...)
sender.Gauge("cri.mem.rss", float64(stats.GetMemory().GetWorkingSetBytes().GetValue()), "", tags)
// Cumulative CPU usage (sum across all cores) since object creation.
sender.Rate("cri.cpu.usage", float64(stats.GetCpu().GetUsageCoreNanoSeconds().GetValue()), "", tags)
if c.instance.CollectDisk {
sender.Gauge("cri.disk.used", float64(stats.GetWritableLayer().GetUsedBytes().GetValue()), "", tags)
sender.Gauge("cri.disk.inodes", float64(stats.GetWritableLayer().GetInodesUsed().GetValue()), "", tags)
}
}
} | )
const (
criCheckName = "cri" |
slack.py | import json
import requests
from utils import get_secret
from utils import is_pro
def send_slack(text="", channel="test", blocks=None):
|
def slack_state_handler(task, old_state, new_state):
if not new_state.is_finished():
return new_state
failure = new_state.is_failed()
# Prepare message
if failure:
msg = f"*{task.name}:* :x:"
else:
msg = f"*{task.name}:* {task.duration} :heavy_check_mark:"
# Notify result
send_slack(msg, channel="events" if is_pro() else "test")
# In pro notify about failures in general
if failure and is_pro():
send_slack(msg, channel="general")
return new_state
| assert channel in ["test", "events", "general"]
webhook = get_secret(f"SLACK_WEBHOOK_{channel.upper()}")
data = {"text": text}
if blocks:
data["blocks"] = blocks
res = requests.post(
webhook, data=json.dumps(data), headers={"Content-Type": "application/json"}
)
res.raise_for_status() |
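# Example usage (illustrative): send_slack("nightly pipeline finished", channel="events")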
base_upgrader.py | """
Upgrade custom's game dir to the latest version.
"""
from utils import compare_version
class | (object):
"""
Upgrade a game dir from a version in [<from_min_version>, <from_max_version>) to version
<target_version>.
"""
# Can upgrade games whose version is between from_min_version and from_max_version.
# Minimum version 0.0.0 (this version is included).
from_min_version = (0, 0, 0)
# Maximum version 0.0.0 (this version is not included).
from_max_version = (0, 0, 0)
# Upgrade to the target version. None means the latest version.
target_version = None
def upgrade_game(self, game_dir, game_template, muddery_lib):
"""
Upgrade a game.
Args:
game_dir: (string) the game dir to be upgraded.
game_template: (string) the game template used to upgrade the game dir.
muddery_lib: (string) muddery's dir
"""
pass
def upgrade_data(self, data_path, game_template, muddery_lib):
"""
Upgrade game data.
Args:
data_path: (string) the data path to be upgraded.
game_template: (string) the game template used to upgrade the game dir.
muddery_lib: (string) muddery's dir
"""
pass
def can_upgrade(self, game_ver):
"""
game_ver: (list) version numbers.
"""
# The game version should be equal or bigger than from_min_version.
if compare_version(game_ver, self.from_min_version) == -1:
return False
# The game version should be smaller than from_max_version.
if compare_version(game_ver, self.from_max_version) != -1:
return False
return True
| BaseUpgrader |
registrations.py | import logging
import datetime
import urlparse
from django.core.exceptions import ValidationError
from django.db import models
from django.utils import timezone
from framework.auth import Auth
from framework.exceptions import PermissionsError
from osf.utils.fields import NonNaiveDateTimeField
from osf.exceptions import NodeStateError
from website.util import api_v2_url
from website import settings
from website.archiver import ARCHIVER_INITIATED
from osf.models import (
OSFUser, RegistrationSchema,
Retraction, Embargo, DraftRegistrationApproval,
EmbargoTerminationApproval,
)
from osf.models.archive import ArchiveJob
from osf.models.base import BaseModel, ObjectIDMixin
from osf.models.node import AbstractNode
from osf.models.nodelog import NodeLog
from osf.models.provider import RegistrationProvider
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
logger = logging.getLogger(__name__)
class Registration(AbstractNode):
WRITABLE_WHITELIST = [
'article_doi',
'description',
'is_public',
'node_license',
'category',
]
provider = models.ForeignKey('RegistrationProvider', related_name='registrations', null=True)
registered_date = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
registered_user = models.ForeignKey(OSFUser,
related_name='related_to',
on_delete=models.SET_NULL,
null=True, blank=True)
registered_schema = models.ManyToManyField(RegistrationSchema)
registered_meta = DateTimeAwareJSONField(default=dict, blank=True)
registered_from = models.ForeignKey('self',
related_name='registrations',
on_delete=models.SET_NULL,
null=True, blank=True)
# Sanctions
registration_approval = models.ForeignKey('RegistrationApproval',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
retraction = models.ForeignKey('Retraction',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
embargo = models.ForeignKey('Embargo',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
embargo_termination_approval = models.ForeignKey('EmbargoTerminationApproval',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
files_count = models.PositiveIntegerField(blank=True, null=True)
@staticmethod
def find_failed_registrations():
expired_if_before = timezone.now() - settings.ARCHIVE_TIMEOUT_TIMEDELTA
node_id_list = ArchiveJob.objects.filter(sent=False, datetime_initiated__lt=expired_if_before, status=ARCHIVER_INITIATED).values_list('dst_node', flat=True)
root_nodes_id = AbstractNode.objects.filter(id__in=node_id_list).values_list('root', flat=True).distinct()
stuck_regs = AbstractNode.objects.filter(id__in=root_nodes_id, is_deleted=False)
return stuck_regs
@property
def registered_schema_id(self):
if self.registered_schema.exists():
return self.registered_schema.first()._id
return None
@property
def is_registration(self):
"""For v1 compat."""
return True
@property
def is_stuck_registration(self):
return self in self.find_failed_registrations()
@property
def is_collection(self):
"""For v1 compat."""
return False
@property
def archive_job(self):
return self.archive_jobs.first() if self.archive_jobs.count() else None
@property
def sanction(self):
root = self._dirty_root
sanction = (
root.embargo_termination_approval or
root.retraction or
root.embargo or
root.registration_approval
)
if sanction:
return sanction
else:
return None
@property
def is_registration_approved(self):
root = self._dirty_root
if root.registration_approval is None:
return False
return root.registration_approval.is_approved
@property
def is_pending_embargo(self):
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.is_pending_approval
@property
def is_pending_embargo_for_existing_registration(self):
""" Returns True if Node has an Embargo pending approval for an
existing registrations. This is used specifically to ensure
registrations pre-dating the Embargo feature do not get deleted if
their respective Embargo request is rejected.
"""
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.pending_registration
@property
def is_retracted(self):
root = self._dirty_root
if root.retraction is None:
return False
return root.retraction.is_approved
@property
def is_pending_registration(self):
root = self._dirty_root
if root.registration_approval is None:
return False
return root.registration_approval.is_pending_approval
@property
def is_pending_retraction(self):
root = self._dirty_root
if root.retraction is None:
return False
return root.retraction.is_pending_approval
@property
def is_pending_embargo_termination(self):
root = self._dirty_root
if root.embargo_termination_approval is None:
return False
return root.embargo_termination_approval.is_pending_approval
@property
def is_embargoed(self):
"""A Node is embargoed if:
- it has an associated Embargo record
- that record has been approved
- the node is not public (embargo not yet lifted)
"""
root = self._dirty_root
if root.is_public or root.embargo is None:
return False
return root.embargo.is_approved
@property
def embargo_end_date(self):
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.embargo_end_date
@property
def archiving(self):
job = self.archive_job
return job and not job.done and not job.archive_tree_finished()
@property
def _dirty_root(self):
"""Equivalent to `self.root`, but don't let Django fetch a clean copy
when `self == self.root`. Use when it's important to reflect unsaved
state rather than database state.
"""
if self.id == self.root_id:
return self
return self.root
def date_withdrawn(self):
return getattr(self.root.retraction, 'date_retracted', None)
@property
def withdrawal_justification(self):
return getattr(self.root.retraction, 'justification', None)
def _initiate_embargo(self, user, end_date, for_existing_registration=False,
notify_initiator_on_complete=False):
"""Initiates the retraction process for a registration
:param user: User who initiated the retraction
:param end_date: Date when the registration should be made public
"""
end_date_midnight = datetime.datetime.combine(
end_date,
datetime.datetime.min.time()
).replace(tzinfo=end_date.tzinfo)
self.embargo = Embargo.objects.create(
initiated_by=user,
end_date=end_date_midnight,
for_existing_registration=for_existing_registration,
notify_initiator_on_complete=notify_initiator_on_complete
)
self.save() # Set foreign field reference Node.embargo
admins = self.get_admin_contributors_recursive(unique_users=True)
for (admin, node) in admins:
self.embargo.add_authorizer(admin, node)
self.embargo.save() # Save embargo's approval_state
return self.embargo
def embargo_registration(self, user, end_date, for_existing_registration=False,
notify_initiator_on_complete=False):
"""Enter registration into an embargo period at end of which, it will
be made public
:param user: User initiating the embargo
:param end_date: Date when the registration should be made public
:raises: NodeStateError if Node is not a registration
:raises: PermissionsError if user is not an admin for the Node
:raises: ValidationError if end_date is not within time constraints
"""
if not self.is_admin_contributor(user):
raise PermissionsError('Only admins may embargo a registration')
if not self._is_embargo_date_valid(end_date):
if (end_date - timezone.now()) >= settings.EMBARGO_END_DATE_MIN:
raise ValidationError('Registrations can only be embargoed for up to four years.')
raise ValidationError('Embargo end date must be at least three days in the future.')
embargo = self._initiate_embargo(user, end_date,
for_existing_registration=for_existing_registration,
notify_initiator_on_complete=notify_initiator_on_complete)
self.registered_from.add_log(
action=NodeLog.EMBARGO_INITIATED,
params={
'node': self.registered_from._id,
'registration': self._id,
'embargo_id': embargo._id,
},
auth=Auth(user),
save=True,
)
if self.is_public:
self.set_privacy('private', Auth(user))
def request_embargo_termination(self, auth):
"""Initiates an EmbargoTerminationApproval to lift this Embargoed Registration's
embargo early."""
if not self.is_embargoed:
raise NodeStateError('This node is not under active embargo')
if not self.root == self:
raise NodeStateError('Only the root of an embargoed registration can request termination')
approval = EmbargoTerminationApproval(
initiated_by=auth.user,
embargoed_registration=self,
)
admins = [admin for admin in self.root.get_admin_contributors_recursive(unique_users=True)]
for (admin, node) in admins:
approval.add_authorizer(admin, node=node)
approval.save()
approval.ask(admins)
self.embargo_termination_approval = approval
self.save()
return approval
def terminate_embargo(self, auth):
"""Handles the actual early termination of an Embargoed registration.
Adds a log to the registered_from Node.
"""
if not self.is_embargoed:
raise NodeStateError('This node is not under active embargo')
self.registered_from.add_log(
action=NodeLog.EMBARGO_TERMINATED,
params={
'project': self._id,
'node': self.registered_from._id,
'registration': self._id,
},
auth=None,
save=True
)
self.embargo.mark_as_completed()
for node in self.node_and_primary_descendants():
node.set_privacy(
self.PUBLIC,
auth=None,
log=False,
save=True
)
return True
def _initiate_retraction(self, user, justification=None):
"""Initiates the retraction process for a registration
:param user: User who initiated the retraction
:param justification: Justification, if given, for retraction
"""
self.retraction = Retraction.objects.create(
initiated_by=user,
justification=justification or None, # make empty strings None
state=Retraction.UNAPPROVED
)
self.save()
admins = self.get_admin_contributors_recursive(unique_users=True)
for (admin, node) in admins:
self.retraction.add_authorizer(admin, node)
self.retraction.save() # Save retraction approval state
return self.retraction
def retract_registration(self, user, justification=None, save=True):
"""Retract public registration. Instantiate new Retraction object
and associate it with the respective registration.
"""
if not self.is_public and not (self.embargo_end_date or self.is_pending_embargo):
raise NodeStateError('Only public or embargoed registrations may be withdrawn.')
if self.root_id != self.id:
raise NodeStateError('Withdrawal of non-parent registrations is not permitted.')
retraction = self._initiate_retraction(user, justification)
self.registered_from.add_log(
action=NodeLog.RETRACTION_INITIATED,
params={
'node': self.registered_from._id,
'registration': self._id,
'retraction_id': retraction._id,
},
auth=Auth(user),
)
self.retraction = retraction
if save:
self.save()
return retraction
def copy_unclaimed_records(self):
"""Copies unclaimed_records to unregistered contributors from the registered_from node"""
registered_from_id = self.registered_from._id
for contributor in self.contributors.filter(is_registered=False):
record = contributor.unclaimed_records.get(registered_from_id)
if record:
contributor.unclaimed_records[self._id] = record
contributor.save()
def delete_registration_tree(self, save=False):
logger.debug('Marking registration {} as deleted'.format(self._id))
self.is_deleted = True
for draft_registration in DraftRegistration.objects.filter(registered_node=self):
# Allow draft registration to be submitted
if draft_registration.approval:
draft_registration.approval = None
draft_registration.save()
if not getattr(self.embargo, 'for_existing_registration', False):
self.registered_from = None
if save:
self.save()
self.update_search()
for child in self.nodes_primary:
child.delete_registration_tree(save=save)
def update_files_count(self):
# Updates registration files_count at archival success or
# at the end of forced (manual) archive for restarted (stuck or failed) registrations.
field = AbstractNode._meta.get_field('modified')
field.auto_now = False
self.files_count = self.files.filter(deleted_on__isnull=True).count()
self.save()
field.auto_now = True
def add_tag(self, tag, auth=None, save=True, log=True, system=False):
if self.retraction is None:
super(Registration, self).add_tag(tag, auth, save, log, system)
else:
raise NodeStateError('Cannot add tags to withdrawn registrations.')
def add_tags(self, tags, auth=None, save=True, log=True, system=False):
if self.retraction is None:
super(Registration, self).add_tags(tags, auth, save, log, system)
else:
raise NodeStateError('Cannot add tags to withdrawn registrations.')
def remove_tag(self, tag, auth, save=True):
if self.retraction is None:
super(Registration, self).remove_tag(tag, auth, save)
else:
raise NodeStateError('Cannot remove tags of withdrawn registrations.')
def remove_tags(self, tags, auth, save=True):
if self.retraction is None:
super(Registration, self).remove_tags(tags, auth, save)
else:
raise NodeStateError('Cannot remove tags of withdrawn registrations.')
class Meta:
# custom permissions for use in the OSF Admin App
permissions = (
('view_registration', 'Can view registration details'),
)
class DraftRegistrationLog(ObjectIDMixin, BaseModel):
""" Simple log to show status changes for DraftRegistrations
field - _id - primary key
field - date - date the action took place
field - action - simple action to track what happened
field - user - user who did the action
"""
date = NonNaiveDateTimeField(default=timezone.now)
action = models.CharField(max_length=255)
draft = models.ForeignKey('DraftRegistration', related_name='logs',
null=True, blank=True, on_delete=models.CASCADE)
user = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
SUBMITTED = 'submitted'
REGISTERED = 'registered'
APPROVED = 'approved'
REJECTED = 'rejected'
def __repr__(self):
return ('<DraftRegistrationLog({self.action!r}, date={self.date!r}), '
'user={self.user!r} '
'with id {self._id!r}>').format(self=self)
class DraftRegistration(ObjectIDMixin, BaseModel):
URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/drafts/{draft_id}'
datetime_initiated = NonNaiveDateTimeField(auto_now_add=True)
datetime_updated = NonNaiveDateTimeField(auto_now=True)
deleted = NonNaiveDateTimeField(null=True, blank=True)
# Original Node a draft registration is associated with
branched_from = models.ForeignKey('Node', related_name='registered_draft',
null=True, on_delete=models.CASCADE)
initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
provider = models.ForeignKey('RegistrationProvider', related_name='draft_registrations', null=True)
# Dictionary field mapping question id to a question's comments and answer
# {
# <qid>: {
# 'comments': [{
# 'user': {
# 'id': <uid>,
# 'name': <name>
# },
# value: <value>,
# lastModified: <datetime>
# }],
# 'value': <value>
# }
# }
registration_metadata = DateTimeAwareJSONField(default=dict, blank=True)
registration_schema = models.ForeignKey('RegistrationSchema', null=True, on_delete=models.CASCADE)
registered_node = models.ForeignKey('Registration', null=True, blank=True,
related_name='draft_registration', on_delete=models.CASCADE)
approval = models.ForeignKey('DraftRegistrationApproval', null=True, blank=True, on_delete=models.CASCADE)
# Dictionary field mapping extra fields defined in the RegistrationSchema.schema to their
# values. Defaults should be provided in the schema (e.g. 'paymentSent': false),
# and these values are added to the DraftRegistration
# TODO: Use "FIELD_ALIASES"?
_metaschema_flags = DateTimeAwareJSONField(default=dict, blank=True)
notes = models.TextField(blank=True)
def __repr__(self):
return ('<DraftRegistration(branched_from={self.branched_from!r}) '
'with id {self._id!r}>').format(self=self)
# lazily set flags
@property
def flags(self):
if not self._metaschema_flags:
self._metaschema_flags = {}
meta_schema = self.registration_schema
if meta_schema:
schema = meta_schema.schema
flags = schema.get('flags', {})
dirty = False
for flag, value in flags.items():
if flag not in self._metaschema_flags:
self._metaschema_flags[flag] = value
dirty = True
if dirty:
self.save()
return self._metaschema_flags
@flags.setter
def flags(self, flags):
self._metaschema_flags.update(flags)
@property
def url(self):
return self.URL_TEMPLATE.format(
node_id=self.branched_from._id,
draft_id=self._id
)
@property
def absolute_url(self):
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def absolute_api_v2_url(self):
node = self.branched_from
path = '/nodes/{}/draft_registrations/{}/'.format(node._id, self._id)
return api_v2_url(path)
# used by django and DRF
def get_absolute_url(self):
return self.absolute_api_v2_url
@property
def requires_approval(self):
return self.registration_schema.requires_approval
@property
def is_pending_review(self):
return self.approval.is_pending_approval if (self.requires_approval and self.approval) else False
@property
def is_approved(self):
if self.requires_approval:
if not self.approval:
return bool(self.registered_node)
else:
return self.approval.is_approved
else:
return False
@property
def is_rejected(self):
if self.requires_approval:
if not self.approval:
return False
else:
return self.approval.is_rejected
else:
return False
@property
def status_logs(self):
""" List of logs associated with this node"""
return self.logs.all().order_by('date')
@classmethod
def create_from_node(cls, node, user, schema, data=None, provider=None):
if not provider:
provider = RegistrationProvider.load('osf')
draft = cls(
initiator=user,
branched_from=node,
registration_schema=schema,
registration_metadata=data or {},
provider=provider,
)
draft.save()
return draft
def update_metadata(self, metadata):
changes = []
# Prevent comments on approved drafts
if not self.is_approved:
for question_id, value in metadata.items():
old_value = self.registration_metadata.get(question_id)
if old_value:
old_comments = {
comment['created']: comment
for comment in old_value.get('comments', [])
}
new_comments = {
comment['created']: comment
for comment in value.get('comments', [])
}
old_comments.update(new_comments)
metadata[question_id]['comments'] = sorted(
old_comments.values(),
key=lambda c: c['created']
)
if old_value.get('value') != value.get('value'):
changes.append(question_id)
else:
changes.append(question_id)
self.registration_metadata.update(metadata)
return changes
def submit_for_review(self, initiated_by, meta, save=False):
approval = DraftRegistrationApproval(
meta=meta
)
approval.save()
self.approval = approval
self.add_status_log(initiated_by, DraftRegistrationLog.SUBMITTED)
if save:
self.save()
def register(self, auth, save=False, child_ids=None):
node = self.branched_from
# Create the registration
register = node.register_node(
schema=self.registration_schema,
auth=auth,
data=self.registration_metadata,
child_ids=child_ids,
provider=self.provider
)
self.registered_node = register
self.add_status_log(auth.user, DraftRegistrationLog.REGISTERED)
if save:
self.save()
return register
def approve(self, user):
self.approval.approve(user)
self.refresh_from_db()
self.add_status_log(user, DraftRegistrationLog.APPROVED)
self.approval.save()
def reject(self, user):
self.approval.reject(user)
self.add_status_log(user, DraftRegistrationLog.REJECTED)
self.approval.save()
def add_status_log(self, user, action):
log = DraftRegistrationLog(action=action, user=user, draft=self)
log.save()
def validate_metadata(self, *args, **kwargs):
| """
Validates draft's metadata
"""
return self.registration_schema.validate_metadata(*args, **kwargs) |
|
processor.go | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package system_updater
import (
"bufio"
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"strings"
"syscall"
"syscall/zx"
"syscall/zx/fdio"
"syscall/zx/fidl"
zxio "syscall/zx/io"
"app/context"
fuchsiaio "fidl/fuchsia/io"
"fidl/fuchsia/pkg"
"syslog"
)
type Package struct {
namever string
merkle string
}
func ConnectToPackageResolver() (*pkg.PackageResolverInterface, error) {
context := context.CreateFromStartupInfo()
req, pxy, err := pkg.NewPackageResolverInterfaceRequest()
if err != nil {
syslog.Errorf("control interface could not be acquired: %s", err)
return nil, err
}
context.ConnectToEnvService(req)
return pxy, nil
}
// CacheUpdatePackage caches the requested, possibly merkle-pinned, update
// package URL and returns the pkgfs path to the package.
func CacheUpdatePackage(updateURL string, resolver *pkg.PackageResolverInterface) (string, error) {
dirPxy, err := resolvePackage(updateURL, resolver)
if err != nil {
return "", err
}
defer dirPxy.Close()
channelProxy := (*fidl.ChannelProxy)(dirPxy)
updateDir := fdio.Directory{fdio.Node{(*zxio.NodeInterface)(channelProxy)}}
path := "meta"
f, err := updateDir.Open(path, zxio.OpenRightReadable, zxio.ModeTypeFile)
if err != nil {
return "", err
}
file := os.NewFile(uintptr(syscall.OpenFDIO(f)), path)
defer file.Close()
b, err := ioutil.ReadAll(file)
if err != nil {
return "", err
}
merkle := string(b)
return "/pkgfs/versions/" + merkle, nil
}
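// ParseRequirements reads the update package's "packages" and "images" manifests.
// Each non-empty line of the packages stream is expected to look like
// "<name-version>=<merkle>" (illustrative: "system_image/0=15ec7b..."), and each
// non-empty line of the images stream is a bare image name such as "zbi" or
// "bootloader".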
func ParseRequirements(pkgSrc io.ReadCloser, imgSrc io.ReadCloser) ([]*Package, []string, error) {
imgs := []string{}
pkgs := []*Package{}
rdr := bufio.NewReader(pkgSrc)
for {
l, err := rdr.ReadString('\n')
s := strings.TrimSpace(l)
if (err == nil || err == io.EOF) && len(s) > 0 {
entry := strings.Split(s, "=")
if len(entry) != 2 {
return nil, nil, fmt.Errorf("parser: entry format %q", s)
} else {
pkgs = append(pkgs, &Package{namever: entry[0], merkle: entry[1]})
}
}
if err != nil {
if err != io.EOF {
return nil, nil, fmt.Errorf("parser: got error reading packages file %s", err)
}
break
}
}
rdr = bufio.NewReader(imgSrc)
for {
l, err := rdr.ReadString('\n')
s := strings.TrimSpace(l)
if (err == nil || err == io.EOF) && len(s) > 0 {
imgs = append(imgs, s)
}
if err != nil {
if err != io.EOF {
return nil, nil, fmt.Errorf("parser: got error reading images file %s", err)
}
break
}
}
return pkgs, imgs, nil
}
func FetchPackages(pkgs []*Package, resolver *pkg.PackageResolverInterface) error {
var errCount int
for _, pkg := range pkgs {
if err := fetchPackage(pkg, resolver); err != nil {
syslog.Errorf("fetch error: %s", err)
errCount++
}
}
if errCount > 0 {
return fmt.Errorf("system update failed, %d packages had errors", errCount)
}
return nil
}
func fetchPackage(p *Package, resolver *pkg.PackageResolverInterface) error {
b, err := ioutil.ReadFile(filepath.Join("/pkgfs/versions", p.merkle, "meta"))
if err == nil {
// package is already installed, skip
if string(b) == p.merkle {
return nil
}
}
pkgURL := fmt.Sprintf("fuchsia-pkg://fuchsia.com/%s?hash=%s", p.namever, p.merkle)
dirPxy, err := resolvePackage(pkgURL, resolver)
if dirPxy != nil {
dirPxy.Close()
}
return err
}
func resolvePackage(pkgURL string, resolver *pkg.PackageResolverInterface) (*fuchsiaio.DirectoryInterface, error) {
selectors := []string{}
updatePolicy := pkg.UpdatePolicy{}
dirReq, dirPxy, err := fuchsiaio.NewDirectoryInterfaceRequest()
if err != nil {
return nil, err
}
syslog.Infof("requesting %s from update system", pkgURL)
status, err := resolver.Resolve(pkgURL, selectors, updatePolicy, dirReq)
if err != nil {
dirPxy.Close()
return nil, fmt.Errorf("fetch: Resolve error: %s", err)
}
statusErr := zx.Status(status)
if statusErr != zx.ErrOk {
dirPxy.Close()
return nil, fmt.Errorf("fetch: Resolve status: %s", statusErr)
}
return dirPxy, nil
}
var diskImagerPath = filepath.Join("/pkg", "bin", "install-disk-image")
func ValidateImgs(imgs []string, imgsPath string) error {
boardPath := filepath.Join(imgsPath, "board")
actual, err := ioutil.ReadFile(boardPath)
if err != nil {
if os.IsNotExist(err) {
return nil
} else {
return err
}
}
expected, err := ioutil.ReadFile("/config/build-info/board")
if err != nil {
return err
}
if !bytes.Equal(actual, expected) {
return fmt.Errorf("parser: expected board name %s found %s", expected, actual)
}
return nil
}
func WriteImgs(imgs []string, imgsPath string) error {
syslog.Infof("Writing images %+v from %q", imgs, imgsPath)
for _, img := range imgs {
imgPath := filepath.Join(imgsPath, img)
if fi, err := os.Stat(imgPath); err != nil || fi.Size() == 0 {
syslog.Warnf("img_writer: %q image not found or zero length, skipping", img)
continue
}
var c *exec.Cmd
switch img {
case "zbi", "zbi.signed":
c = exec.Command(diskImagerPath, "install-zircona")
case "zedboot", "zedboot.signed":
c = exec.Command(diskImagerPath, "install-zirconr")
case "bootloader":
c = exec.Command(diskImagerPath, "install-bootloader")
case "board":
continue
default:
return fmt.Errorf("unrecognized image %q", img)
}
syslog.Infof("img_writer: writing %q from %q", img, imgPath)
out, err := writeImg(c, imgPath)
if len(out) != 0 {
syslog.Infof("img_writer: %s", string(out))
}
if err != nil |
syslog.Infof("img_writer: wrote %q successfully from %q", img, imgPath)
}
return nil
}
func writeImg(c *exec.Cmd, path string) ([]byte, error) {
info, err := os.Stat(path)
if err != nil {
return nil, err
}
if info.Size() == 0 {
return nil, fmt.Errorf("img_writer: image file is empty!")
}
imgFile, err := os.Open(path)
if err != nil {
return nil, err
}
defer imgFile.Close()
c.Stdin = imgFile
return c.CombinedOutput()
}
// UpdateCurrentChannel persists the update channel info for a successful update
func UpdateCurrentChannel() error {
targetPath := "/misc/ota/target_channel.json"
contents, err := ioutil.ReadFile(targetPath)
if err != nil {
return fmt.Errorf("no target channel recorded in %v: %v", targetPath, err)
}
currentPath := "/misc/ota/current_channel.json"
partPath := currentPath + ".part"
f, err := os.Create(partPath)
if err != nil {
return fmt.Errorf("unable to write current channel to %v: %v", partPath, err)
}
defer f.Close()
buf := bytes.NewBuffer(contents)
_, err = buf.WriteTo(f)
if err != nil {
return fmt.Errorf("unable to write current channel to %v: %v", currentPath, err)
}
f.Sync()
f.Close()
if err := os.Rename(partPath, currentPath); err != nil {
return fmt.Errorf("error moving %v to %v: %v", partPath, currentPath, err)
}
return nil
}
| {
syslog.Errorf("img_writer: error writing %q from %q: %s", img, imgPath, err)
if len(out) != 0 {
syslog.Errorf("img_writer: %s", string(out))
}
return err
} |