file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
gulpfile.babel.js | import bg from 'gulp-bg';
import eslint from 'gulp-eslint';
import gulp from 'gulp';
import makeWebpackConfig from './webpack/makeconfig';
import path from 'path';
import runSequence from 'run-sequence';
import webpackBuild from './webpack/build';
import webpackDevServer from './webpack/devserver';
import yargs from 'yargs';
import {Server as KarmaServer} from 'karma';
const args = yargs
.alias('p', 'production')
.argv;
// Launch a Karma server driven by the repo's karma.conf.js.
// `singleRun: true` executes the suite once and exits (CI mode);
// `singleRun: false` keeps watching files (TDD mode).
const runKarma = ({singleRun}, done) => {
  const configFile = path.join(__dirname, 'karma.conf.js'); // eslint-disable-line no-undef
  new KarmaServer({configFile, singleRun}, done).start();
};
// Lint the gulpfile, app sources, and webpack config.
// Test fixtures under __tests__ are deliberately excluded.
const runEslint = () => {
  const sources = [
    'gulpfile.babel.js',
    'src/**/*.js',
    'webpack/*.js',
    '!**/__tests__/*.*',
  ];
  return gulp.src(sources).pipe(eslint()).pipe(eslint.format());
};
// Set NODE_ENV from the --production / -p CLI flag so downstream
// tasks (webpack, server) pick the right build mode.
gulp.task('env', () => {
  process.env.NODE_ENV = args.production ? 'production' : 'development'; // eslint-disable-line no-undef
});
gulp.task('build-webpack-production', webpackBuild(makeWebpackConfig(false)));
gulp.task('build-webpack-dev', webpackDevServer(makeWebpackConfig(true)));
gulp.task('build-webpack', [args.production
? 'build-webpack-production'
: 'build-webpack-dev'
]);
gulp.task('build', ['build-webpack']);
gulp.task('eslint', () => {
return runEslint();
});
gulp.task('eslint-ci', () => {
// Exit process with an error code (1) on lint error for CI build.
return runEslint().pipe(eslint.failAfterError());
});
gulp.task('karma-ci', (done) => {
runKarma({singleRun: true}, done);
});
gulp.task('karma', (done) => {
runKarma({singleRun: false}, done); | runSequence('eslint-ci', 'karma-ci', 'build-webpack-production', done);
});
gulp.task('server', ['env', 'build'], bg('node', 'src/server'));
gulp.task('tdd', (done) => {
// Run karma configured for TDD.
runSequence('server', 'karma', done);
});
gulp.task('default', ['server']); | });
gulp.task('test', (done) => { |
message.go | // Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package email
import (
"crypto/tls"
"fmt"
"io"
"net"
"net/smtp"
"os"
"strings"
"time"
"github.com/jaytaylor/html2text"
"gopkg.in/gomail.v2"
log "unknwon.dev/clog/v2"
"gogs.io/gogs/internal/conf"
)
// Message wraps a gomail.Message together with logging context and a
// confirmation channel that the queue worker signals after a send attempt.
type Message struct {
Info string // Message information for log purpose.
*gomail.Message
confirmChan chan struct{} // closed-over by Send/processMailQueue to confirm delivery attempts
}
// NewMessageFrom creates new mail message object with custom From header.
func NewMessageFrom(to []string, from, subject, htmlBody string) *Message |
// NewMessage creates new mail message object with default From header.
func NewMessage(to []string, subject, body string) *Message {
return NewMessageFrom(to, conf.Email.From, subject, body)
}
// loginAuth implements smtp.Auth for servers that only offer the
// non-standard AUTH LOGIN mechanism.
type loginAuth struct {
username, password string
}
// SMTP AUTH LOGIN Auth Handler
func LoginAuth(username, password string) smtp.Auth {
return &loginAuth{username, password}
}
// Start begins the LOGIN exchange; credentials are supplied later by Next
// in response to the server's "Username:" / "Password:" challenges.
func (*loginAuth) Start(_ *smtp.ServerInfo) (string, []byte, error) {
return "LOGIN", []byte{}, nil
}
// Next answers the server's AUTH LOGIN challenges with the username and
// password, and fails on any challenge it does not recognize.
// Fix: corrected the typo in the error message ("unknwon" -> "unknown").
func (a *loginAuth) Next(fromServer []byte, more bool) ([]byte, error) {
if more {
switch string(fromServer) {
case "Username:":
return []byte(a.username), nil
case "Password:":
return []byte(a.password), nil
default:
return nil, fmt.Errorf("unknown fromServer: %s", string(fromServer))
}
}
return nil, nil
}
type Sender struct{}
// Send implements gomail.Sender by speaking SMTP directly: it dials the
// configured host, negotiates TLS (implicit SMTPS when the port ends with
// 465, otherwise opportunistic STARTTLS), authenticates using the strongest
// mechanism the server advertises, and streams the serialized message.
func (*Sender) Send(from string, to []string, msg io.WriterTo) error {
opts := conf.Email
host, port, err := net.SplitHostPort(opts.Host)
if err != nil {
return err
}
// The certificate is validated against `host` unless SkipVerify is set.
tlsconfig := &tls.Config{
InsecureSkipVerify: opts.SkipVerify,
ServerName: host,
}
if opts.UseCertificate {
cert, err := tls.LoadX509KeyPair(opts.CertFile, opts.KeyFile)
if err != nil {
return err
}
tlsconfig.Certificates = []tls.Certificate{cert}
}
conn, err := net.Dial("tcp", net.JoinHostPort(host, port))
if err != nil {
return err
}
defer conn.Close()
isSecureConn := false
// Start TLS directly if the port ends with 465 (SMTPS protocol)
if strings.HasSuffix(port, "465") {
conn = tls.Client(conn, tlsconfig)
isSecureConn = true
}
client, err := smtp.NewClient(conn, host)
if err != nil {
return fmt.Errorf("NewClient: %v", err)
}
if !opts.DisableHELO {
// Fall back to this machine's hostname when no HELO name is configured.
hostname := opts.HELOHostname
if hostname == "" {
hostname, err = os.Hostname()
if err != nil {
return err
}
}
if err = client.Hello(hostname); err != nil {
return fmt.Errorf("Hello: %v", err)
}
}
// If not using SMTPS, always use STARTTLS if available
hasStartTLS, _ := client.Extension("STARTTLS")
if !isSecureConn && hasStartTLS {
if err = client.StartTLS(tlsconfig); err != nil {
return fmt.Errorf("StartTLS: %v", err)
}
}
// Pick the strongest advertised mechanism: CRAM-MD5, then PLAIN, then LOGIN.
canAuth, options := client.Extension("AUTH")
if canAuth && len(opts.User) > 0 {
var auth smtp.Auth
if strings.Contains(options, "CRAM-MD5") {
auth = smtp.CRAMMD5Auth(opts.User, opts.Password)
} else if strings.Contains(options, "PLAIN") {
auth = smtp.PlainAuth("", opts.User, opts.Password, host)
} else if strings.Contains(options, "LOGIN") {
// Patch for AUTH LOGIN
auth = LoginAuth(opts.User, opts.Password)
}
if auth != nil {
if err = client.Auth(auth); err != nil {
return fmt.Errorf("Auth: %v", err)
}
}
}
// SMTP envelope: MAIL FROM, then one RCPT TO per recipient.
if err = client.Mail(from); err != nil {
return fmt.Errorf("Mail: %v", err)
}
for _, rec := range to {
if err = client.Rcpt(rec); err != nil {
return fmt.Errorf("Rcpt: %v", err)
}
}
// DATA: stream the message body; Close commits the transfer.
w, err := client.Data()
if err != nil {
return fmt.Errorf("Data: %v", err)
} else if _, err = msg.WriteTo(w); err != nil {
return fmt.Errorf("WriteTo: %v", err)
} else if err = w.Close(); err != nil {
return fmt.Errorf("Close: %v", err)
}
return client.Quit()
}
// processMailQueue drains mailQueue forever, attempting delivery of each
// message through Sender and signalling the message's confirmChan so that
// a blocked Send caller (hook mode) can proceed.
func processMailQueue() {
sender := &Sender{}
for msg := range mailQueue {
log.Trace("New e-mail sending request %s: %s", msg.GetHeader("To"), msg.Info)
if err := gomail.Send(sender, msg.Message); err != nil {
log.Error("Failed to send emails %s: %s - %v", msg.GetHeader("To"), msg.Info, err)
} else {
log.Trace("E-mails sent %s: %s", msg.GetHeader("To"), msg.Info)
}
// Always signal — success or failure — so waiters are never stranded.
msg.confirmChan <- struct{}{}
}
}
var mailQueue chan *Message
// NewContext initializes settings for mailer.
func NewContext() {
// Nothing to do when mail support is turned off.
if !conf.Email.Enabled {
return
}
// A non-nil queue means a previous call already started the worker.
// During reinstall (install lock switched off and back on) this function
// runs again while that worker is still processing; starting a second
// queue would produce a race condition.
if mailQueue != nil {
return
}
mailQueue = make(chan *Message, 1000)
go processMailQueue()
}
// Send puts new message object into mail queue.
// It returns without confirmation (mail processed asynchronously) in normal cases,
// but waits/blocks under hook mode to make sure mail has been sent.
func Send(msg *Message) {
mailQueue <- msg
if conf.HookMode {
// Hook mode: block until processMailQueue confirms the send attempt.
<-msg.confirmChan
return
}
// Normal mode: drain the confirmation asynchronously so the queue worker
// is never blocked waiting on an unread confirmChan.
go func() {
<-msg.confirmChan
}()
}
| {
log.Trace("NewMessageFrom (htmlBody):\n%s", htmlBody)
msg := gomail.NewMessage()
msg.SetHeader("From", from)
msg.SetHeader("To", to...)
msg.SetHeader("Subject", conf.Email.SubjectPrefix+subject)
msg.SetDateHeader("Date", time.Now())
contentType := "text/html"
body := htmlBody
switchedToPlaintext := false
if conf.Email.UsePlainText || conf.Email.AddPlainTextAlt {
plainBody, err := html2text.FromString(htmlBody)
if err != nil {
log.Error("html2text.FromString: %v", err)
} else {
contentType = "text/plain"
body = plainBody
switchedToPlaintext = true
}
}
msg.SetBody(contentType, body)
if switchedToPlaintext && conf.Email.AddPlainTextAlt && !conf.Email.UsePlainText {
// The AddAlternative method name is confusing - adding html as an "alternative" will actually cause mail
// clients to show it as first priority, and the text "main body" is the 2nd priority fallback.
// See: https://godoc.org/gopkg.in/gomail.v2#Message.AddAlternative
msg.AddAlternative("text/html", htmlBody)
}
return &Message{
Message: msg,
confirmChan: make(chan struct{}),
}
} |
ModalTextinput.js | import React, { useState } from "react";
import styled from "styled-components";
const TextInput = styled.input`
border: #547fb3 solid 2.5px;
border-radius: 12px;
color: #707070;
text-align: center;
font-family: "Roboto", sans-serif;
font-weight: bold;
font-size: 16px;
/* opacity: 0.5; */
padding: 10px;
width: 304px;
border-color: #547fb3;
/* color: ${props => (props.primary ? "#707070" : "#547fb3")}; */
`;
export default function RestaurantInput(onChange, value) {
const [input, setInput] = useState("");
return (
<div>
{" "}
<TextInput | type="text"
value={input}
onChange={event => {
setInput(event.target.value);
}}
/>
<p> Input value: {input} </p>
</div>
);
} | |
import cog

# Initialize the engine before any resources are loaded; the dump had
# misplaced this call after cog.quit(), where it could never take effect.
cog.init()

# Load and play the test sound, then pump the main loop until the user quits.
snd = cog.snd_add("media/testsnd.wav")
cog.snd_play(snd)
while not cog.hasquit():
    cog.loopstep()
cog.quit()
sse.rs | //! Streaming SIMD Extensions (SSE)
use crate::{
core_arch::{simd::*, simd_llvm::*, x86::*},
intrinsics, mem, ptr,
};
#[cfg(test)]
use stdarch_test::assert_instr;
/// Adds the first component of `a` and `b`, the other components are copied
/// from `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(addss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_add_ss(a: __m128, b: __m128) -> __m128 {
addss(a, b)
}
/// Adds __m128 vectors.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_add_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(addps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_add_ps(a: __m128, b: __m128) -> __m128 {
simd_add(a, b)
}
/// Subtracts the first component of `b` from `a`, the other components are
/// copied from `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(subss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_sub_ss(a: __m128, b: __m128) -> __m128 {
subss(a, b)
}
/// Subtracts __m128 vectors.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sub_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(subps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_sub_ps(a: __m128, b: __m128) -> __m128 {
simd_sub(a, b)
}
/// Multiplies the first component of `a` and `b`, the other components are
/// copied from `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(mulss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_mul_ss(a: __m128, b: __m128) -> __m128 {
mulss(a, b)
}
/// Multiplies __m128 vectors.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_mul_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(mulps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_mul_ps(a: __m128, b: __m128) -> __m128 {
simd_mul(a, b)
}
/// Divides the first component of `a` by the first component of `b`, the
/// other components are copied from `a`. (DIVSS computes `a0 / b0`; the
/// previous wording reversed the operand order.)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_div_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(divss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_div_ss(a: __m128, b: __m128) -> __m128 {
divss(a, b)
}
/// Divides __m128 vectors.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_div_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(divps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_div_ps(a: __m128, b: __m128) -> __m128 {
simd_div(a, b)
}
/// Returns the square root of the first single-precision (32-bit)
/// floating-point element in `a`, the other elements are unchanged.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sqrt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(sqrtss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_sqrt_ss(a: __m128) -> __m128 {
sqrtss(a)
}
/// Returns the square root of packed single-precision (32-bit) floating-point
/// elements in `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sqrt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(sqrtps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_sqrt_ps(a: __m128) -> __m128 {
sqrtps(a)
}
/// Returns the approximate reciprocal of the first single-precision
/// (32-bit) floating-point element in `a`, the other elements are unchanged.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(rcpss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_rcp_ss(a: __m128) -> __m128 {
rcpss(a)
}
/// Returns the approximate reciprocal of packed single-precision (32-bit)
/// floating-point elements in `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rcp_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(rcpps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_rcp_ps(a: __m128) -> __m128 {
rcpps(a)
}
/// Returns the approximate reciprocal square root of the first single-precision
/// (32-bit) floating-point element in `a`, the other elements are unchanged.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rsqrt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(rsqrtss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_rsqrt_ss(a: __m128) -> __m128 {
rsqrtss(a)
}
/// Returns the approximate reciprocal square root of packed single-precision
/// (32-bit) floating-point elements in `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_rsqrt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(rsqrtps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_rsqrt_ps(a: __m128) -> __m128 {
rsqrtps(a)
}
/// Compares the first single-precision (32-bit) floating-point element of `a`
/// and `b`, and return the minimum value in the first element of the return
/// value, the other elements are copied from `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(minss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_min_ss(a: __m128, b: __m128) -> __m128 {
minss(a, b)
}
/// Compares packed single-precision (32-bit) floating-point elements in `a` and
/// `b`, and return the corresponding minimum values.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_min_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(minps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_min_ps(a: __m128, b: __m128) -> __m128 {
// See the `test_mm_min_ps` test why this can't be implemented using `simd_fmin`.
minps(a, b)
}
/// Compares the first single-precision (32-bit) floating-point element of `a`
/// and `b`, and return the maximum value in the first element of the return
/// value, the other elements are copied from `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(maxss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_max_ss(a: __m128, b: __m128) -> __m128 {
maxss(a, b)
}
/// Compares packed single-precision (32-bit) floating-point elements in `a` and
/// `b`, and return the corresponding maximum values.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_max_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(maxps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_max_ps(a: __m128, b: __m128) -> __m128 {
// See the `test_mm_min_ps` test why this can't be implemented using `simd_fmax`.
maxps(a, b)
}
/// Bitwise AND of packed single-precision (32-bit) floating-point elements.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_and_ps)
#[inline]
#[target_feature(enable = "sse")]
// i586 only seems to generate plain `and` instructions, so ignore it.
#[cfg_attr(
all(test, any(target_arch = "x86_64", target_feature = "sse2")),
assert_instr(andps)
)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_and_ps(a: __m128, b: __m128) -> __m128 {
let a: __m128i = mem::transmute(a);
let b: __m128i = mem::transmute(b);
mem::transmute(simd_and(a, b))
}
/// Bitwise AND-NOT of packed single-precision (32-bit) floating-point
/// elements.
///
/// Computes `!a & b` for each bit in `a` and `b`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_andnot_ps)
#[inline]
#[target_feature(enable = "sse")]
// i586 only seems to generate plain `not` and `and` instructions, so ignore
// it.
#[cfg_attr(
all(test, any(target_arch = "x86_64", target_feature = "sse2")),
assert_instr(andnps)
)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_andnot_ps(a: __m128, b: __m128) -> __m128 {
let a: __m128i = mem::transmute(a);
let b: __m128i = mem::transmute(b);
let mask: __m128i = mem::transmute(i32x4::splat(-1));
mem::transmute(simd_and(simd_xor(mask, a), b))
}
/// Bitwise OR of packed single-precision (32-bit) floating-point elements.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_or_ps)
#[inline]
#[target_feature(enable = "sse")]
// i586 only seems to generate plain `or` instructions, so we ignore it.
#[cfg_attr(
all(test, any(target_arch = "x86_64", target_feature = "sse2")),
assert_instr(orps)
)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_or_ps(a: __m128, b: __m128) -> __m128 {
let a: __m128i = mem::transmute(a);
let b: __m128i = mem::transmute(b);
mem::transmute(simd_or(a, b))
}
/// Bitwise exclusive OR of packed single-precision (32-bit) floating-point
/// elements.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_xor_ps)
#[inline]
#[target_feature(enable = "sse")]
// i586 only seems to generate plain `xor` instructions, so we ignore it.
#[cfg_attr(
all(test, any(target_arch = "x86_64", target_feature = "sse2")),
assert_instr(xorps)
)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_xor_ps(a: __m128, b: __m128) -> __m128 {
let a: __m128i = mem::transmute(a);
let b: __m128i = mem::transmute(b);
mem::transmute(simd_xor(a, b))
}
/// Compares the lowest `f32` of both inputs for equality. The lowest 32 bits of
/// the result will be `0xffffffff` if the two inputs are equal, or `0`
/// otherwise. The upper 96 bits of the result are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpeq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpeqss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpeq_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 0)
}
/// Compares the lowest `f32` of both inputs for less than. The lowest 32 bits
/// of the result will be `0xffffffff` if `a.extract(0)` is less than
/// `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result are the
/// upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmplt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpltss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmplt_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 1)
}
/// Compares the lowest `f32` of both inputs for less than or equal. The lowest
/// 32 bits of the result will be `0xffffffff` if `a.extract(0)` is less than
/// or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result
/// are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmple_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpless))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmple_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 2)
}
/// Compares the lowest `f32` of both inputs for greater than. The lowest 32
/// bits of the result will be `0xffffffff` if `a.extract(0)` is greater
/// than `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result
/// are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpgt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpltss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpgt_ss(a: __m128, b: __m128) -> __m128 {
simd_shuffle4!(a, cmpss(b, a, 1), [4, 1, 2, 3])
}
/// Compares the lowest `f32` of both inputs for greater than or equal. The
/// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is
/// greater than or equal `b.extract(0)`, or `0` otherwise. The upper 96 bits
/// of the result are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpge_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpless))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpge_ss(a: __m128, b: __m128) -> __m128 {
simd_shuffle4!(a, cmpss(b, a, 2), [4, 1, 2, 3])
}
/// Compares the lowest `f32` of both inputs for inequality. The lowest 32 bits
/// of the result will be `0xffffffff` if `a.extract(0)` is not equal to
/// `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result are the
/// upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpneq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpneqss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpneq_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 4)
}
/// Compares the lowest `f32` of both inputs for not-less-than. The lowest 32
/// bits of the result will be `0xffffffff` if `a.extract(0)` is not less than
/// `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result are the
/// upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnlt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnltss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnlt_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 5)
}
/// Compares the lowest `f32` of both inputs for not-less-than-or-equal. The
/// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is not
/// less than or equal to `b.extract(0)`, or `0` otherwise. The upper 96 bits
/// of the result are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnle_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnless))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnle_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 6)
}
/// Compares the lowest `f32` of both inputs for not-greater-than. The lowest 32
/// bits of the result will be `0xffffffff` if `a.extract(0)` is not greater
/// than `b.extract(0)`, or `0` otherwise. The upper 96 bits of the result are
/// the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpngt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnltss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpngt_ss(a: __m128, b: __m128) -> __m128 {
simd_shuffle4!(a, cmpss(b, a, 5), [4, 1, 2, 3])
}
/// Compares the lowest `f32` of both inputs for not-greater-than-or-equal. The
/// lowest 32 bits of the result will be `0xffffffff` if `a.extract(0)` is not
/// greater than or equal to `b.extract(0)`, or `0` otherwise. The upper 96
/// bits of the result are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnge_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnless))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnge_ss(a: __m128, b: __m128) -> __m128 {
simd_shuffle4!(a, cmpss(b, a, 6), [4, 1, 2, 3])
}
/// Checks if the lowest `f32` of both inputs are ordered. The lowest 32 bits of
/// the result will be `0xffffffff` if neither of `a.extract(0)` or
/// `b.extract(0)` is a NaN, or `0` otherwise. The upper 96 bits of the result
/// are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpord_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpordss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpord_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 7)
}
/// Checks if the lowest `f32` of both inputs are unordered. The lowest 32 bits
/// of the result will be `0xffffffff` if any of `a.extract(0)` or
/// `b.extract(0)` is a NaN, or `0` otherwise. The upper 96 bits of the result
/// are the upper 96 bits of `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpunord_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpunordss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpunord_ss(a: __m128, b: __m128) -> __m128 {
cmpss(a, b, 3)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input elements
/// were equal, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpeq_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpeqps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpeq_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 0)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is less than the corresponding element in `b`, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmplt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpltps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmplt_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 1)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is less than or equal to the corresponding element in `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmple_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpleps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmple_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 2)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is greater than the corresponding element in `b`, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpgt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpltps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpgt_ps(a: __m128, b: __m128) -> __m128 {
cmpps(b, a, 1)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is greater than or equal to the corresponding element in `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpge_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpleps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpge_ps(a: __m128, b: __m128) -> __m128 {
cmpps(b, a, 2)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input elements
/// are **not** equal, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpneq_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpneqps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpneq_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 4)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is **not** less than the corresponding element in `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnlt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnltps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnlt_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 5)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is **not** less than or equal to the corresponding element in `b`, or
/// `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnle_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnleps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnle_ps(a: __m128, b: __m128) -> __m128 {
cmpps(a, b, 6)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is **not** greater than the corresponding element in `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpngt_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnltps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpngt_ps(a: __m128, b: __m128) -> __m128 {
cmpps(b, a, 5)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// The result in the output vector will be `0xffffffff` if the input element
/// in `a` is **not** greater than or equal to the corresponding element in `b`,
/// or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpnge_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpnleps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpnge_ps(a: __m128, b: __m128) -> __m128 {
    // There is no "not greater than or equal" predicate; `a` NGE `b` is
    // expressed as `b` NLE `a`, hence the swapped operands with predicate 6.
    cmpps(b, a, 6)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// Returns four floats that have one of two possible bit patterns. The element
/// in the output vector will be `0xffffffff` if the input elements in `a` and
/// `b` are ordered (i.e., neither of them is a NaN), or 0 otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpord_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpordps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpord_ps(a: __m128, b: __m128) -> __m128 {
    // Immediate 7 selects the ORD (ordered) predicate. The check is symmetric,
    // so the swapped operand order does not affect the result.
    cmpps(b, a, 7)
}
/// Compares each of the four floats in `a` to the corresponding element in `b`.
/// Returns four floats that have one of two possible bit patterns. The element
/// in the output vector will be `0xffffffff` if the input elements in `a` and
/// `b` are unordered (i.e., at least one of them is a NaN), or 0 otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cmpunord_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cmpunordps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cmpunord_ps(a: __m128, b: __m128) -> __m128 {
    // Immediate 3 selects the UNORD (unordered) predicate. The check is
    // symmetric, so the swapped operand order does not affect the result.
    cmpps(b, a, 3)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if they are equal, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comieq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comieq_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the flag result as 0 or 1.
    comieq_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is less than the one from `b`, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comilt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comilt_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the less-than result as 0 or 1.
    comilt_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is less than or equal to the one from `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comile_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comile_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the less-than-or-equal result as 0 or 1.
    comile_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is greater than the one from `b`, or `0`
/// otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comigt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comigt_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the greater-than result as 0 or 1.
    comigt_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is greater than or equal to the one from `b`, or
/// `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comige_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comige_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the greater-than-or-equal result as 0 or 1.
    comige_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if they are **not** equal, or `0` otherwise.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_comineq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(comiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_comineq_ss(a: __m128, b: __m128) -> i32 {
    // COMISS-based scalar compare (asserted above); the intrinsic helper
    // materializes the not-equal result as 0 or 1.
    comineq_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if they are equal, or `0` otherwise. This instruction will not signal
/// an exception if either argument is a quiet NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomieq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomieq_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): same compare as `_mm_comieq_ss` but
    // non-signaling on quiet NaN inputs, per the doc comment.
    ucomieq_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is less than the one from `b`, or `0` otherwise.
/// This instruction will not signal an exception if either argument is a quiet
/// NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomilt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomilt_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): non-signaling on quiet NaN inputs.
    ucomilt_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is less than or equal to the one from `b`, or `0`
/// otherwise. This instruction will not signal an exception if either argument
/// is a quiet NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomile_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomile_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): non-signaling on quiet NaN inputs.
    ucomile_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is greater than the one from `b`, or `0`
/// otherwise. This instruction will not signal an exception if either argument
/// is a quiet NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomigt_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomigt_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): non-signaling on quiet NaN inputs.
    ucomigt_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if the value from `a` is greater than or equal to the one from `b`, or
/// `0` otherwise. This instruction will not signal an exception if either
/// argument is a quiet NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomige_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomige_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): non-signaling on quiet NaN inputs.
    ucomige_ss(a, b)
}
/// Compares two 32-bit floats from the low-order bits of `a` and `b`. Returns
/// `1` if they are **not** equal, or `0` otherwise. This instruction will not
/// signal an exception if either argument is a quiet NaN.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_ucomineq_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ucomiss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_ucomineq_ss(a: __m128, b: __m128) -> i32 {
    // UCOMISS variant (asserted above): non-signaling on quiet NaN inputs.
    ucomineq_ss(a, b)
}
/// Converts the lowest 32 bit float in the input vector to a 32 bit integer.
///
/// The result is rounded according to the current rounding mode. If the result
/// cannot be represented as a 32 bit integer the result will be `0x8000_0000`
/// (`i32::MIN`) or an invalid operation floating point exception if
/// unmasked (see [`_mm_setcsr`](fn._mm_setcsr.html)).
///
/// This corresponds to the `CVTSS2SI` instruction (with 32 bit output).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_si32)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvtss2si))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvtss_si32(a: __m128) -> i32 {
    // Rounding mode comes from MXCSR, as described in the doc comment.
    cvtss2si(a)
}
/// Alias for [`_mm_cvtss_si32`](fn._mm_cvtss_si32.html).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_ss2si)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvtss2si))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvt_ss2si(a: __m128) -> i32 {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_cvtss_si32(a)
}
/// Converts the lowest 32 bit float in the input vector to a 32 bit integer
/// with truncation.
///
/// The result is rounded always using truncation (round towards zero). If the
/// result cannot be represented as a 32 bit integer the result will be
/// `0x8000_0000` (`i32::MIN`) or an invalid operation floating point
/// exception if unmasked (see [`_mm_setcsr`](fn._mm_setcsr.html)).
///
/// This corresponds to the `CVTTSS2SI` instruction (with 32 bit output).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvttss_si32)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvttss2si))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvttss_si32(a: __m128) -> i32 {
    // Truncating variant: ignores the MXCSR rounding mode (see doc comment).
    cvttss2si(a)
}
/// Alias for [`_mm_cvttss_si32`](fn._mm_cvttss_si32.html).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtt_ss2si)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvttss2si))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvtt_ss2si(a: __m128) -> i32 {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_cvttss_si32(a)
}
/// Extracts the lowest 32 bit float from the input vector.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtss_f32)
#[inline]
#[target_feature(enable = "sse")]
// No point in using assert_instrs. In Unix x86_64 calling convention this is a
// no-op, and on Windows it's just a `mov`.
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvtss_f32(a: __m128) -> f32 {
    // Lane 0 is the lowest 32 bits of the vector.
    simd_extract(a, 0)
}
/// Converts a 32 bit integer to a 32 bit float. The result vector is the input
/// vector `a` with the lowest 32 bit float replaced by the converted integer.
///
/// This intrinsic corresponds to the `CVTSI2SS` instruction (with 32 bit
/// input).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvtsi32_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvtsi2ss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvtsi32_ss(a: __m128, b: i32) -> __m128 {
    // Only lane 0 is replaced; lanes 1-3 of `a` pass through unchanged
    // (see doc comment).
    cvtsi2ss(a, b)
}
/// Alias for [`_mm_cvtsi32_ss`](fn._mm_cvtsi32_ss.html).
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_cvt_si2ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(cvtsi2ss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_cvt_si2ss(a: __m128, b: i32) -> __m128 {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_cvtsi32_ss(a, b)
}
/// Construct a `__m128` with the lowest element set to `a` and the rest set to
/// zero.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_set_ss(a: f32) -> __m128 {
    // Lanes 1-3 are explicitly zeroed.
    __m128(a, 0.0, 0.0, 0.0)
}
/// Construct a `__m128` with all elements set to `a`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set1_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(shufps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_set1_ps(a: f32) -> __m128 {
    // Broadcast (splat) `a` into all four lanes.
    __m128(a, a, a, a)
}
/// Alias for [`_mm_set1_ps`](fn._mm_set1_ps.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ps1)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(shufps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_set_ps1(a: f32) -> __m128 {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_set1_ps(a)
}
/// Construct a `__m128` from four floating point values highest to lowest.
///
/// Note that `a` will be the highest 32 bits of the result, and `d` the
/// lowest. This matches the standard way of writing bit patterns on x86:
///
/// ```text
/// bit 127 .. 96 95 .. 64 63 .. 32 31 .. 0
/// +---------+---------+---------+---------+
/// | a | b | c | d | result
/// +---------+---------+---------+---------+
/// ```
///
/// Alternatively:
///
/// ```text
/// let v = _mm_set_ps(d, c, b, a);
/// ```
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_set_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(unpcklps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_set_ps(a: f32, b: f32, c: f32, d: f32) -> __m128 {
    // The constructor takes lanes lowest-first, hence the reversed argument
    // order relative to this intrinsic's highest-first convention.
    __m128(d, c, b, a)
}
/// Construct a `__m128` from four floating point values lowest to highest.
///
/// This matches the memory order of `__m128`, i.e., `a` will be the lowest 32
/// bits of the result, and `d` the highest.
///
/// ```text
/// assert_eq!(__m128::new(a, b, c, d), _mm_setr_ps(a, b, c, d));
/// ```
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setr_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(
    all(test, any(target_os = "windows", target_arch = "x86_64")),
    assert_instr(unpcklps)
)]
// On a 32-bit architecture on non-Windows it just copies the operands from the stack.
#[cfg_attr(
    all(test, all(not(target_os = "windows"), target_arch = "x86")),
    assert_instr(movaps)
)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_setr_ps(a: f32, b: f32, c: f32, d: f32) -> __m128 {
    // Arguments are already in lowest-first (memory) order; pass them through.
    __m128(a, b, c, d)
}
/// Construct a `__m128` with all elements initialized to zero.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setzero_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(xorps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_setzero_ps() -> __m128 {
    // Compiles down to `xorps` (asserted above).
    __m128(0.0, 0.0, 0.0, 0.0)
}
/// A utility function for creating masks to use with Intel shuffle and
/// permute intrinsics.
///
/// Packs the four 2-bit lane selectors `z`, `y`, `x`, `w` (highest to lowest)
/// into a single immediate: `w` occupies bits 0-1, `x` bits 2-3, `y` bits 4-5,
/// and `z` bits 6-7.
#[inline]
#[allow(non_snake_case)]
#[unstable(feature = "stdarch", issue = "27731")]
pub const fn _MM_SHUFFLE(z: u32, y: u32, x: u32, w: u32) -> i32 {
    // Compose the selectors lowest-first; the result fits in 8 bits when each
    // selector is in 0..=3.
    (w | (x << 2) | (y << 4) | (z << 6)) as i32
}
/// Shuffles packed single-precision (32-bit) floating-point elements in `a` and
/// `b` using `MASK`.
///
/// The lower half of result takes values from `a` and the higher half from
/// `b`. The mask is split into four 2-bit fields, each indexing one element of
/// the inputs.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_ps)
///
/// Note that there appears to be a mistake within Intel's Intrinsics Guide.
/// `_mm_shuffle_ps` is supposed to take an `i32` instead of a `u32`
/// as is the case for [other shuffle intrinsics](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_).
/// Performing an implicit type conversion between an unsigned integer and a signed integer
/// does not cause a problem in C, however Rust's commitment to strong typing does not allow this.
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(shufps, MASK = 3))]
#[rustc_legacy_const_generics(2)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_shuffle_ps<const MASK: i32>(a: __m128, b: __m128) -> __m128 {
    static_assert_imm8!(MASK);
    // Shuffle indices 0-3 select lanes of `a`; adding 4 maps the upper two
    // 2-bit fields into `b`'s lanes.
    simd_shuffle4!(
        a,
        b,
        <const MASK: i32> [
            MASK as u32 & 0b11,
            (MASK as u32 >> 2) & 0b11,
            ((MASK as u32 >> 4) & 0b11) + 4,
            ((MASK as u32 >> 6) & 0b11) + 4,
        ],
    )
}
/// Unpacks and interleave single-precision (32-bit) floating-point elements
/// from the higher half of `a` and `b`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpackhi_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(unpckhps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_unpackhi_ps(a: __m128, b: __m128) -> __m128 {
    // Indices 4-7 address `b`'s lanes, so the result is [a2, b2, a3, b3].
    simd_shuffle4!(a, b, [2, 6, 3, 7])
}
/// Unpacks and interleave single-precision (32-bit) floating-point elements
/// from the lower half of `a` and `b`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_unpacklo_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(unpcklps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_unpacklo_ps(a: __m128, b: __m128) -> __m128 {
    // Indices 4-7 address `b`'s lanes, so the result is [a0, b0, a1, b1].
    simd_shuffle4!(a, b, [0, 4, 1, 5])
}
/// Combine higher half of `a` and `b`. The higher half of `b` occupies the
/// lower half of result.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movehl_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(all(test, not(target_os = "windows")), assert_instr(movhlps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_movehl_ps(a: __m128, b: __m128) -> __m128 {
    // TODO; figure why this is a different instruction on Windows?
    // Indices 6,7 are `b`'s high lanes; result is [b2, b3, a2, a3].
    simd_shuffle4!(a, b, [6, 7, 2, 3])
}
/// Combine lower half of `a` and `b`. The lower half of `b` occupies the
/// higher half of result.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movelh_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(all(test, not(target_os = "windows")), assert_instr(movlhps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_movelh_ps(a: __m128, b: __m128) -> __m128 {
    // Indices 4,5 are `b`'s low lanes; result is [a0, a1, b0, b1].
    simd_shuffle4!(a, b, [0, 1, 4, 5])
}
/// Returns a mask of the most significant bit of each element in `a`.
///
/// The mask is stored in the 4 least significant bits of the return value.
/// All other bits are set to `0`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_movemask_ps)
#[inline]
#[target_feature(enable = "sse")]
// FIXME: LLVM9 trunk has the following bug:
// https://github.com/rust-lang/stdarch/issues/794
// so we only temporarily test this on i686 and x86_64 but not on i586:
#[cfg_attr(all(test, target_feature = "sse2"), assert_instr(movmskps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_movemask_ps(a: __m128) -> i32 {
    // The intrinsic helper gathers lane sign bits into the low 4 bits.
    movmskps(a)
}
/// Construct a `__m128` with the lowest element read from `p` and the other
/// elements set to zero.
///
/// This corresponds to instructions `VMOVSS` / `MOVSS`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_load_ss(p: *const f32) -> __m128 {
    // Single 4-byte read; lanes 1-3 are zeroed.
    __m128(*p, 0.0, 0.0, 0.0)
}
/// Construct a `__m128` by duplicating the value read from `p` into all
/// elements.
///
/// This corresponds to instructions `VMOVSS` / `MOVSS` followed by some
/// shuffling.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load1_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_load1_ps(p: *const f32) -> __m128 {
    // Read once, then splat the value into all four lanes.
    let a = *p;
    __m128(a, a, a, a)
}
/// Alias for [`_mm_load1_ps`](fn._mm_load1_ps.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ps1)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_load_ps1(p: *const f32) -> __m128 {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_load1_ps(p)
}
/// Loads four `f32` values from *aligned* memory into a `__m128`. If the
/// pointer is not aligned to a 128-bit boundary (16 bytes) a general
/// protection fault will be triggered (fatal program crash).
///
/// Use [`_mm_loadu_ps`](fn._mm_loadu_ps.html) for potentially unaligned
/// memory.
///
/// This corresponds to instructions `VMOVAPS` / `MOVAPS`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_load_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn _mm_load_ps(p: *const f32) -> __m128 {
    // The cast-and-deref requires 16-byte alignment of `p`; that is the
    // caller's contract (see doc comment above).
    *(p as *const __m128)
}
/// Loads four `f32` values from memory into a `__m128`. There are no
/// restrictions on memory alignment. For aligned memory
/// [`_mm_load_ps`](fn._mm_load_ps.html)
/// may be faster.
///
/// This corresponds to instructions `VMOVUPS` / `MOVUPS`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movups))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_loadu_ps(p: *const f32) -> __m128 {
    // Note: Using `*p` would require `f32` alignment, but `movups` has no
    // alignment restrictions; a byte-wise copy avoids any alignment
    // requirement on `p`.
    let mut dst = _mm_undefined_ps();
    ptr::copy_nonoverlapping(
        p as *const u8,
        &mut dst as *mut __m128 as *mut u8,
        mem::size_of::<__m128>(),
    );
    dst
}
/// Loads four `f32` values from aligned memory into a `__m128` in reverse
/// order.
///
/// If the pointer is not aligned to a 128-bit boundary (16 bytes) a general
/// protection fault will be triggered (fatal program crash).
///
/// Functionally equivalent to the following code sequence (assuming `p`
/// satisfies the alignment restrictions):
///
/// ```text
/// let a0 = *p;
/// let a1 = *p.offset(1);
/// let a2 = *p.offset(2);
/// let a3 = *p.offset(3);
/// __m128::new(a3, a2, a1, a0)
/// ```
///
/// This corresponds to instructions `VMOVAPS` / `MOVAPS` followed by some
/// shuffling.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadr_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_loadr_ps(p: *const f32) -> __m128 {
    // Aligned 16-byte load, then reverse the lane order.
    let a = _mm_load_ps(p);
    simd_shuffle4!(a, a, [3, 2, 1, 0])
}
/// Loads unaligned 64-bits of integer data from memory into new vector.
///
/// `mem_addr` does not need to be aligned on any particular boundary.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_loadu_si64)
#[inline]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86_mm_loadu_si64", since = "1.46.0")]
pub unsafe fn _mm_loadu_si64(mem_addr: *const u8) -> __m128i {
    // Unaligned 64-bit read into the low lane; the high 64 bits are zeroed.
    transmute(i64x2(ptr::read_unaligned(mem_addr as *const i64), 0))
}
/// Stores the lowest 32 bit float of `a` into memory.
///
/// This intrinsic corresponds to the `MOVSS` instruction.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_store_ss(p: *mut f32, a: __m128) {
    // Write lane 0 only; the other lanes are not stored.
    *p = simd_extract(a, 0);
}
/// Stores the lowest 32 bit float of `a` repeated four times into *aligned*
/// memory.
///
/// If the pointer is not aligned to a 128-bit boundary (16 bytes) a general
/// protection fault will be triggered (fatal program crash).
///
/// Functionally equivalent to the following code sequence (assuming `p`
/// satisfies the alignment restrictions):
///
/// ```text
/// let x = a.extract(0);
/// *p = x;
/// *p.offset(1) = x;
/// *p.offset(2) = x;
/// *p.offset(3) = x;
/// ```
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store1_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn _mm_store1_ps(p: *mut f32, a: __m128) {
    // Splat lane 0 to all lanes, then do a single aligned 16-byte store.
    let b: __m128 = simd_shuffle4!(a, a, [0, 0, 0, 0]);
    *(p as *mut __m128) = b;
}
/// Alias for [`_mm_store1_ps`](fn._mm_store1_ps.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_ps1)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_store_ps1(p: *mut f32, a: __m128) {
    // Legacy spelling; simply forwards to the canonical intrinsic.
    _mm_store1_ps(p, a);
}
/// Stores four 32-bit floats into *aligned* memory.
///
/// If the pointer is not aligned to a 128-bit boundary (16 bytes) a general
/// protection fault will be triggered (fatal program crash).
///
/// Use [`_mm_storeu_ps`](fn._mm_storeu_ps.html) for potentially unaligned
/// memory.
///
/// This corresponds to instructions `VMOVAPS` / `MOVAPS`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_store_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn _mm_store_ps(p: *mut f32, a: __m128) {
    // The cast-and-assign requires 16-byte alignment of `p`; that is the
    // caller's contract (see doc comment above).
    *(p as *mut __m128) = a;
}
/// Stores four 32-bit floats into memory. There are no restrictions on memory
/// alignment. For aligned memory [`_mm_store_ps`](fn._mm_store_ps.html) may be
/// faster.
///
/// This corresponds to instructions `VMOVUPS` / `MOVUPS`.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storeu_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movups))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_storeu_ps(p: *mut f32, a: __m128) {
    // Byte-wise copy imposes no alignment requirement on `p`.
    ptr::copy_nonoverlapping(
        &a as *const __m128 as *const u8,
        p as *mut u8,
        mem::size_of::<__m128>(),
    );
}
/// Stores four 32-bit floats into *aligned* memory in reverse order.
///
/// If the pointer is not aligned to a 128-bit boundary (16 bytes) a general
/// protection fault will be triggered (fatal program crash).
///
/// Functionally equivalent to the following code sequence (assuming `p`
/// satisfies the alignment restrictions):
///
/// ```text
/// *p = a.extract(3);
/// *p.offset(1) = a.extract(2);
/// *p.offset(2) = a.extract(1);
/// *p.offset(3) = a.extract(0);
/// ```
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_storer_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movaps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn _mm_storer_ps(p: *mut f32, a: __m128) {
    // Reverse the lane order, then do a single aligned 16-byte store.
    let b: __m128 = simd_shuffle4!(a, a, [3, 2, 1, 0]);
    *(p as *mut __m128) = b;
}
/// Returns a `__m128` with the first component from `b` and the remaining
/// components from `a`.
///
/// In other words for any `a` and `b`:
/// ```text
/// _mm_move_ss(a, b) == a.replace(0, b.extract(0))
/// ```
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_move_ss)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movss))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_move_ss(a: __m128, b: __m128) -> __m128 {
    // Index 4 selects `b`'s lane 0; indices 1-3 keep `a`'s upper lanes.
    simd_shuffle4!(a, b, [4, 1, 2, 3])
}
/// Performs a serializing operation on all store-to-memory instructions that
/// were issued prior to this instruction.
///
/// Guarantees that every store instruction that precedes, in program order, is
/// globally visible before any store instruction which follows the fence in
/// program order.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_sfence)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(sfence))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_sfence() {
    // Emits the `sfence` store-fence instruction (asserted above).
    sfence()
}
/// Gets the unsigned 32-bit value of the MXCSR control and status register.
///
/// For more info see [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_getcsr)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(stmxcsr))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_getcsr() -> u32 {
    // STMXCSR writes MXCSR into the provided 32-bit memory operand; the
    // helper takes a byte pointer, hence the cast.
    let mut result = 0_i32;
    stmxcsr((&mut result) as *mut _ as *mut i8);
    result as u32
}
/// Sets the MXCSR register with the 32-bit unsigned integer value.
///
/// This register controls how SIMD instructions handle floating point
/// operations. Modifying this register only affects the current thread.
///
/// It contains several groups of flags:
///
/// * *Exception flags* report which exceptions occurred since last they were
/// reset.
///
/// * *Masking flags* can be used to mask (ignore) certain exceptions. By
/// default
/// these flags are all set to 1, so all exceptions are masked. When
/// an exception is masked, the processor simply sets the exception flag and
/// continues the operation. If the exception is unmasked, the flag is also set
/// but additionally an exception handler is invoked.
///
/// * *Rounding mode flags* control the rounding mode of floating point
/// instructions.
///
/// * The *denormals-are-zero mode flag* turns all numbers which would be
/// denormalized (exponent bits are all zeros) into zeros.
///
/// ## Exception Flags
///
/// * `_MM_EXCEPT_INVALID`: An invalid operation was performed (e.g., dividing
/// Infinity by Infinity).
///
/// * `_MM_EXCEPT_DENORM`: An operation attempted to operate on a denormalized
/// number. Mainly this can cause loss of precision.
///
/// * `_MM_EXCEPT_DIV_ZERO`: Division by zero occurred.
///
/// * `_MM_EXCEPT_OVERFLOW`: A numeric overflow exception occurred, i.e., a
/// result was too large to be represented (e.g., an `f32` with absolute
/// value
/// greater than `2^128`).
///
/// * `_MM_EXCEPT_UNDERFLOW`: A numeric underflow exception occurred, i.e., a
/// result was too small to be represented in a normalized way (e.g., an
/// `f32`
/// with absolute value smaller than `2^-126`.)
///
/// * `_MM_EXCEPT_INEXACT`: An inexact-result exception occurred (a.k.a.
/// precision exception). This means some precision was lost due to rounding.
/// For example, the fraction `1/3` cannot be represented accurately in a
/// 32 or 64 bit float and computing it would cause this exception to be
/// raised. Precision exceptions are very common, so they are usually masked.
///
/// Exception flags can be read and set using the convenience functions
/// `_MM_GET_EXCEPTION_STATE` and `_MM_SET_EXCEPTION_STATE`. For example, to
/// check if an operation caused some overflow:
///
/// ```rust,ignore
/// _MM_SET_EXCEPTION_STATE(0); // clear all exception flags
/// // perform calculations
/// if _MM_GET_EXCEPTION_STATE() & _MM_EXCEPT_OVERFLOW != 0 {
/// // handle overflow
/// }
/// ```
///
/// ## Masking Flags
///
/// There is one masking flag for each exception flag: `_MM_MASK_INVALID`,
/// `_MM_MASK_DENORM`, `_MM_MASK_DIV_ZERO`, `_MM_MASK_OVERFLOW`,
/// `_MM_MASK_UNDERFLOW`, `_MM_MASK_INEXACT`.
///
/// A single masking bit can be set via
///
/// ```rust,ignore
/// _MM_SET_EXCEPTION_MASK(_MM_MASK_UNDERFLOW);
/// ```
///
/// However, since mask bits are by default all set to 1, it is more common to
/// want to *disable* certain bits. For example, to unmask the underflow
/// exception, use:
///
/// ```rust,ignore
/// _mm_setcsr(_mm_getcsr() & !_MM_MASK_UNDERFLOW); // unmask underflow
/// exception
/// ```
///
/// Warning: an unmasked exception will cause an exception handler to be
/// called.
/// The standard handler will simply terminate the process. So, in this case
/// any underflow exception would terminate the current process with something
/// like `signal: 8, SIGFPE: erroneous arithmetic operation`.
///
/// ## Rounding Mode
///
/// The rounding mode is described using two bits. It can be read and set using
/// the convenience wrappers `_MM_GET_ROUNDING_MODE()` and
/// `_MM_SET_ROUNDING_MODE(mode)`.
///
/// The rounding modes are:
///
/// * `_MM_ROUND_NEAREST`: (default) Round to closest to the infinite precision
/// value. If two values are equally close, round to even (i.e., least
/// significant bit will be zero).
///
/// * `_MM_ROUND_DOWN`: Round toward negative Infinity.
///
/// * `_MM_ROUND_UP`: Round toward positive Infinity.
///
/// * `_MM_ROUND_TOWARD_ZERO`: Round towards zero (truncate).
///
/// Example:
///
/// ```rust,ignore
/// _MM_SET_ROUNDING_MODE(_MM_ROUND_DOWN)
/// ```
///
/// ## Denormals-are-zero/Flush-to-zero Mode
///
/// If this bit is set, values that would be denormalized will be set to zero
/// instead. This is turned off by default.
///
/// You can read and enable/disable this mode via the helper functions
/// `_MM_GET_FLUSH_ZERO_MODE()` and `_MM_SET_FLUSH_ZERO_MODE()`:
///
/// ```rust,ignore
/// _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_OFF); // turn off (default)
/// _MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON); // turn on
/// ```
///
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_setcsr)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(ldmxcsr))]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_setcsr(val: u32) {
    // `ldmxcsr` loads the 32-bit MXCSR register from memory, so hand the
    // LLVM intrinsic a pointer to the caller's new control/status word.
    let src = &val as *const u32 as *const i8;
    ldmxcsr(src);
}
/// Exception-state flag: an invalid operation occurred (e.g., `Inf - Inf`).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_INVALID: u32 = 0x0001;
/// Exception-state flag: an operation on a denormalized number occurred.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_DENORM: u32 = 0x0002;
/// Exception-state flag: a division by zero occurred.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_DIV_ZERO: u32 = 0x0004;
/// Exception-state flag: a numeric overflow occurred.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_OVERFLOW: u32 = 0x0008;
/// Exception-state flag: a numeric underflow occurred.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_UNDERFLOW: u32 = 0x0010;
/// Exception-state flag: an inexact result (precision loss) occurred.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_INEXACT: u32 = 0x0020;
/// Bitmask selecting all six exception-state flags above.
///
/// See [`_MM_GET_EXCEPTION_STATE`](fn._MM_GET_EXCEPTION_STATE.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_EXCEPT_MASK: u32 = 0x003f;
/// Exception-mask bit: when set, invalid-operation exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_INVALID: u32 = 0x0080;
/// Exception-mask bit: when set, denormal exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_DENORM: u32 = 0x0100;
/// Exception-mask bit: when set, divide-by-zero exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_DIV_ZERO: u32 = 0x0200;
/// Exception-mask bit: when set, overflow exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_OVERFLOW: u32 = 0x0400;
/// Exception-mask bit: when set, underflow exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_UNDERFLOW: u32 = 0x0800;
/// Exception-mask bit: when set, inexact-result exceptions do not trap.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_INEXACT: u32 = 0x1000;
/// Bitmask selecting all six exception-mask bits above.
///
/// See [`_MM_GET_EXCEPTION_MASK`](fn._MM_GET_EXCEPTION_MASK.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_MASK_MASK: u32 = 0x1f80;
/// Rounding mode: round to nearest, ties to even (the default).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_ROUND_NEAREST: u32 = 0x0000;
/// Rounding mode: round toward negative infinity.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_ROUND_DOWN: u32 = 0x2000;
/// Rounding mode: round toward positive infinity.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_ROUND_UP: u32 = 0x4000;
/// Rounding mode: round toward zero (truncate).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_ROUND_TOWARD_ZERO: u32 = 0x6000;
/// Bitmask selecting the two rounding-mode bits.
///
/// See [`_MM_GET_ROUNDING_MODE`](fn._MM_GET_ROUNDING_MODE.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_ROUND_MASK: u32 = 0x6000;
/// Bitmask selecting the flush-to-zero bit.
///
/// See [`_MM_GET_FLUSH_ZERO_MODE`](fn._MM_GET_FLUSH_ZERO_MODE.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_FLUSH_ZERO_MASK: u32 = 0x8000;
/// Flush-to-zero enabled: denormal results are replaced with zero.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_FLUSH_ZERO_ON: u32 = 0x8000;
/// Flush-to-zero disabled (the default).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_FLUSH_ZERO_OFF: u32 = 0x0000;
/// Reads the exception-mask bits (`_MM_MASK_*`) from the MXCSR register.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_EXCEPTION_MASK)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_GET_EXCEPTION_MASK() -> u32 {
    // Keep only the six exception-mask bits of the control/status word.
    let csr = _mm_getcsr();
    csr & _MM_MASK_MASK
}
/// Reads the sticky exception-state flags (`_MM_EXCEPT_*`) from MXCSR.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_EXCEPTION_STATE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_GET_EXCEPTION_STATE() -> u32 {
    // Keep only the six exception-state flags of the control/status word.
    let csr = _mm_getcsr();
    csr & _MM_EXCEPT_MASK
}
/// Reads the flush-to-zero bit (`_MM_FLUSH_ZERO_ON`/`OFF`) from MXCSR.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_FLUSH_ZERO_MODE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_GET_FLUSH_ZERO_MODE() -> u32 {
    // Keep only the flush-to-zero bit of the control/status word.
    let csr = _mm_getcsr();
    csr & _MM_FLUSH_ZERO_MASK
}
/// Reads the two rounding-mode bits (`_MM_ROUND_*`) from MXCSR.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_GET_ROUNDING_MODE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_GET_ROUNDING_MODE() -> u32 {
    // Keep only the two rounding-mode bits of the control/status word.
    let csr = _mm_getcsr();
    csr & _MM_ROUND_MASK
}
/// Replaces the exception-mask bits of MXCSR with `x`.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_EXCEPTION_MASK)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_SET_EXCEPTION_MASK(x: u32) {
    // Read-modify-write: drop the old mask bits, then merge in `x`.
    let cleared = _mm_getcsr() & !_MM_MASK_MASK;
    _mm_setcsr(cleared | x)
}
/// Replaces the sticky exception-state flags of MXCSR with `x`.
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_EXCEPTION_STATE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_SET_EXCEPTION_STATE(x: u32) {
    // Read-modify-write: drop the old state flags, then merge in `x`.
    let cleared = _mm_getcsr() & !_MM_EXCEPT_MASK;
    _mm_setcsr(cleared | x)
}
/// Replaces the flush-to-zero bit of MXCSR with `x`
/// (`_MM_FLUSH_ZERO_ON` or `_MM_FLUSH_ZERO_OFF`).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_FLUSH_ZERO_MODE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_SET_FLUSH_ZERO_MODE(x: u32) {
    // Read-modify-write: clear the flush-to-zero bit, then install the
    // caller's setting. (A leftover commented-out debug print was removed;
    // this now mirrors the other `_MM_SET_*` helpers.)
    _mm_setcsr((_mm_getcsr() & !_MM_FLUSH_ZERO_MASK) | x)
}
/// Replaces the two rounding-mode bits of MXCSR with `x` (`_MM_ROUND_*`).
///
/// See [`_mm_setcsr`](fn._mm_setcsr.html)
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_SET_ROUNDING_MODE)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_SET_ROUNDING_MODE(x: u32) {
    // Read-modify-write: drop the old rounding bits, then merge in `x`.
    let cleared = _mm_getcsr() & !_MM_ROUND_MASK;
    _mm_setcsr(cleared | x)
}
/// Prefetch hint: fetch into all levels of the cache hierarchy.
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_T0: i32 = 3;
/// Prefetch hint: fetch into L2 and higher.
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_T1: i32 = 2;
/// Prefetch hint: fetch into L3 and higher (or an implementation-specific
/// choice).
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_T2: i32 = 1;
/// Prefetch hint: non-temporal access — avoid polluting the cache hierarchy.
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_NTA: i32 = 0;
/// Prefetch hint: like `_MM_HINT_T0`, but anticipating a write (bit 2 set).
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_ET0: i32 = 7;
/// Prefetch hint: like `_MM_HINT_T1`, but anticipating a write (bit 2 set).
///
/// See [`_mm_prefetch`](fn._mm_prefetch.html).
#[stable(feature = "simd_x86", since = "1.27.0")]
pub const _MM_HINT_ET1: i32 = 6;
/// Fetch the cache line that contains address `p` using the given `STRATEGY`.
///
/// The `STRATEGY` must be one of:
///
/// * [`_MM_HINT_T0`](constant._MM_HINT_T0.html): Fetch into all levels of the
/// cache hierarchy.
///
/// * [`_MM_HINT_T1`](constant._MM_HINT_T1.html): Fetch into L2 and higher.
///
/// * [`_MM_HINT_T2`](constant._MM_HINT_T2.html): Fetch into L3 and higher or
/// an implementation-specific choice (e.g., L2 if there is no L3).
///
/// * [`_MM_HINT_NTA`](constant._MM_HINT_NTA.html): Fetch data using the
/// non-temporal access (NTA) hint. It may be a place closer than main memory
/// but outside of the cache hierarchy. This is used to reduce access latency
/// without polluting the cache.
///
/// * [`_MM_HINT_ET0`](constant._MM_HINT_ET0.html) and
/// [`_MM_HINT_ET1`](constant._MM_HINT_ET1.html) are similar to `_MM_HINT_T0`
/// and `_MM_HINT_T1` but indicate an anticipation to write to the address.
///
/// The actual implementation depends on the particular CPU. This instruction
/// is considered a hint, so the CPU is also free to simply ignore the request.
///
/// The amount of prefetched data depends on the cache line size of the
/// specific CPU, but it will be at least 32 bytes.
///
/// Common caveats:
///
/// * Most modern CPUs already automatically prefetch data based on predicted
/// access patterns.
///
/// * Data is usually not fetched if this would cause a TLB miss or a page
/// fault.
///
/// * Too much prefetching can cause unnecessary cache evictions.
///
/// * Prefetching may also fail if there are not enough memory-subsystem
/// resources (e.g., request buffers).
///
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_prefetch)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(prefetcht0, STRATEGY = _MM_HINT_T0))]
#[cfg_attr(test, assert_instr(prefetcht1, STRATEGY = _MM_HINT_T1))]
#[cfg_attr(test, assert_instr(prefetcht2, STRATEGY = _MM_HINT_T2))]
#[cfg_attr(test, assert_instr(prefetchnta, STRATEGY = _MM_HINT_NTA))]
#[rustc_legacy_const_generics(1)]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn | <const STRATEGY: i32>(p: *const i8) {
// We use the `llvm.prefetch` instrinsic with `cache type` = 1 (data cache).
// `locality` and `rw` are based on our `STRATEGY`.
prefetch(p, (STRATEGY >> 2) & 1, STRATEGY & 3, 1);
}
/// Returns vector of type __m128 with undefined elements.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_undefined_ps)
#[inline]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _mm_undefined_ps() -> __m128 {
_mm_set1_ps(0.0)
}
/// Transpose the 4x4 matrix formed by 4 rows of __m128 in place.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_MM_TRANSPOSE4_PS)
#[inline]
#[allow(non_snake_case)]
#[target_feature(enable = "sse")]
#[stable(feature = "simd_x86", since = "1.27.0")]
pub unsafe fn _MM_TRANSPOSE4_PS(
    row0: &mut __m128,
    row1: &mut __m128,
    row2: &mut __m128,
    row3: &mut __m128,
) {
    // Step 1: interleave the low/high 64-bit halves of each pair of rows.
    // With rows [a0..a3], [b0..b3], [c0..c3], [d0..d3]:
    let lo01 = _mm_unpacklo_ps(*row0, *row1); // [a0 b0 a1 b1]
    let hi01 = _mm_unpackhi_ps(*row0, *row1); // [a2 b2 a3 b3]
    let lo23 = _mm_unpacklo_ps(*row2, *row3); // [c0 d0 c1 d1]
    let hi23 = _mm_unpackhi_ps(*row2, *row3); // [c2 d2 c3 d3]
    // Step 2: recombine 64-bit halves to produce the transposed rows.
    *row0 = _mm_movelh_ps(lo01, lo23); // [a0 b0 c0 d0]
    *row1 = _mm_movehl_ps(lo23, lo01); // [a1 b1 c1 d1]
    *row2 = _mm_movelh_ps(hi01, hi23); // [a2 b2 c2 d2]
    *row3 = _mm_movehl_ps(hi23, hi01); // [a3 b3 c3 d3]
}
// Raw bindings to the LLVM intrinsics that back the SSE operations above.
// The `link_name`s are LLVM intrinsic names, not C symbols.
#[allow(improper_ctypes)]
extern "C" {
    // Scalar/packed arithmetic and approximation instructions.
    #[link_name = "llvm.x86.sse.add.ss"]
    fn addss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.sub.ss"]
    fn subss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.mul.ss"]
    fn mulss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.div.ss"]
    fn divss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.sqrt.ss"]
    fn sqrtss(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.sqrt.ps"]
    fn sqrtps(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.rcp.ss"]
    fn rcpss(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.rcp.ps"]
    fn rcpps(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.rsqrt.ss"]
    fn rsqrtss(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.rsqrt.ps"]
    fn rsqrtps(a: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.min.ss"]
    fn minss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.min.ps"]
    fn minps(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.max.ss"]
    fn maxss(a: __m128, b: __m128) -> __m128;
    #[link_name = "llvm.x86.sse.max.ps"]
    fn maxps(a: __m128, b: __m128) -> __m128;
    // Mask extraction and comparisons.
    #[link_name = "llvm.x86.sse.movmsk.ps"]
    fn movmskps(a: __m128) -> i32;
    #[link_name = "llvm.x86.sse.cmp.ps"]
    fn cmpps(a: __m128, b: __m128, imm8: i8) -> __m128;
    #[link_name = "llvm.x86.sse.comieq.ss"]
    fn comieq_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.comilt.ss"]
    fn comilt_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.comile.ss"]
    fn comile_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.comigt.ss"]
    fn comigt_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.comige.ss"]
    fn comige_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.comineq.ss"]
    fn comineq_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomieq.ss"]
    fn ucomieq_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomilt.ss"]
    fn ucomilt_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomile.ss"]
    fn ucomile_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomigt.ss"]
    fn ucomigt_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomige.ss"]
    fn ucomige_ss(a: __m128, b: __m128) -> i32;
    #[link_name = "llvm.x86.sse.ucomineq.ss"]
    fn ucomineq_ss(a: __m128, b: __m128) -> i32;
    // int <-> float conversions.
    #[link_name = "llvm.x86.sse.cvtss2si"]
    fn cvtss2si(a: __m128) -> i32;
    #[link_name = "llvm.x86.sse.cvttss2si"]
    fn cvttss2si(a: __m128) -> i32;
    #[link_name = "llvm.x86.sse.cvtsi2ss"]
    fn cvtsi2ss(a: __m128, b: i32) -> __m128;
    // Fences, MXCSR access, and prefetching.
    #[link_name = "llvm.x86.sse.sfence"]
    fn sfence();
    #[link_name = "llvm.x86.sse.stmxcsr"]
    fn stmxcsr(p: *mut i8);
    #[link_name = "llvm.x86.sse.ldmxcsr"]
    fn ldmxcsr(p: *const i8);
    #[link_name = "llvm.prefetch"]
    fn prefetch(p: *const i8, rw: i32, loc: i32, ty: i32);
    #[link_name = "llvm.x86.sse.cmp.ss"]
    fn cmpss(a: __m128, b: __m128, imm8: i8) -> __m128;
}
/// Stores `a` into the memory at `mem_addr` using a non-temporal memory hint.
///
/// `mem_addr` must be aligned on a 16-byte boundary or a general-protection
/// exception _may_ be generated.
///
/// [Intel's documentation](https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_stream_ps)
#[inline]
#[target_feature(enable = "sse")]
#[cfg_attr(test, assert_instr(movntps))]
#[stable(feature = "simd_x86", since = "1.27.0")]
#[allow(clippy::cast_ptr_alignment)]
pub unsafe fn _mm_stream_ps(mem_addr: *mut f32, a: __m128) {
    // Non-temporal store (`movntps`): writes directly toward memory to avoid
    // polluting the cache with data that will not be re-read soon.
    intrinsics::nontemporal_store(mem_addr as *mut __m128, a);
}
#[cfg(test)]
mod tests {
use crate::{hint::black_box, mem::transmute};
use std::{boxed, f32::NAN};
use stdarch_test::simd_test;
use crate::core_arch::{simd::*, x86::*};
// Packed ("ps") and scalar ("ss") arithmetic. The `_ss` forms operate on the
// lowest lane only; the upper three lanes are copied through from `a`.
#[simd_test(enable = "sse")]
unsafe fn test_mm_add_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_add_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(-101.0, 25.0, 0.0, -15.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_add_ss() {
    // Note: `_mm_set_ps` takes its arguments in reverse lane order, so the
    // lowest lane here holds -10.0 / -5.0.
    let a = _mm_set_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_set_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_add_ss(a, b);
    assert_eq_m128(r, _mm_set_ps(-1.0, 5.0, 0.0, -15.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_sub_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_sub_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(99.0, -15.0, 0.0, -5.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_sub_ss() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_sub_ss(a, b);
    assert_eq_m128(r, _mm_setr_ps(99.0, 5.0, 0.0, -10.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_mul_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_mul_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(100.0, 100.0, 0.0, 50.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_mul_ss() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_mul_ss(a, b);
    assert_eq_m128(r, _mm_setr_ps(100.0, 5.0, 0.0, -10.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_div_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 2.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.2, -5.0);
    let r = _mm_div_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(0.01, 0.25, 10.0, 2.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_div_ss() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_div_ss(a, b);
    assert_eq_m128(r, _mm_setr_ps(0.01, 5.0, 0.0, -10.0));
}
// Square root and the approximate reciprocal / reciprocal-sqrt instructions.
// `rcp*`/`rsqrt*` are hardware approximations, so those tests compare with a
// relative tolerance instead of exact equality.
#[simd_test(enable = "sse")]
unsafe fn test_mm_sqrt_ss() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_sqrt_ss(a);
    let e = _mm_setr_ps(2.0, 13.0, 16.0, 100.0);
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_sqrt_ps() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_sqrt_ps(a);
    let e = _mm_setr_ps(2.0, 3.6055512, 4.0, 10.0);
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_rcp_ss() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_rcp_ss(a);
    let e = _mm_setr_ps(0.24993896, 13.0, 16.0, 100.0);
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_rcp_ps() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_rcp_ps(a);
    let e = _mm_setr_ps(0.24993896, 0.0769043, 0.06248474, 0.0099983215);
    // Relative error bound used for the hardware approximation.
    let rel_err = 0.00048828125;
    for i in 0..4 {
        assert_approx_eq!(get_m128(r, i), get_m128(e, i), 2. * rel_err);
    }
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_rsqrt_ss() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_rsqrt_ss(a);
    let e = _mm_setr_ps(0.49987793, 13.0, 16.0, 100.0);
    let rel_err = 0.00048828125;
    for i in 0..4 {
        assert_approx_eq!(get_m128(r, i), get_m128(e, i), 2. * rel_err);
    }
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_rsqrt_ps() {
    let a = _mm_setr_ps(4.0, 13.0, 16.0, 100.0);
    let r = _mm_rsqrt_ps(a);
    let e = _mm_setr_ps(0.49987793, 0.2772827, 0.24993896, 0.099990845);
    let rel_err = 0.00048828125;
    for i in 0..4 {
        assert_approx_eq!(get_m128(r, i), get_m128(e, i), 2. * rel_err);
    }
}
// Minimum / maximum, including the `-0.0` vs `0.0` behavior of `minps`.
#[simd_test(enable = "sse")]
unsafe fn test_mm_min_ss() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_min_ss(a, b);
    assert_eq_m128(r, _mm_setr_ps(-100.0, 5.0, 0.0, -10.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_min_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_min_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(-100.0, 5.0, 0.0, -10.0));
    // `_mm_min_ps` can **not** be implemented using the `simd_min` Rust
    // intrinsic: that one is lowered by the LLVM codegen backend to the
    // `llvm.minnum.v*` intrinsic, which doesn't specify how -0.0 is handled
    // and happens to behave differently from the x86 `minps` instruction.
    // With `llvm.minnum.v*`, `r1` would equal `a` and `r2` would equal `b`;
    // `minps` returns its second operand for equal inputs, as asserted below.
    let a = _mm_setr_ps(-0.0, 0.0, 0.0, 0.0);
    let b = _mm_setr_ps(0.0, 0.0, 0.0, 0.0);
    let r1: [u8; 16] = transmute(_mm_min_ps(a, b));
    let r2: [u8; 16] = transmute(_mm_min_ps(b, a));
    let a: [u8; 16] = transmute(a);
    let b: [u8; 16] = transmute(b);
    assert_eq!(r1, b);
    assert_eq!(r2, a);
    assert_ne!(a, b); // sanity check that -0.0 is actually present
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_max_ss() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_max_ss(a, b);
    assert_eq_m128(r, _mm_setr_ps(-1.0, 5.0, 0.0, -10.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_max_ps() {
    let a = _mm_setr_ps(-1.0, 5.0, 0.0, -10.0);
    let b = _mm_setr_ps(-100.0, 20.0, 0.0, -5.0);
    let r = _mm_max_ps(a, b);
    assert_eq_m128(r, _mm_setr_ps(-1.0, 20.0, 0.0, -5.0));
}
// Bitwise operations on the raw 128-bit lane contents. `black_box` prevents
// the compiler from constant-folding the operation away.
#[simd_test(enable = "sse")]
unsafe fn test_mm_and_ps() {
    let a = transmute(u32x4::splat(0b0011));
    let b = transmute(u32x4::splat(0b0101));
    let r = _mm_and_ps(*black_box(&a), *black_box(&b));
    let e = transmute(u32x4::splat(0b0001));
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_andnot_ps() {
    // andnot computes `!a & b`.
    let a = transmute(u32x4::splat(0b0011));
    let b = transmute(u32x4::splat(0b0101));
    let r = _mm_andnot_ps(*black_box(&a), *black_box(&b));
    let e = transmute(u32x4::splat(0b0100));
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_or_ps() {
    let a = transmute(u32x4::splat(0b0011));
    let b = transmute(u32x4::splat(0b0101));
    let r = _mm_or_ps(*black_box(&a), *black_box(&b));
    let e = transmute(u32x4::splat(0b0111));
    assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_xor_ps() {
    let a = transmute(u32x4::splat(0b0011));
    let b = transmute(u32x4::splat(0b0101));
    let r = _mm_xor_ps(*black_box(&a), *black_box(&b));
    let e = transmute(u32x4::splat(0b0110));
    assert_eq_m128(r, e);
}
// Scalar comparisons. These return an all-ones (!0) or all-zeros mask in the
// lowest lane and copy `a`'s upper three lanes through unchanged.
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpeq_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(-1.0, 5.0, 6.0, 7.0);
    let r: u32x4 = transmute(_mm_cmpeq_ss(a, b));
    let e: u32x4 = transmute(_mm_setr_ps(transmute(0u32), 2.0, 3.0, 4.0));
    assert_eq!(r, e);
    let b2 = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let r2: u32x4 = transmute(_mm_cmpeq_ss(a, b2));
    let e2: u32x4 = transmute(_mm_setr_ps(transmute(0xffffffffu32), 2.0, 3.0, 4.0));
    assert_eq!(r2, e2);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmplt_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = 0u32; // a.extract(0) < b.extract(0)
    let c1 = 0u32; // a.extract(0) < c.extract(0)
    let d1 = !0u32; // a.extract(0) < d.extract(0)
    let rb: u32x4 = transmute(_mm_cmplt_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmplt_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmplt_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmple_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = 0u32; // a.extract(0) <= b.extract(0)
    let c1 = !0u32; // a.extract(0) <= c.extract(0)
    let d1 = !0u32; // a.extract(0) <= d.extract(0)
    let rb: u32x4 = transmute(_mm_cmple_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmple_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmple_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpgt_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) > b.extract(0)
    let c1 = 0u32; // a.extract(0) > c.extract(0)
    let d1 = 0u32; // a.extract(0) > d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpgt_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpgt_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpgt_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpge_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) >= b.extract(0)
    let c1 = !0u32; // a.extract(0) >= c.extract(0)
    let d1 = 0u32; // a.extract(0) >= d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpge_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpge_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpge_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpneq_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) != b.extract(0)
    let c1 = 0u32; // a.extract(0) != c.extract(0)
    let d1 = !0u32; // a.extract(0) != d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpneq_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpneq_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpneq_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
// Negated scalar comparisons (cmpnlt/cmpnle/cmpngt/cmpnge). On non-NaN
// inputs these coincide with cmpge/cmpgt/cmple/cmplt respectively; they
// differ only when an operand is NaN (the negated forms return true then).
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnlt_ss() {
    // TODO: this test is exactly the same as for `_mm_cmpge_ss`, but there
    // must be a difference. It may have to do with behavior in the
    // presence of NaNs (signaling or quiet). If so, we should add tests
    // for those.
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) >= b.extract(0)
    let c1 = !0u32; // a.extract(0) >= c.extract(0)
    let d1 = 0u32; // a.extract(0) >= d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpnlt_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpnlt_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpnlt_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnle_ss() {
    // TODO: this test is exactly the same as for `_mm_cmpgt_ss`, but there
    // must be a difference. It may have to do with behavior in the presence
    // of NaNs (signaling or quiet). If so, we should add tests for those.
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) > b.extract(0)
    let c1 = 0u32; // a.extract(0) > c.extract(0)
    let d1 = 0u32; // a.extract(0) > d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpnle_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpnle_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpnle_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpngt_ss() {
    // TODO: this test is exactly the same as for `_mm_cmple_ss`, but there
    // must be a difference. It may have to do with behavior in the
    // presence of NaNs (signaling or quiet). If so, we should add tests
    // for those.
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = 0u32; // a.extract(0) <= b.extract(0)
    let c1 = !0u32; // a.extract(0) <= c.extract(0)
    let d1 = !0u32; // a.extract(0) <= d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpngt_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpngt_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpngt_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnge_ss() {
    // TODO: this test is exactly the same as for `_mm_cmplt_ss`, but there
    // must be a difference. It may have to do with behavior in the
    // presence of NaNs (signaling or quiet). If so, we should add tests
    // for those.
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(1.0, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = 0u32; // a.extract(0) < b.extract(0)
    let c1 = 0u32; // a.extract(0) < c.extract(0)
    let d1 = !0u32; // a.extract(0) < d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpnge_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpnge_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpnge_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
// Ordered / unordered scalar comparisons: "ordered" is true iff neither
// operand's lowest lane is NaN; "unordered" is the complement.
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpord_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(NAN, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = !0u32; // a.extract(0) ord b.extract(0)
    let c1 = 0u32; // a.extract(0) ord c.extract(0)
    let d1 = !0u32; // a.extract(0) ord d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpord_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpord_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpord_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpunord_ss() {
    let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
    let b = _mm_setr_ps(0.0, 5.0, 6.0, 7.0);
    let c = _mm_setr_ps(NAN, 5.0, 6.0, 7.0);
    let d = _mm_setr_ps(2.0, 5.0, 6.0, 7.0);
    let b1 = 0u32; // a.extract(0) unord b.extract(0)
    let c1 = !0u32; // a.extract(0) unord c.extract(0)
    let d1 = 0u32; // a.extract(0) unord d.extract(0)
    let rb: u32x4 = transmute(_mm_cmpunord_ss(a, b));
    let eb: u32x4 = transmute(_mm_setr_ps(transmute(b1), 2.0, 3.0, 4.0));
    assert_eq!(rb, eb);
    let rc: u32x4 = transmute(_mm_cmpunord_ss(a, c));
    let ec: u32x4 = transmute(_mm_setr_ps(transmute(c1), 2.0, 3.0, 4.0));
    assert_eq!(rc, ec);
    let rd: u32x4 = transmute(_mm_cmpunord_ss(a, d));
    let ed: u32x4 = transmute(_mm_setr_ps(transmute(d1), 2.0, 3.0, 4.0));
    assert_eq!(rd, ed);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpeq_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, fls, tru, fls);
let r: u32x4 = transmute(_mm_cmpeq_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmplt_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, fls, fls, fls);
let r: u32x4 = transmute(_mm_cmplt_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmple_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, 4.0);
let b = _mm_setr_ps(15.0, 20.0, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, fls, tru, fls);
let r: u32x4 = transmute(_mm_cmple_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpgt_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 42.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, tru, fls, fls);
let r: u32x4 = transmute(_mm_cmpgt_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpge_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 42.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, tru, tru, fls);
let r: u32x4 = transmute(_mm_cmpge_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpneq_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, tru, fls, tru);
let r: u32x4 = transmute(_mm_cmpneq_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnlt_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 5.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, tru, tru, tru);
let r: u32x4 = transmute(_mm_cmpnlt_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnle_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 5.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, tru, fls, tru);
let r: u32x4 = transmute(_mm_cmpnle_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpngt_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 5.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, fls, tru, tru);
let r: u32x4 = transmute(_mm_cmpngt_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpnge_ps() {
let a = _mm_setr_ps(10.0, 50.0, 1.0, NAN);
let b = _mm_setr_ps(15.0, 20.0, 1.0, 5.0);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, fls, fls, tru);
let r: u32x4 = transmute(_mm_cmpnge_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpord_ps() {
let a = _mm_setr_ps(10.0, 50.0, NAN, NAN);
let b = _mm_setr_ps(15.0, NAN, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(tru, fls, fls, fls);
let r: u32x4 = transmute(_mm_cmpord_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cmpunord_ps() {
let a = _mm_setr_ps(10.0, 50.0, NAN, NAN);
let b = _mm_setr_ps(15.0, NAN, 1.0, NAN);
let tru = !0u32;
let fls = 0u32;
let e = u32x4::new(fls, tru, tru, tru);
let r: u32x4 = transmute(_mm_cmpunord_ps(a, b));
assert_eq!(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comieq_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 0, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_comieq_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_comieq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comilt_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[0i32, 1, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_comilt_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_comilt_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comile_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 1, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_comile_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_comile_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comigt_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 0, 1, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_comige_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_comige_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comineq_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[0i32, 1, 1, 1];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_comineq_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_comineq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomieq_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 0, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomieq_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomieq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomilt_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[0i32, 1, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomilt_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomilt_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomile_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 1, 0, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomile_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomile_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomigt_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[0i32, 0, 1, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomigt_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomigt_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomige_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[1i32, 0, 1, 0];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomige_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomige_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_ucomineq_ss() {
let aa = &[3.0f32, 12.0, 23.0, NAN];
let bb = &[3.0f32, 47.5, 1.5, NAN];
let ee = &[0i32, 1, 1, 1];
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
let r = _mm_ucomineq_ss(a, b);
assert_eq!(
ee[i], r,
"_mm_ucomineq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r, ee[i], i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_comieq_ss_vs_ucomieq_ss() {
// If one of the arguments is a quiet NaN `comieq_ss` should signal an
// Invalid Operation Exception while `ucomieq_ss` should not.
let aa = &[3.0f32, NAN, 23.0, NAN];
let bb = &[3.0f32, 47.5, NAN, NAN];
let ee = &[1i32, 0, 0, 0];
let exc = &[0u32, 1, 1, 1]; // Should comieq_ss signal an exception?
for i in 0..4 {
let a = _mm_setr_ps(aa[i], 1.0, 2.0, 3.0);
let b = _mm_setr_ps(bb[i], 0.0, 2.0, 4.0);
_MM_SET_EXCEPTION_STATE(0);
let r1 = _mm_comieq_ss(*black_box(&a), b);
let s1 = _MM_GET_EXCEPTION_STATE();
_MM_SET_EXCEPTION_STATE(0);
let r2 = _mm_ucomieq_ss(*black_box(&a), b);
let s2 = _MM_GET_EXCEPTION_STATE();
assert_eq!(
ee[i], r1,
"_mm_comeq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r1, ee[i], i
);
assert_eq!(
ee[i], r2,
"_mm_ucomeq_ss({:?}, {:?}) = {}, expected: {} (i={})",
a, b, r2, ee[i], i
);
assert_eq!(
s1,
exc[i] * _MM_EXCEPT_INVALID,
"_mm_comieq_ss() set exception flags: {} (i={})",
s1,
i
);
assert_eq!(
s2,
0, // ucomieq_ss should not signal an exception
"_mm_ucomieq_ss() set exception flags: {} (i={})",
s2,
i
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cvtss_si32() {
let inputs = &[42.0f32, -3.1, 4.0e10, 4.0e-20, NAN, 2147483500.1];
let result = &[42i32, -3, i32::MIN, 0, i32::MIN, 2147483520];
for i in 0..inputs.len() {
let x = _mm_setr_ps(inputs[i], 1.0, 3.0, 4.0);
let e = result[i];
let r = _mm_cvtss_si32(x);
assert_eq!(
e, r,
"TestCase #{} _mm_cvtss_si32({:?}) = {}, expected: {}",
i, x, r, e
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cvttss_si32() {
let inputs = &[
(42.0f32, 42i32),
(-31.4, -31),
(-33.5, -33),
(-34.5, -34),
(10.999, 10),
(-5.99, -5),
(4.0e10, i32::MIN),
(4.0e-10, 0),
(NAN, i32::MIN),
(2147483500.1, 2147483520),
];
for i in 0..inputs.len() {
let (xi, e) = inputs[i];
let x = _mm_setr_ps(xi, 1.0, 3.0, 4.0);
let r = _mm_cvttss_si32(x);
assert_eq!(
e, r,
"TestCase #{} _mm_cvttss_si32({:?}) = {}, expected: {}",
i, x, r, e
);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cvtsi32_ss() {
let inputs = &[
(4555i32, 4555.0f32),
(322223333, 322223330.0),
(-432, -432.0),
(-322223333, -322223330.0),
];
for i in 0..inputs.len() {
let (x, f) = inputs[i];
let a = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_cvtsi32_ss(a, x);
let e = _mm_setr_ps(f, 6.0, 7.0, 8.0);
assert_eq_m128(e, r);
}
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_cvtss_f32() {
let a = _mm_setr_ps(312.0134, 5.0, 6.0, 7.0);
assert_eq!(_mm_cvtss_f32(a), 312.0134);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_set_ss() {
let r = _mm_set_ss(black_box(4.25));
assert_eq_m128(r, _mm_setr_ps(4.25, 0.0, 0.0, 0.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_set1_ps() {
let r1 = _mm_set1_ps(black_box(4.25));
let r2 = _mm_set_ps1(black_box(4.25));
assert_eq!(get_m128(r1, 0), 4.25);
assert_eq!(get_m128(r1, 1), 4.25);
assert_eq!(get_m128(r1, 2), 4.25);
assert_eq!(get_m128(r1, 3), 4.25);
assert_eq!(get_m128(r2, 0), 4.25);
assert_eq!(get_m128(r2, 1), 4.25);
assert_eq!(get_m128(r2, 2), 4.25);
assert_eq!(get_m128(r2, 3), 4.25);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_set_ps() {
let r = _mm_set_ps(
black_box(1.0),
black_box(2.0),
black_box(3.0),
black_box(4.0),
);
assert_eq!(get_m128(r, 0), 4.0);
assert_eq!(get_m128(r, 1), 3.0);
assert_eq!(get_m128(r, 2), 2.0);
assert_eq!(get_m128(r, 3), 1.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_setr_ps() {
let r = _mm_setr_ps(
black_box(1.0),
black_box(2.0),
black_box(3.0),
black_box(4.0),
);
assert_eq_m128(r, _mm_setr_ps(1.0, 2.0, 3.0, 4.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_setzero_ps() {
let r = *black_box(&_mm_setzero_ps());
assert_eq_m128(r, _mm_set1_ps(0.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_shuffle() {
assert_eq!(_MM_SHUFFLE(0, 1, 1, 3), 0b00_01_01_11);
assert_eq!(_MM_SHUFFLE(3, 1, 1, 0), 0b11_01_01_00);
assert_eq!(_MM_SHUFFLE(1, 2, 2, 1), 0b01_10_10_01);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_shuffle_ps() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_shuffle_ps::<0b00_01_01_11>(a, b);
assert_eq_m128(r, _mm_setr_ps(4.0, 2.0, 6.0, 5.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_unpackhi_ps() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_unpackhi_ps(a, b);
assert_eq_m128(r, _mm_setr_ps(3.0, 7.0, 4.0, 8.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_unpacklo_ps() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_unpacklo_ps(a, b);
assert_eq_m128(r, _mm_setr_ps(1.0, 5.0, 2.0, 6.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_movehl_ps() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_movehl_ps(a, b);
assert_eq_m128(r, _mm_setr_ps(7.0, 8.0, 3.0, 4.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_movelh_ps() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_movelh_ps(a, b);
assert_eq_m128(r, _mm_setr_ps(1.0, 2.0, 5.0, 6.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_load_ss() {
let a = 42.0f32;
let r = _mm_load_ss(&a as *const f32);
assert_eq_m128(r, _mm_setr_ps(42.0, 0.0, 0.0, 0.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_load1_ps() {
let a = 42.0f32;
let r = _mm_load1_ps(&a as *const f32);
assert_eq_m128(r, _mm_setr_ps(42.0, 42.0, 42.0, 42.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_load_ps() {
let vals = &[1.0f32, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0];
let mut p = vals.as_ptr();
let mut fixup = 0.0f32;
// Make sure p is aligned, otherwise we might get a
// (signal: 11, SIGSEGV: invalid memory reference)
let unalignment = (p as usize) & 0xf;
if unalignment != 0 {
let delta = ((16 - unalignment) >> 2) as isize;
fixup = delta as f32;
p = p.offset(delta);
}
let r = _mm_load_ps(p);
let e = _mm_add_ps(_mm_setr_ps(1.0, 2.0, 3.0, 4.0), _mm_set1_ps(fixup));
assert_eq_m128(r, e);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_loadu_ps() {
let vals = &[1.0f32, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0];
let p = vals.as_ptr().offset(3);
let r = _mm_loadu_ps(black_box(p));
assert_eq_m128(r, _mm_setr_ps(4.0, 5.0, 6.0, 7.0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_loadr_ps() {
let vals = &[1.0f32, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0];
let mut p = vals.as_ptr();
let mut fixup = 0.0f32;
// Make sure p is aligned, otherwise we might get a
// (signal: 11, SIGSEGV: invalid memory reference)
let unalignment = (p as usize) & 0xf;
if unalignment != 0 {
let delta = ((16 - unalignment) >> 2) as isize;
fixup = delta as f32;
p = p.offset(delta);
}
let r = _mm_loadr_ps(p);
let e = _mm_add_ps(_mm_setr_ps(4.0, 3.0, 2.0, 1.0), _mm_set1_ps(fixup));
assert_eq_m128(r, e);
}
#[simd_test(enable = "sse2")]
unsafe fn test_mm_loadu_si64() {
let a = _mm_setr_epi64x(5, 6);
let r = _mm_loadu_si64(&a as *const _ as *const _);
assert_eq_m128i(r, _mm_setr_epi64x(5, 0));
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_store_ss() {
let mut vals = [0.0f32; 8];
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
_mm_store_ss(vals.as_mut_ptr().offset(1), a);
assert_eq!(vals[0], 0.0);
assert_eq!(vals[1], 1.0);
assert_eq!(vals[2], 0.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_store1_ps() {
let mut vals = [0.0f32; 8];
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let mut ofs = 0;
let mut p = vals.as_mut_ptr();
if (p as usize) & 0xf != 0 {
ofs = ((16 - (p as usize)) & 0xf) >> 2;
p = p.add(ofs);
}
_mm_store1_ps(p, *black_box(&a));
if ofs > 0 {
assert_eq!(vals[ofs - 1], 0.0);
}
assert_eq!(vals[ofs + 0], 1.0);
assert_eq!(vals[ofs + 1], 1.0);
assert_eq!(vals[ofs + 2], 1.0);
assert_eq!(vals[ofs + 3], 1.0);
assert_eq!(vals[ofs + 4], 0.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_store_ps() {
let mut vals = [0.0f32; 8];
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let mut ofs = 0;
let mut p = vals.as_mut_ptr();
// Align p to 16-byte boundary
if (p as usize) & 0xf != 0 {
ofs = ((16 - (p as usize)) & 0xf) >> 2;
p = p.add(ofs);
}
_mm_store_ps(p, *black_box(&a));
if ofs > 0 {
assert_eq!(vals[ofs - 1], 0.0);
}
assert_eq!(vals[ofs + 0], 1.0);
assert_eq!(vals[ofs + 1], 2.0);
assert_eq!(vals[ofs + 2], 3.0);
assert_eq!(vals[ofs + 3], 4.0);
assert_eq!(vals[ofs + 4], 0.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_storer_ps() {
let mut vals = [0.0f32; 8];
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let mut ofs = 0;
let mut p = vals.as_mut_ptr();
// Align p to 16-byte boundary
if (p as usize) & 0xf != 0 {
ofs = ((16 - (p as usize)) & 0xf) >> 2;
p = p.add(ofs);
}
_mm_storer_ps(p, *black_box(&a));
if ofs > 0 {
assert_eq!(vals[ofs - 1], 0.0);
}
assert_eq!(vals[ofs + 0], 4.0);
assert_eq!(vals[ofs + 1], 3.0);
assert_eq!(vals[ofs + 2], 2.0);
assert_eq!(vals[ofs + 3], 1.0);
assert_eq!(vals[ofs + 4], 0.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_storeu_ps() {
let mut vals = [0.0f32; 8];
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let mut ofs = 0;
let mut p = vals.as_mut_ptr();
// Make sure p is **not** aligned to 16-byte boundary
if (p as usize) & 0xf == 0 {
ofs = 1;
p = p.offset(1);
}
_mm_storeu_ps(p, *black_box(&a));
if ofs > 0 {
assert_eq!(vals[ofs - 1], 0.0);
}
assert_eq!(vals[ofs + 0], 1.0);
assert_eq!(vals[ofs + 1], 2.0);
assert_eq!(vals[ofs + 2], 3.0);
assert_eq!(vals[ofs + 3], 4.0);
assert_eq!(vals[ofs + 4], 0.0);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_move_ss() {
let a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let r = _mm_move_ss(a, b);
let e = _mm_setr_ps(5.0, 2.0, 3.0, 4.0);
assert_eq_m128(e, r);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_movemask_ps() {
let r = _mm_movemask_ps(_mm_setr_ps(-1.0, 5.0, -5.0, 0.0));
assert_eq!(r, 0b0101);
let r = _mm_movemask_ps(_mm_setr_ps(-1.0, -5.0, -5.0, 0.0));
assert_eq!(r, 0b0111);
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_sfence() {
_mm_sfence();
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_getcsr_setcsr_1() {
let saved_csr = _mm_getcsr();
let a = _mm_setr_ps(1.1e-36, 0.0, 0.0, 1.0);
let b = _mm_setr_ps(0.001, 0.0, 0.0, 1.0);
_MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
let r = _mm_mul_ps(*black_box(&a), *black_box(&b));
_mm_setcsr(saved_csr);
let exp = _mm_setr_ps(0.0, 0.0, 0.0, 1.0);
assert_eq_m128(r, exp); // first component is a denormalized f32
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_getcsr_setcsr_2() {
// Same as _mm_setcsr_1 test, but with opposite flag value.
let saved_csr = _mm_getcsr();
let a = _mm_setr_ps(1.1e-36, 0.0, 0.0, 1.0);
let b = _mm_setr_ps(0.001, 0.0, 0.0, 1.0);
_MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_OFF);
let r = _mm_mul_ps(*black_box(&a), *black_box(&b));
_mm_setcsr(saved_csr);
let exp = _mm_setr_ps(1.1e-39, 0.0, 0.0, 1.0);
assert_eq_m128(r, exp); // first component is a denormalized f32
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_getcsr_setcsr_underflow() {
_MM_SET_EXCEPTION_STATE(0);
let a = _mm_setr_ps(1.1e-36, 0.0, 0.0, 1.0);
let b = _mm_setr_ps(1e-5, 0.0, 0.0, 1.0);
assert_eq!(_MM_GET_EXCEPTION_STATE(), 0); // just to be sure
let r = _mm_mul_ps(*black_box(&a), *black_box(&b));
let exp = _mm_setr_ps(1.1e-41, 0.0, 0.0, 1.0);
assert_eq_m128(r, exp);
let underflow = _MM_GET_EXCEPTION_STATE() & _MM_EXCEPT_UNDERFLOW != 0;
assert_eq!(underflow, true);
}
#[simd_test(enable = "sse")]
unsafe fn test_MM_TRANSPOSE4_PS() {
let mut a = _mm_setr_ps(1.0, 2.0, 3.0, 4.0);
let mut b = _mm_setr_ps(5.0, 6.0, 7.0, 8.0);
let mut c = _mm_setr_ps(9.0, 10.0, 11.0, 12.0);
let mut d = _mm_setr_ps(13.0, 14.0, 15.0, 16.0);
_MM_TRANSPOSE4_PS(&mut a, &mut b, &mut c, &mut d);
assert_eq_m128(a, _mm_setr_ps(1.0, 5.0, 9.0, 13.0));
assert_eq_m128(b, _mm_setr_ps(2.0, 6.0, 10.0, 14.0));
assert_eq_m128(c, _mm_setr_ps(3.0, 7.0, 11.0, 15.0));
assert_eq_m128(d, _mm_setr_ps(4.0, 8.0, 12.0, 16.0));
}
#[repr(align(16))]
struct Memory {
pub data: [f32; 4],
}
#[simd_test(enable = "sse")]
unsafe fn test_mm_stream_ps() {
let a = _mm_set1_ps(7.0);
let mut mem = Memory { data: [-1.0; 4] };
_mm_stream_ps(&mut mem.data[0] as *mut f32, a);
for i in 0..4 {
assert_eq!(mem.data[i], get_m128(a, i));
}
}
}
| _mm_prefetch |
strings.rs | use std::iter::Iterator;
use codemap::{Span, Spanned};
use peekmore::PeekMoreIterator;
use crate::common::QuoteKind;
use crate::error::SassResult;
use crate::selector::Selector;
use crate::value::Value;
use crate::{Scope, Token};
use super::{as_hex, hex_char_for, is_name, is_name_start, parse_interpolation};
/// Whether `s` forms a valid identifier: a non-numeric name-start character
/// followed by name characters and/or backslash escape sequences.
pub(crate) fn is_ident(s: &str) -> bool {
    let mut iter = s.chars().peekable();
    // The leading character must be a (non-numeric) name-start character.
    match iter.next() {
        Some(first) if is_name_start(first) && !first.is_numeric() => {}
        _ => return false,
    }
    while let Some(ch) = iter.next() {
        if ch != '\\' {
            if !is_name(ch) {
                return false;
            }
            continue;
        }
        // Escape sequence: consume up to six characters while they are hex
        // digits, then an optional single whitespace terminator. Running
        // out of input inside an escape is accepted.
        let mut consumed = 0;
        while consumed < 6 {
            match iter.next() {
                None => return true,
                Some(d) if !d.is_ascii_hexdigit() => break,
                Some(_) => consumed += 1,
            }
        }
        if matches!(iter.peek(), Some(ws) if ws.is_whitespace()) {
            iter.next();
        }
    }
    true
}
fn | <I: Iterator<Item = Token>>(
toks: &mut PeekMoreIterator<I>,
unit: bool,
mut span: Span,
) -> SassResult<Spanned<String>> {
let mut text = String::new();
while let Some(tok) = toks.peek() {
span = span.merge(tok.pos());
if unit && tok.kind == '-' {
// Disallow `-` followed by a dot or a digit digit in units.
let second = match toks.peek_forward(1) {
Some(v) => *v,
None => break,
};
toks.peek_backward(1).unwrap();
if second.kind == '.' || second.kind.is_ascii_digit() {
break;
}
toks.next();
text.push('-');
} else if is_name(tok.kind) {
text.push(toks.next().unwrap().kind);
} else if tok.kind == '\\' {
toks.next();
text.push_str(&escape(toks, false)?);
} else {
break;
}
}
Ok(Spanned { node: text, span })
}
/// Consume the body of an identifier that may contain `#{}` interpolation,
/// appending the consumed text to `buf` and returning the merged span.
fn interpolated_ident_body<I: Iterator<Item = Token>>(
    toks: &mut PeekMoreIterator<I>,
    scope: &Scope,
    super_selector: &Selector,
    mut span: Span,
    buf: &mut String,
) -> SassResult<Span> {
    while let Some(tok) = toks.peek() {
        match tok.kind {
            // Plain name characters (ASCII alphanumerics, `_`, `-`, and
            // everything non-ASCII) are consumed verbatim.
            'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '-' | '\u{80}'..=std::char::MAX => {
                span = span.merge(tok.pos());
                buf.push(toks.next().unwrap().kind);
            }
            '\\' => {
                toks.next();
                buf.push_str(&escape(toks, false)?);
            }
            '#' => {
                // Only `#{` starts an interpolation; a bare `#` ends the
                // identifier body.
                if let Some(Token { kind: '{', pos }) = toks.peek_forward(1).cloned() {
                    toks.next();
                    toks.next();
                    // TODO: if ident, interpolate literally
                    let interpolation = parse_interpolation(toks, scope, super_selector, pos)?;
                    buf.push_str(&interpolation.node.to_css_string(interpolation.span)?);
                } else {
                    // Rewind the peek cursor advanced by `peek_forward`.
                    toks.reset_view();
                    break;
                }
            }
            _ => break,
        }
    }
    Ok(span)
}
/// Decode one escape sequence (the `\` itself has already been consumed)
/// and return its canonical serialized form.
///
/// Hex escapes (up to six hex digits plus one optional terminating
/// whitespace) are decoded to a code point; any other character escapes
/// itself. The result is re-serialized: characters valid at this position
/// are returned bare, control characters (and digits at identifier start)
/// as a hex escape with a trailing space, everything else as `\` + char.
fn escape<I: Iterator<Item = Token>>(
    toks: &mut PeekMoreIterator<I>,
    identifier_start: bool,
) -> SassResult<String> {
    let mut value = 0;
    let first = match toks.peek() {
        Some(t) => t,
        None => return Ok(String::new()),
    };
    let mut span = first.pos();
    if first.kind == '\n' {
        // A backslash immediately before a newline is never a valid escape.
        return Err(("Expected escape sequence.", span).into());
    } else if first.kind.is_ascii_hexdigit() {
        // Hex escape: accumulate up to six hex digits...
        for _ in 0..6 {
            let next = match toks.peek() {
                Some(t) => t,
                None => break,
            };
            if !next.kind.is_ascii_hexdigit() {
                break;
            }
            value *= 16;
            span = span.merge(next.pos());
            value += as_hex(toks.next().unwrap().kind)
        }
        // ...optionally terminated by a single whitespace character.
        if toks.peek().is_some() && toks.peek().unwrap().kind.is_whitespace() {
            toks.next();
        }
    } else {
        // Any other character escapes itself.
        let next = toks.next().unwrap();
        span = span.merge(next.pos());
        value = next.kind as u32;
    }
    let c = std::char::from_u32(value).ok_or(("Invalid escape sequence.", span))?;
    if (identifier_start && is_name_start(c) && !c.is_digit(10))
        || (!identifier_start && is_name(c))
    {
        // The decoded character may appear bare in this position.
        Ok(c.to_string())
    } else if value <= 0x1F || value == 0x7F || (identifier_start && c.is_digit(10)) {
        // Control characters (and digits at identifier start) must stay
        // hex-escaped; the trailing space terminates the escape.
        let mut buf = String::with_capacity(4);
        buf.push('\\');
        if value > 0xF {
            buf.push(hex_char_for(value >> 4));
        }
        buf.push(hex_char_for(value & 0xF));
        buf.push(' ');
        Ok(buf)
    } else {
        // Everything else is serialized as a literal backslash escape.
        Ok(format!("\\{}", c))
    }
}
/// Consume an identifier, resolving `#{}` interpolation via `scope` and
/// `super_selector`.
///
/// Accepts an optional leading `-` (or a doubled `--`, custom-property
/// style), then a name-start character, an escape, or an interpolation; the
/// remainder is read by `interpolated_ident_body`. Errors with
/// "Expected identifier." when the input cannot start one.
pub(crate) fn eat_ident<I: Iterator<Item = Token>>(
    toks: &mut PeekMoreIterator<I>,
    scope: &Scope,
    super_selector: &Selector,
    span_before: Span,
) -> SassResult<Spanned<String>> {
    let Token {
        kind,
        pos: mut span,
    } = toks.peek().ok_or(("Expected identifier.", span_before))?;
    let mut text = String::new();
    // Leading `-`: a lone hyphen prefix, or when doubled a custom-property
    // style name whose body may still contain interpolation.
    if kind == &'-' {
        toks.next();
        text.push('-');
        match toks.peek() {
            Some(Token { kind: '-', .. }) => {
                toks.next();
                text.push('-');
                let body_span =
                    interpolated_ident_body(toks, scope, super_selector, span, &mut text)?;
                span = span.merge(body_span);
                return Ok(Spanned { node: text, span });
            }
            Some(..) => {}
            None => return Ok(Spanned { node: text, span }),
        }
    }
    let Token { kind: first, pos } = match toks.peek() {
        Some(v) => *v,
        None => return Err(("Expected identifier.", span).into()),
    };
    if is_name_start(first) {
        text.push(toks.next().unwrap().kind);
    } else if first == '\\' {
        toks.next();
        text.push_str(&escape(toks, true)?);
    // TODO: peekmore
    // (first == '#' && scanner.peekChar(1) == $lbrace)
    } else if first == '#' {
        toks.next();
        let Token { kind, pos } = if let Some(tok) = toks.peek() {
            *tok
        } else {
            return Err(("Expected identifier.", pos).into());
        };
        if kind == '{' {
            toks.next();
            // The identifier starts with an interpolation; stringify its
            // value (string values are embedded without quotes).
            text.push_str(
                &match parse_interpolation(toks, scope, super_selector, pos)?.node {
                    Value::String(s, ..) => s,
                    v => v.to_css_string(span)?.into(),
                },
            );
        } else {
            return Err(("Expected identifier.", pos).into());
        }
    } else {
        return Err(("Expected identifier.", pos).into());
    }
    let body_span = interpolated_ident_body(toks, scope, super_selector, pos, &mut text)?;
    span = span.merge(body_span);
    Ok(Spanned { node: text, span })
}
/// Consume an identifier without resolving `#{}` interpolation (used where
/// interpolation is not allowed, e.g. unit names -- hence the `unit` flag
/// forwarded to `ident_body_no_interpolation`).
pub(crate) fn eat_ident_no_interpolation<I: Iterator<Item = Token>>(
    toks: &mut PeekMoreIterator<I>,
    unit: bool,
    span_before: Span,
) -> SassResult<Spanned<String>> {
    let Token {
        kind,
        pos: mut span,
    } = toks.peek().ok_or(("Expected identifier.", span_before))?;
    let mut text = String::new();
    // Leading `-` / `--` prefixes, mirroring `eat_ident`.
    if kind == &'-' {
        toks.next();
        text.push('-');
        match toks.peek() {
            Some(Token { kind: '-', .. }) => {
                toks.next();
                text.push('-');
                text.push_str(&ident_body_no_interpolation(toks, unit, span)?.node);
                return Ok(Spanned { node: text, span });
            }
            Some(..) => {}
            None => return Ok(Spanned { node: text, span }),
        }
    }
    let first = match toks.next() {
        Some(v) => v,
        None => return Err(("Expected identifier.", span).into()),
    };
    if is_name_start(first.kind) {
        text.push(first.kind);
    } else if first.kind == '\\' {
        text.push_str(&escape(toks, true)?);
    } else {
        return Err(("Expected identifier.", first.pos).into());
    }
    let body = ident_body_no_interpolation(toks, unit, span)?;
    span = span.merge(body.span);
    text.push_str(&body.node);
    Ok(Spanned { node: text, span })
}
/// Parse the body of a quoted string (opening quote `q` already consumed),
/// handling `#{}` interpolation and backslash escapes, up to and including
/// the matching closing quote.
///
/// Returns the decoded contents as a `Value::String` tagged
/// `QuoteKind::Quoted`, with a span covering the whole literal body.
pub(crate) fn parse_quoted_string<I: Iterator<Item = Token>>(
    toks: &mut PeekMoreIterator<I>,
    scope: &Scope,
    q: char,
    super_selector: &Selector,
    span_before: Span,
) -> SassResult<Spanned<Value>> {
    let mut s = String::new();
    let mut span = toks
        .peek()
        .ok_or((format!("Expected {}.", q), span_before))?
        .pos();
    while let Some(tok) = toks.next() {
        span = span.merge(tok.pos());
        match tok.kind {
            '"' if q == '"' => break,
            '\'' if q == '\'' => break,
            '#' => {
                if let Some(Token { kind: '{', pos }) = toks.peek().cloned() {
                    toks.next();
                    let interpolation = parse_interpolation(toks, scope, super_selector, pos)?;
                    s.push_str(&match interpolation.node {
                        Value::String(s, ..) => s,
                        v => v.to_css_string(interpolation.span)?.into(),
                    });
                    continue;
                } else {
                    s.push('#');
                    continue;
                }
            }
            // Unescaped newlines terminate the literal with an error. Report
            // the quote character actually expected (previously hard-coded
            // to `"` even inside single-quoted strings).
            '\n' => return Err((format!("Expected {}.", q), tok.pos()).into()),
            '\\' => {
                let first = match toks.peek() {
                    Some(c) => c,
                    None => {
                        s.push('\u{FFFD}');
                        continue;
                    }
                };
                if first.kind == '\n' {
                    // An escaped newline is consumed and produces nothing.
                    toks.next();
                    continue;
                }
                if first.kind.is_ascii_hexdigit() {
                    // Hex escape: up to six hex digits, optionally followed
                    // by a single terminating whitespace character.
                    let mut value = 0;
                    for _ in 0..6 {
                        // todo: or patterns
                        let next = match toks.peek() {
                            Some(c) => c,
                            None => break,
                        };
                        if !next.kind.is_ascii_hexdigit() {
                            break;
                        }
                        value = (value << 4) + as_hex(toks.next().unwrap().kind);
                    }
                    if toks.peek().is_some() && toks.peek().unwrap().kind.is_ascii_whitespace() {
                        toks.next();
                    }
                    // NUL, surrogates, and code points beyond the maximum
                    // allowed (U+10FFFF) become U+FFFD. U+10FFFF itself is
                    // valid, so the comparison is strict (was `>=`, which
                    // wrongly replaced the maximum code point).
                    if value == 0 || (value >= 0xD800 && value <= 0xDFFF) || value > 0x0010_FFFF {
                        s.push('\u{FFFD}');
                    } else {
                        s.push(std::char::from_u32(value).unwrap());
                    }
                } else {
                    // Any other escaped character stands for itself.
                    s.push(toks.next().unwrap().kind);
                }
            }
            _ => s.push(tok.kind),
        }
    }
    Ok(Spanned {
        node: Value::String(s, QuoteKind::Quoted),
        span,
    })
}
| ident_body_no_interpolation |
AccountsCommandSet.ts | import { CommandSet } from 'pip-services3-commons-node';
import { ICommand } from 'pip-services3-commons-node';
import { Command } from 'pip-services3-commons-node';
import { Schema } from 'pip-services3-commons-node';
import { Parameters } from 'pip-services3-commons-node';
import { FilterParams } from 'pip-services3-commons-node';
import { PagingParams } from 'pip-services3-commons-node'; | import { ObjectSchema } from 'pip-services3-commons-node';
import { TypeCode } from 'pip-services3-commons-node';
import { FilterParamsSchema } from 'pip-services3-commons-node';
import { PagingParamsSchema } from 'pip-services3-commons-node';
import { AccountV1Schema } from '../data/version1/AccountV1Schema';
import { IAccountsController } from './IAccountsController';
export class AccountsCommandSet extends CommandSet {
private _logic: IAccountsController;
constructor(logic: IAccountsController) {
super();
this._logic = logic;
// Register commands to the business logic
this.addCommand(this.makeGetAccountsCommand());
this.addCommand(this.makeGetAccountByIdCommand());
this.addCommand(this.makeGetAccountByLoginCommand());
this.addCommand(this.makeGetAccountByIdOrLoginCommand());
this.addCommand(this.makeCreateAccountCommand());
this.addCommand(this.makeUpdateAccountCommand());
this.addCommand(this.makeDeleteAccountByIdCommand());
}
private makeGetAccountsCommand(): ICommand {
return new Command(
"get_accounts",
new ObjectSchema(true)
.withOptionalProperty('filter', new FilterParamsSchema())
.withOptionalProperty('paging', new PagingParamsSchema()),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let filter = FilterParams.fromValue(args.get("filter"));
let paging = PagingParams.fromValue(args.get("paging"));
this._logic.getAccounts(correlationId, filter, paging, callback);
}
);
}
private makeGetAccountByIdCommand(): ICommand {
return new Command(
"get_account_by_id",
new ObjectSchema(true)
.withRequiredProperty('account_id', TypeCode.String),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let accountId = args.getAsNullableString("account_id");
this._logic.getAccountById(correlationId, accountId, callback);
}
);
}
private makeGetAccountByLoginCommand(): ICommand {
return new Command(
"get_account_by_login",
new ObjectSchema(true)
.withRequiredProperty('login', TypeCode.String),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let accountId = args.getAsNullableString("login");
this._logic.getAccountByLogin(correlationId, accountId, callback);
}
);
}
private makeGetAccountByIdOrLoginCommand(): ICommand {
return new Command(
"get_account_by_id_or_login",
new ObjectSchema(true)
.withRequiredProperty('id_or_login', TypeCode.String),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let idOrLogin = args.getAsNullableString("id_or_login");
this._logic.getAccountByIdOrLogin(correlationId, idOrLogin, callback);
}
);
}
private makeCreateAccountCommand(): ICommand {
return new Command(
"create_account",
null,
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let account = args.get("account");
this._logic.createAccount(correlationId, account, callback);
}
);
}
private makeUpdateAccountCommand(): ICommand {
return new Command(
"update_account",
new ObjectSchema(true)
.withRequiredProperty('account', new AccountV1Schema()),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let account = args.get("account");
this._logic.updateAccount(correlationId, account, callback);
}
);
}
private makeDeleteAccountByIdCommand(): ICommand {
return new Command(
"delete_account_by_id",
new ObjectSchema(true)
.withRequiredProperty('account_id', TypeCode.String),
(correlationId: string, args: Parameters, callback: (err: any, result: any) => void) => {
let accountId = args.getAsNullableString("account_id");
this._logic.deleteAccountById(correlationId, accountId, callback);
}
);
}
} | |
ArbitraryCS_main.py | # coding:utf8
import torch as t
import torchvision as tv
import torchnet as tnt
from torch.utils import data
from transformer_net import TransformerNet
import utils
from PackedVGG import Vgg16
from torch.nn import functional as F
import tqdm
import os
import ipdb
# from WCT2_train import WCT2
# import model
from LapSobGaus_train import Lap_Sob_Gaus
import net
import Ovodus_Laplace_model
import utils_
from WCT2_train import train_transform
from tensorboardX import SummaryWriter
from pathlib import Path
from torchvision.utils import save_image
mean = [0.485, 0.456, 0.406]
std = [0.229, 0.224, 0.225]
IMAGENET_MEAN = [0.485, 0.456, 0.406]
IMAGENET_STD = [0.229, 0.224, 0.225]
class Config(object):
# General Args
use_gpu = True
model_path = None # pretrain model path (for resume training or test)
# Train Args
image_size = 448 # image crop_size for training
batch_size = 2
data_root = r'F:\DataSets\train2017' # 'data/' dataset root:$data_root/coco/a.jpg D:\CoCo_Dataset\train2017
num_workers = 4 # dataloader num of workers
lr = 1e-4
epoches = 20 # total epoch to train
content_weight = 1e10 # weight of content_loss
style_weight = 1e2 # weight of style_loss
style_path = 'style_input' # style image path
env = 'onlyencodercontent_58_Laps_test_nores_noDynamic_10_2' # visdom env
plot_every = 1 # visualize in visdom for every 10 batch
debug_file = '/tmp/debugnn' # touch $debug_fie to interrupt and enter ipdb
# Test Args
content_path = 'input.png' # input file to do style transfer [for test]
result_path = 'output.png' # style transfer result [for test]
option_unpool = 'sum'
cpu = False
transfer_at_encoder = True
transfer_at_decoder = True
transfer_at_skip = True
verbose = True
save_dir = './onlyencodercontent/nores_noDynamic/58_LapSobGaus_experiments_10_2'
log_dir = './onlyencodercontent/nores_noDynamic/58_LapSobGaus_logs_10_2'
lr_decay = 5e-5
def adjust_learning_rate(lr ,optimizer, iteration_count, lr_decay):
"""Imitating the original implementation"""
lr = lr / (1.0 + lr_decay * iteration_count)
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def train(**kwargs):
opt = Config()
for k_, v_ in kwargs.items():
setattr(opt, k_, v_)
device = 'cpu' if opt.cpu or not t.cuda.is_available() else 'cuda:0'
device = t.device(device)
# device=t.device('cuda') if opt.use_gpu else t.device('cpu')
vis = utils_.Visualizer(opt.env)
save_dir = Path(opt.save_dir)
save_dir.mkdir(exist_ok=True, parents=True)
log_dir = Path(opt.log_dir)
log_dir.mkdir(exist_ok=True, parents=True)
writer = SummaryWriter(log_dir=str(log_dir))
# Data loading
transfroms = tv.transforms.Compose([
tv.transforms.Resize(opt.image_size),
tv.transforms.CenterCrop(opt.image_size),
tv.transforms.ToTensor(),
#tv.transforms.Normalize(mean=IMAGENET_MEAN, std=IMAGENET_STD),
#tv.transforms.Lambda(lambda x: x*255)
])
dataset = tv.datasets.ImageFolder(opt.data_root, transfroms)
dataloader = data.DataLoader(dataset, opt.batch_size)
# style transformer network
# transformer = TransformerNet()
print('come!')
# visualizer = Visualizer(config) # create a visualizer that display/save images and plots
# device = 'cpu' if opt.cpu or not t.cuda.is_available() else 'cuda:0'
# device = t.device(device)
transfer_at = set()
if opt.transfer_at_encoder:
transfer_at.add('encoder')
if opt.transfer_at_decoder:
transfer_at.add('decoder')
if opt.transfer_at_skip:
transfer_at.add('skip')
# save_dir = Path(config.save_dir)
# save_dir.mkdir(exist_ok=True, parents=True)
# log_dir = Path(config.log_dir)
# log_dir.mkdir(exist_ok=True, parents=True)
# writer = SummaryWriter(log_dir=str(log_dir))
# vgg = net.vgg
wct2 = Lap_Sob_Gaus(transfer_at=transfer_at, option_unpool=opt.option_unpool, device=device,
verbose=False)
encoder = Ovodus_Laplace_model.Lap_Sob_GausEncoder(opt.option_unpool).to(device)
decoder = Ovodus_Laplace_model.Lap_Sob_GausDecoder(opt.option_unpool).to(device)
# vgg.load_state_dict(torch.load(config.vgg))
# vgg = nn.Sequential(*list(vgg.children())[:31])
laps = Lap_Sob_Gaus(transfer_at=transfer_at, option_unpool='sum', device=device)
network = net.Net(encoder, decoder)
network.train()
network.to(device)
transformer = network
if opt.model_path:
transformer.load_state_dict(t.load(opt.model_path, map_location=lambda _s, _: _s))
transformer.to(device)
# Vgg16 for Perceptual Loss
# vgg = Vgg16().eval()
# vgg.to(device)
# for param in vgg.parameters():
# param.requires_grad = False
# Optimizer
# optimizer = t.optim.Adam(transformer.parameters(), opt.lr)
enoptimizer = t.optim.Adam(network.encoder.parameters(), lr=opt.lr, betas=(0.9, 0.999))
deoptimizer = t.optim.Adam(network.decoder.parameters(), lr=opt.lr, betas=(0.9, 0.999))
# # Get style image
# style_dataloader = utils_.get_style_data(opt.style_path, opt.batch_size)
# #style_list = list(enumerate(style_dataloader))
# for ii, (style, _) in tqdm.tqdm(enumerate(style_dataloader)):
# #a = style
# style = style.expand(opt.batch_size, 3, 256, 256)
# vis.img('style', (style.data[0] * 0.225 + 0.45).clamp(min=0, max=1))
# #style_list.append(style)
#
# style = style.to(device)
# #
# # #
# # # # gram matrix for style image
# with t.no_grad():
# features_style = vgg(style)
# gram_style = [utils_.gram_matrix(y) for y in features_style]
# Loss meter
style_meter = tnt.meter.AverageValueMeter()
content_meter = tnt.meter.AverageValueMeter()
for epoch in range(opt.epoches):
# for jj, (style, _) in tqdm.tqdm(enumerate(style_dataloader)):
# a = style
# vis.img('style', (style.data[0] * 0.225 + 0.45).clamp(min=0, max=1))
# style = style.to(device)
#
content_meter.reset()
style_meter.reset()
for ii, (x, _) in tqdm.tqdm(enumerate(dataloader)):
if epoch == 0:
adjust_learning_rate(opt.lr, enoptimizer, iteration_count=ii, lr_decay=opt.lr_decay)
adjust_learning_rate(opt.lr, deoptimizer, iteration_count=ii, lr_decay=opt.lr_decay)
print(opt.lr)
# style = style_list[ii][1][0]
# # style = style_list[ii]
# style = style.to(device)
# # # gram matrix for style image
# with t.no_grad():
# features_style = vgg(style)
# gram_style = [utils_.gram_matrix(y) for y in features_style]
style_dataloader = utils_.get_style_data(opt.style_path, opt.batch_size)
# style_list = list(enumerate(style_dataloader))
for jj, (style, _) in tqdm.tqdm(enumerate(style_dataloader)):
# a = style
style = style.expand(opt.batch_size, 3, 256, 256)
#vis.img('style', (style.data[0] * 0.225 + 0.45).clamp(min=0, max=1))
vis.img('style', (style.data[0]).clamp(min=0, max=1))
# style_list.append(style)
style = style.to(device)
#
# #
# # # gram matrix for style image
# with t.no_grad():
# features_style = vgg(style)
# gram_style = [utils_.gram_matrix(y) for y in features_style]
# Train
enoptimizer.zero_grad()
deoptimizer.zero_grad()
x = x.to(device)
#y = network(x, style, Laps=laps)
# if (ii + 1) % 10 == 0:
# print(y)
# y = y.clamp_(0, 1) * 255
#y = utils_.normalize_batch(y)
#x = utils_.normalize_batch(x)
# features_y = vgg(y)
# features_x = vgg(x)
# # content loss
# content_loss = opt.content_weight * F.mse_loss(features_y.relu2_2, features_x.relu2_2)
#
# # style loss
# style_loss = 0
#
# for ft_y, gm_s in zip(features_y, gram_style):
# gram_y = utils_.gram_matrix(ft_y)
# style_loss += F.mse_loss(gram_y, gm_s.expand_as(gram_y))
y, content_feats, content_loss, style_loss = network(x, style, Laps=laps)
content_loss *= opt.content_weight
style_loss *= opt.style_weight
total_loss = content_loss + style_loss
total_loss.backward()
enoptimizer.step()
deoptimizer.step()
# Loss smooth for visualization
content_meter.add(content_loss.item())
style_meter.add(style_loss.item())
if ii % 50 == 1:
print('\n')
print('iters:', ii, 'total_loss:', total_loss, 'loss_c:', content_loss, 'loss_s: ', style_loss)
if (ii + 1) % opt.plot_every == 0:
if os.path.exists(opt.debug_file):
ipdb.set_trace()
# visualization
vis.plot('content_loss', content_meter.value()[0])
vis.plot('style_loss', style_meter.value()[0])
# denorm input/output, since we have applied (utils.normalize_batch)
vis.img('output1', (y.data.cpu()[0]).clamp(min=0, max=1))
vis.img('input1', (x.data.cpu()[0]).clamp(min=0, max=1))
vis.img('decoder_1', (content_feats['decoder'][0][0].data.cpu()[0]).clamp(min=0, max=1))
vis.img('decoder_2', (content_feats['decoder'][1][0].data.cpu()[0]).clamp(min=0, max=1))
vis.img('decoder_3', (content_feats['decoder'][2][0].data.cpu()[0]).clamp(min=0, max=1))
vis.img('decoder_4', (content_feats['decoder'][3][0].data.cpu()[0]).clamp(min=0, max=1))
#save_image(content_feat.clamp_(0, 1), fname_output + "decoder{:d}".format(level), padding=0)
if (ii) % 1000 == 0:
if not os.path.exists(save_dir /'epoch_{:d}'.format(epoch)):
os.makedirs(save_dir /'epoch_{:d}'.format(epoch))
de_state_dict = network.decoder.state_dict()
en_state_dict = network.encoder.state_dict()
for key in de_state_dict.keys():
de_state_dict[key] = de_state_dict[key].to(t.device('cpu'))
t.save(de_state_dict, save_dir /'epoch_{:d}'.format(epoch)/
'decoder_iter_{:d}.pth.tar'.format(ii + 1))
for key in en_state_dict.keys():
en_state_dict[key] = en_state_dict[key].to(t.device('cpu'))
t.save(en_state_dict, save_dir /'epoch_{:d}'.format(epoch)/
'encoder_iter_{:d}.pth.tar'.format(ii + 1))
de_state_dict = network.decoder.state_dict()
en_state_dict = network.encoder.state_dict()
for key in de_state_dict.keys():
de_state_dict[key] = de_state_dict[key].to(t.device('cpu'))
t.save(de_state_dict, save_dir /
'epoch_decoder_iter_{:d}.pth.tar'.format(epoch + 1))
for key in en_state_dict.keys():
en_state_dict[key] = en_state_dict[key].to(t.device('cpu'))
t.save(en_state_dict, save_dir /
'epoch_encoder_iter_{:d}.pth.tar'.format(epoch + 1))
# save checkpoints
vis.save([opt.env])
t.save(network.state_dict(), 'checkpoints/epoch_%s_style.pth' % epoch)
writer.close()
@t.no_grad()
def st | *kwargs):
"""
perform style transfer
"""
opt = Config()
for k_, v_ in kwargs.items():
setattr(opt, k_, v_)
device = t.device('cuda') if opt.use_gpu else t.device('cpu')
# input image preprocess
content_image = tv.datasets.folder.default_loader(opt.content_path)
content_transform = tv.transforms.Compose([
tv.transforms.ToTensor(),
tv.transforms.Lambda(lambda x: x.mul(255))
])
content_image = content_transform(content_image)
content_image = content_image.unsqueeze(0).to(device).detach()
# model setup
style_model = TransformerNet().eval()
style_model.load_state_dict(t.load(opt.model_path, map_location=lambda _s, _: _s))
style_model.to(device)
# style transfer and save output
output = style_model(content_image)
output_data = output.cpu().data[0]
tv.utils.save_image(((output_data / 255)).clamp(min=0, max=1), opt.result_path)
if __name__ == '__main__':
import fire
fire.Fire()
train() | ylize(* |
train.py | """ Train RelBERT model. """
import argparse
import logging
import relbert
def config(parser):
# optimization
parser.add_argument('-s', '--softmax-loss', help='softmax loss', action='store_true')
parser.add_argument('-n', '--in-batch-negative', help='in batch negative', action='store_true')
parser.add_argument('-p', '--parent-contrast', help='hierarchical contrastive loss', action='store_true')
parser.add_argument('-e', '--epoch', help='training epochs', default=1, type=int)
parser.add_argument('--mse-margin', help='contrastive loss margin', default=1, type=int)
parser.add_argument('-b', '--batch', help='batch size', default=64, type=int)
parser.add_argument('--lr', help='learning rate', default=0.00002, type=float)
parser.add_argument('--random-seed', help='random seed', default=0, type=int)
parser.add_argument('--lr-decay', help='linear decay of learning rate after warmup', action='store_true')
parser.add_argument("--lr-warmup", help="linear warmup of lr", default=10, type=int)
parser.add_argument("--weight-decay", help="l2 penalty for weight decay", default=0, type=float)
parser.add_argument('--optimizer', help='optimizer `adam`/`adamax`/`adam`', default='adam', type=str)
parser.add_argument("--momentum", help="sgd momentum", default=0.9, type=float)
# training environment
parser.add_argument('--cache-dir', help='cache directory to store dataset', default=None, type=str)
parser.add_argument('--num-workers', help='workers for dataloder', default=5, type=int)
parser.add_argument('--fp16', help='fp16 for training', action='store_true')
parser.add_argument('--epoch-save', help='interval to save model weight', default=5, type=int)
parser.add_argument('--debug', help='log level', action='store_true')
parser.add_argument('--export', help='directory to export model weight file', required=True, type=str)
# language model
parser.add_argument('-m', '--model', help='language model', default='roberta-large', type=str)
parser.add_argument('-l', '--max-length', help='length', default=64, type=int)
parser.add_argument('--mode', help='lm mode', default='average_no_mask', type=str)
# data
parser.add_argument('--data', help='dataset', default='semeval2012', type=str)
parser.add_argument('--n-sample', help='sample size', default=10, type=int)
parser.add_argument('-t', '--template-type', help='template type or path to generated prompt file',
default='a', type=str)
return parser
def main():
argument_parser = argparse.ArgumentParser(description='Train RelBERT.')
argument_parser = config(argument_parser)
opt = argument_parser.parse_args()
# logging
level = logging.DEBUG if opt.debug else logging.INFO
logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=level, datefmt='%Y-%m-%d %H:%M:%S')
trainer = relbert.Trainer(
model=opt.model,
max_length=opt.max_length,
mode=opt.mode,
data=opt.data,
n_sample=opt.n_sample,
template_type=opt.template_type,
softmax_loss=opt.softmax_loss,
in_batch_negative=opt.in_batch_negative,
parent_contrast=opt.parent_contrast,
mse_margin=opt.mse_margin,
epoch=opt.epoch,
export=opt.export, | lr=opt.lr,
lr_decay=opt.lr_decay,
lr_warmup=opt.lr_warmup,
weight_decay=opt.weight_decay,
optimizer=opt.optimizer,
momentum=opt.momentum,
fp16=opt.fp16,
random_seed=opt.random_seed,
cache_dir=opt.cache_dir)
# add file handler
logger = logging.getLogger()
file_handler = logging.FileHandler('{}/training.log'.format(trainer.checkpoint_dir))
file_handler.setLevel(level)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s'))
logger.addHandler(file_handler)
trainer.train(num_workers=opt.num_workers, epoch_save=opt.epoch_save)
if __name__ == '__main__':
main() | batch=opt.batch, |
audio_driver.rs | extern crate emu_audio;
use emu_audio::audio_driver::{AudioDriver, RenderCallback};
use emu_audio::audio_driver_factory::create_default;
use std::f64::consts::PI;
use std::thread;
struct TestUserResource {
name: String,
phase: f64
}
impl TestUserResource {
fn new(name: String) -> TestUserResource {
println!("Test user resource created ({})", name);
TestUserResource { name: name, phase: 0.0 }
}
}
impl Drop for TestUserResource {
fn drop(&mut self) {
println!("Test user resource destroyed ({})", self.name);
}
}
fn | () {
let mut driver = {
let mut test_user_resource = TestUserResource::new(String::from("a"));
let callback: Box<RenderCallback> = Box::new(move |buffer, num_frames| {
for i in 0..num_frames {
let value = (test_user_resource.phase * PI).sin() as f32;
let buffer_index = i * 2;
buffer[buffer_index + 0] = value;
buffer[buffer_index + 1] = value;
test_user_resource.phase += 440.0 / 44100.0;
}
});
let mut ret = create_default();
ret.set_render_callback(Some(callback));
ret
};
println!("All systems are go.");
println!("Starting render callback tests.");
thread::sleep_ms(1000);
println!("Swapping callback...");
{
let mut test_user_resource = TestUserResource::new(String::from("b"));
let callback: Box<RenderCallback> = Box::new(move |buffer, num_frames| {
for i in 0..num_frames {
let value = (test_user_resource.phase * 2.0 * PI).sin() as f32;
let buffer_index = i * 2;
buffer[buffer_index + 0] = value;
buffer[buffer_index + 1] = value;
test_user_resource.phase += 440.0 / 44100.0;
}
});
driver.set_render_callback(Some(callback));
}
println!("Callback swapped");
thread::sleep_ms(1000);
println!("Render callback tests completed.");
println!("Starting is enabled tests.");
println!("Driver is enabled: {}", driver.is_enabled());
thread::sleep_ms(1000);
driver.set_is_enabled(false);
println!("Driver is enabled: {}", driver.is_enabled());
thread::sleep_ms(1000);
driver.set_is_enabled(true);
println!("Driver is enabled: {}", driver.is_enabled());
thread::sleep_ms(1000);
println!("Is enabled tests completed.");
println!("Starting sample rate tests.");
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
driver.set_sample_rate(32000);
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
driver.set_sample_rate(22050);
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
driver.set_sample_rate(11025);
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
driver.set_sample_rate(96000);
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
driver.set_sample_rate(44100);
println!("Driver sample rate: {}", driver.sample_rate());
thread::sleep_ms(1000);
println!("Sample rate tests completed.");
//let mut derp = String::new();
//io::stdin().read_line(&mut derp).ok();
}
| main |
hosts_widget.rs | use super::block;
use crate::{app::App, ssh_config_store::SshGroupItem, THEME};
use chrono::{DateTime, Utc};
use std::{
io::Stdout,
time::{Duration, UNIX_EPOCH},
};
use tui::{
backend::CrosstermBackend,
layout::{Constraint, Rect},
style::Style,
widgets::{Cell, Row, Table},
Frame,
};
pub struct HostsWidget {}
impl HostsWidget {
pub fn render(app: &mut App, area: Rect, frame: &mut Frame<CrosstermBackend<Stdout>>) {
let block = block::new(" Hosts ");
let header = HostsWidget::create_header();
let rows = HostsWidget::create_rows_from_items(&app.get_items_based_on_mode());
let t = Table::new(rows)
.header(header)
.block(block)
.highlight_style(Style::default().fg(THEME.text_primary()))
.style(Style::default().fg(THEME.text_secondary()))
.highlight_symbol(">> ")
.widths(&[
Constraint::Percentage(30),
Constraint::Percentage(40),
Constraint::Percentage(30),
]);
frame.render_stateful_widget(t, area, &mut app.host_state);
}
fn create_header() -> Row<'static> {
let header_cells = ["Host", "Last Used", "# of Conn"]
.iter()
.map(|h| Cell::from(*h).style(Style::default().fg(THEME.text_secondary())));
Row::new(header_cells)
.style(Style::default())
.height(1)
.bottom_margin(1)
}
fn create_rows_from_items(items: &[&SshGroupItem]) -> Vec<Row<'static>> {
let style = Style::default();
items
.iter()
.map(|item| {
let timestamp_str = HostsWidget::format_last_used_date(item);
let cells = [
Cell::from(item.name.to_string()).style(style),
Cell::from(timestamp_str).style(style),
Cell::from(item.connection_count.to_string()).style(style),
];
Row::new(cells).height(1).bottom_margin(1)
})
.collect::<Vec<Row<'static>>>()
}
fn format_last_used_date(item: &SshGroupItem) -> String {
let mut timestamp_str = "Never".to_string();
if item.last_used != 0 |
timestamp_str
}
}
| {
let d = UNIX_EPOCH + Duration::from_secs(item.last_used as u64);
let dt = DateTime::<Utc>::from(d);
timestamp_str = dt.format("%D %R").to_string();
} |
insertid.go | package pgkebab
import (
"context"
"errors"
"fmt"
"strings"
"time"
)
// InsertID inserts a new record into given table and returns the last inserted id
// The 3rd param is the optional field name. If not given, the default value "id" will be used
func (l *DBLink) InsertID(table string, pairs map[string]interface{}, idFieldName ...string) (int64, error) {
if !l.supposedReady {
return 0, fmt.Errorf("connection not properly initialized")
}
if len(pairs) == 0 {
return 0, errors.New(`pgkebab.InsertID(undefined values)`)
} | fields []string
placeholders []string
parameters []interface{}
i uint
)
idField := "id"
for _, x := range idFieldName {
idField = x
break
}
for k, v := range pairs {
fields = append(fields, k)
i++
placeholders = append(placeholders, fmt.Sprintf("$%d", i))
parameters = append(parameters, v)
}
sqlQuery := fmt.Sprintf("INSERT INTO %s (%s) VALUES (%s) RETURNING %s", table, strings.Join(fields, ","), strings.Join(placeholders, ","), idField)
ctx, cancel := context.WithTimeout(context.Background(), time.Duration(l.executionTimeoutSeconds)*time.Second)
defer cancel()
var lastInsertedID int64
if err := l.db.QueryRowContext(ctx, sqlQuery, parameters...).Scan(&lastInsertedID); err != nil {
l.log(`pgkebab.InsertID %s db.QueryRowContext has failed: "%v"`, table, err)
return 0, err
}
return lastInsertedID, nil
} |
var ( |
help.py | from loguru import logger
from bot import embeds
@logger.catch
def help_commands(bot):
@bot.group(invoke_without_command=True)
async def help(ctx):
await ctx.send(embed=embeds.help())
@help.command()
async def balance(ctx):
await ctx.send(embed=embeds.help_balance())
@help.command()
async def deposit(ctx):
await ctx.send(embed=embeds.help_deposit())
@help.command()
async def tip(ctx):
|
@help.command()
async def withdraw(ctx):
await ctx.send(embed=embeds.help_withdraw())
@help.command(name="tokens")
async def _tokens(ctx):
await ctx.send(embed=embeds.help_tokens())
| await ctx.send(embed=embeds.help_tip()) |
build.rs | use std::env;
use std::fs::{self, File};
use std::io::prelude::*;
use cargo::util::paths::dylib_path_envvar;
use support::paths::{root, CargoPathExt};
use support::registry::Package;
use support::ProjectBuilder;
use support::{
basic_bin_manifest, basic_lib_manifest, basic_manifest, is_nightly, rustc_host, sleep_ms,
};
use support::{main_file, project, Execs};
#[test]
fn cargo_compile_simple() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
}
#[test]
fn cargo_fail_with_no_stderr() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &String::from("refusal"))
.build();
p.cargo("build --message-format=json")
.with_status(101)
.with_stderr_does_not_contain("--- stderr")
.run();
}
/// Check that the `CARGO_INCREMENTAL` environment variable results in
/// `rustc` getting `-Zincremental` passed to it.
#[test]
fn cargo_compile_incremental() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build -v")
.env("CARGO_INCREMENTAL", "1")
.with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
).run();
p.cargo("test -v")
.env("CARGO_INCREMENTAL", "1")
.with_stderr_contains(
"[RUNNING] `rustc [..] -C incremental=[..]/target/debug/incremental[..]`\n",
).run();
}
#[test]
fn incremental_profile() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[profile.dev]
incremental = false
[profile.release]
incremental = true
"#,
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("build -v")
.env_remove("CARGO_INCREMENTAL")
.with_stderr_does_not_contain("[..]C incremental=[..]")
.run();
p.cargo("build -v")
.env("CARGO_INCREMENTAL", "1")
.with_stderr_contains("[..]C incremental=[..]")
.run();
p.cargo("build --release -v")
.env_remove("CARGO_INCREMENTAL")
.with_stderr_contains("[..]C incremental=[..]")
.run();
p.cargo("build --release -v")
.env("CARGO_INCREMENTAL", "0")
.with_stderr_does_not_contain("[..]C incremental=[..]")
.run();
}
#[test]
fn incremental_config() {
let p = project()
.file("src/main.rs", "fn main() {}")
.file(
".cargo/config",
r#"
[build]
incremental = false
"#,
).build();
p.cargo("build -v")
.env_remove("CARGO_INCREMENTAL")
.with_stderr_does_not_contain("[..]C incremental=[..]")
.run();
p.cargo("build -v")
.env("CARGO_INCREMENTAL", "1")
.with_stderr_contains("[..]C incremental=[..]")
.run();
}
#[test]
fn cargo_compile_with_workspace_excluded() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build --all --exclude foo")
.with_stderr_does_not_contain("[..]virtual[..]")
.with_stderr_contains("[..]no packages to compile")
.with_status(101)
.run();
}
#[test]
fn cargo_compile_manifest_path() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build --manifest-path foo/Cargo.toml")
.cwd(p.root().parent().unwrap())
.run();
assert!(p.bin("foo").is_file());
}
#[test]
fn cargo_compile_with_invalid_manifest() {
let p = project().file("Cargo.toml", "").build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
virtual manifests must be configured with [workspace]
",
).run();
}
#[test]
fn cargo_compile_with_invalid_manifest2() {
let p = project()
.file(
"Cargo.toml",
r"
[project]
foo = bar
",
).build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 3
",
).run();
}
#[test]
fn cargo_compile_with_invalid_manifest3() {
let p = project().file("src/Cargo.toml", "a = bar").build();
p.cargo("build --manifest-path src/Cargo.toml")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
could not parse input as TOML
Caused by:
invalid number at line 1
",
).run();
}
#[test]
fn cargo_compile_duplicate_build_targets() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "main"
path = "src/main.rs"
crate-type = ["dylib"]
[dependencies]
"#,
).file("src/main.rs", "#![allow(warnings)] fn main() {}")
.build();
p.cargo("build")
.with_stderr(
"\
warning: file found to be present in multiple build targets: [..]main.rs
[COMPILING] foo v0.0.1 ([..])
[FINISHED] [..]
",
).run();
}
#[test]
fn cargo_compile_with_invalid_version() {
let p = project()
.file("Cargo.toml", &basic_manifest("foo", "1.0"))
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Expected dot for key `package.version`
",
).run();
}
#[test]
fn cargo_compile_with_empty_package_name() {
let p = project()
.file("Cargo.toml", &basic_manifest("", "0.0.0"))
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
package name cannot be an empty string
",
).run();
}
#[test]
fn cargo_compile_with_invalid_package_name() {
let p = project()
.file("Cargo.toml", &basic_manifest("foo::bar", "0.0.0"))
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
Invalid character `:` in package name: `foo::bar`
",
).run();
}
#[test]
fn cargo_compile_with_invalid_bin_target_name() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = ""
"#,
).build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
binary target names cannot be empty
",
).run();
}
#[test]
fn cargo_compile_with_forbidden_bin_target_name() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "build"
"#,
).build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the binary target name `build` is forbidden
",
).run();
}
#[test]
fn cargo_compile_with_bin_and_crate_type() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "the_foo_bin"
path = "src/foo.rs"
crate-type = ["cdylib", "rlib"]
"#,
).file("src/foo.rs", "fn main() {}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the target `the_foo_bin` is a binary and can't have any crate-types set \
(currently \"cdylib, rlib\")",
).run();
}
#[test]
fn cargo_compile_with_bin_and_proc() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "the_foo_bin"
path = "src/foo.rs"
proc-macro = true
"#,
).file("src/foo.rs", "fn main() {}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
the target `the_foo_bin` is a binary and can't have `proc-macro` set `true`",
).run();
}
#[test]
fn cargo_compile_with_invalid_lib_target_name() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[lib]
name = ""
"#,
).build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
library target names cannot be empty
",
).run();
}
#[test]
fn cargo_compile_with_invalid_non_numeric_dep_version() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
[dependencies]
crossbeam = "y"
"#,
).build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[CWD]/Cargo.toml`
Caused by:
failed to parse the version requirement `y` for dependency `crossbeam`
Caused by:
the given version requirement is invalid
",
).run();
}
#[test]
fn cargo_compile_without_manifest() {
let p = project().no_manifest().build();
p.cargo("build")
.with_status(101)
.with_stderr("[ERROR] could not find `Cargo.toml` in `[..]` or any parent directory")
.run();
}
#[test]
fn cargo_compile_with_invalid_code() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "invalid rust code!")
.build();
p.cargo("build")
.with_status(101)
.with_stderr_contains(
"\
[ERROR] Could not compile `foo`.
To learn more, run the command again with --verbose.\n",
).run();
assert!(p.root().join("Cargo.lock").is_file());
}
#[test]
fn cargo_compile_with_invalid_code_in_deps() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "../bar"
[dependencies.baz]
path = "../baz"
"#,
).file("src/main.rs", "invalid rust code!")
.build();
let _bar = project()
.at("bar")
.file("Cargo.toml", &basic_bin_manifest("bar"))
.file("src/lib.rs", "invalid rust code!")
.build();
let _baz = project()
.at("baz")
.file("Cargo.toml", &basic_bin_manifest("baz"))
.file("src/lib.rs", "invalid rust code!")
.build();
p.cargo("build").with_status(101).run();
}
#[test]
fn cargo_compile_with_warnings_in_the_root_package() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", "fn main() {} fn dead() {}")
.build();
p.cargo("build")
.with_stderr_contains("[..]function is never used: `dead`[..]")
.run();
}
#[test]
fn cargo_compile_with_warnings_in_a_dep_package() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#,
).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", &basic_lib_manifest("bar"))
.file(
"bar/src/bar.rs",
r#"
pub fn gimme() -> &'static str {
"test passed"
}
fn dead() {}
"#,
).build();
p.cargo("build")
.with_stderr_contains("[..]function is never used: `dead`[..]")
.run();
assert!(p.bin("foo").is_file());
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
}
#[test]
fn cargo_compile_with_nested_deps_inferred() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = 'bar'
[[bin]]
name = "foo"
"#,
).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file(
"bar/Cargo.toml",
r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.baz]
path = "../baz"
"#,
).file(
"bar/src/lib.rs",
r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#,
).file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
.file(
"baz/src/lib.rs",
r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#,
).build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
assert!(!p.bin("libbar.rlib").is_file());
assert!(!p.bin("libbaz.rlib").is_file());
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
}
// Same nested-deps scenario, but the binary source lives at the conventional
// src/main.rs; the correct bin target is selected and built.
#[test]
fn cargo_compile_with_nested_deps_correct_bin() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = "bar"
[[bin]]
name = "foo"
"#,
).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file(
"bar/Cargo.toml",
r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.baz]
path = "../baz"
"#,
).file(
"bar/src/lib.rs",
r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#,
).file("baz/Cargo.toml", &basic_manifest("baz", "0.5.0"))
.file(
"baz/src/lib.rs",
r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#,
).build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
assert!(!p.bin("libbar.rlib").is_file());
assert!(!p.bin("libbaz.rlib").is_file());
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
}
// Nested deps using the shorthand [lib] name = "..." form (sources at
// src/<name>.rs) still compile and link correctly.
#[test]
fn cargo_compile_with_nested_deps_shorthand() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = "bar"
"#,
).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file(
"bar/Cargo.toml",
r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.baz]
path = "../baz"
[lib]
name = "bar"
"#,
).file(
"bar/src/bar.rs",
r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#,
).file("baz/Cargo.toml", &basic_lib_manifest("baz"))
.file(
"baz/src/baz.rs",
r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#,
).build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
assert!(!p.bin("libbar.rlib").is_file());
assert!(!p.bin("libbaz.rlib").is_file());
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
}
// Nested deps using the longhand form (explicit path + version on each
// dependency, explicit [lib]/[[bin]] sections) build end to end.
#[test]
fn cargo_compile_with_nested_deps_longhand() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = "bar"
version = "0.5.0"
[[bin]]
name = "foo"
"#,
).file("src/foo.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file(
"bar/Cargo.toml",
r#"
[project]
name = "bar"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.baz]
path = "../baz"
version = "0.5.0"
[lib]
name = "bar"
"#,
).file(
"bar/src/bar.rs",
r#"
extern crate baz;
pub fn gimme() -> String {
baz::gimme()
}
"#,
).file("baz/Cargo.toml", &basic_lib_manifest("baz"))
.file(
"baz/src/baz.rs",
r#"
pub fn gimme() -> String {
"test passed".to_string()
}
"#,
).build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
assert!(!p.bin("libbar.rlib").is_file());
assert!(!p.bin("libbaz.rlib").is_file());
p.process(&p.bin("foo")).with_stdout("test passed\n").run();
}
// Check that Cargo gives a sensible error if a dependency can't be found
// because of a name mismatch.
// The manifest declares `notquitebar` pointing at a path that actually
// contains a package named `bar`; the error names the searched location
// and the requiring package.
#[test]
fn cargo_compile_with_dep_name_mismatch() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = ["[email protected]"]
[[bin]]
name = "foo"
[dependencies.notquitebar]
path = "bar"
"#,
).file("src/bin/foo.rs", &main_file(r#""i am foo""#, &["bar"]))
.file("bar/Cargo.toml", &basic_bin_manifest("bar"))
.file("bar/src/bar.rs", &main_file(r#""i am bar""#, &[]))
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
r#"error: no matching package named `notquitebar` found
location searched: [CWD]/bar
required by package `foo v0.0.1 ([CWD])`
"#,
).run();
}
// Passing a file name (e.g. `a.rs`) instead of a target name to
// `--bin`/`--example` is rejected; when a target with the stem exists the
// error suggests it ("Did you mean `a`?").
#[test]
fn cargo_compile_with_filename() {
let p = project()
.file("src/lib.rs", "")
.file(
"src/bin/a.rs",
r#"
extern crate foo;
fn main() { println!("hello a.rs"); }
"#,
).file("examples/a.rs", r#"fn main() { println!("example"); }"#)
.build();
p.cargo("build --bin bin.rs")
.with_status(101)
.with_stderr("[ERROR] no bin target named `bin.rs`")
.run();
p.cargo("build --bin a.rs")
.with_status(101)
.with_stderr(
"\
[ERROR] no bin target named `a.rs`
Did you mean `a`?",
).run();
p.cargo("build --example example.rs")
.with_status(101)
.with_stderr("[ERROR] no example target named `example.rs`")
.run();
p.cargo("build --example a.rs")
.with_status(101)
.with_stderr(
"\
[ERROR] no example target named `a.rs`
Did you mean `a`?",
).run();
}
// A pure path dependency needs no registry access, so building with
// `-Zoffline` (nightly-only flag) succeeds.
#[test]
fn cargo_compile_path_with_offline() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#,
).file("src/lib.rs", "")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
.file("bar/src/lib.rs", "")
.build();
p.cargo("build -Zoffline")
.masquerade_as_nightly_cargo()
.run();
}
// Once a registry dependency has been downloaded into the cache by an
// online build, a second project can build against it with `-Zoffline`.
#[test]
fn cargo_compile_with_downloaded_dependency_with_offline() {
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
.file("src/lib.rs", "")
.publish();
{
// make package downloaded
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build").run();
}
let p2 = project()
.at("bar")
.file(
"Cargo.toml",
r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
present_dep = "1.2.3"
"#,
).file("src/lib.rs", "")
.build();
p2.cargo("build -Zoffline")
.masquerade_as_nightly_cargo()
.with_stderr(
"\
[COMPILING] present_dep v1.2.3
[COMPILING] bar v0.1.0 ([..])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
).run();
}
// With `-Zoffline` and an uncached dependency, Cargo must fail with a
// resolution error instead of attempting to update the registry index.
// NOTE(review): the "may with to retry" wording in the expected stderr
// mirrors the message cargo itself emits; if it is a typo it must be fixed
// in cargo first, then here — do not edit only the test string.
#[test]
fn cargo_compile_offline_not_try_update() {
let p = project()
.at("bar")
.file(
"Cargo.toml",
r#"
[project]
name = "bar"
version = "0.1.0"
[dependencies]
not_cached_dep = "1.2.5"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -Zoffline")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr(
"\
error: no matching package named `not_cached_dep` found
location searched: registry `[..]`
required by package `bar v0.1.0 ([..])`
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag.",
).run();
}
// Several versions of `present_dep` exist in the registry, but only 1.2.3
// was cached by a previous online build. An offline build with the looser
// requirement "1.2" must resolve to the cached 1.2.3 rather than the
// (uncached) maximum version 1.2.5.
#[test]
fn compile_offline_without_maxvers_cached() {
Package::new("present_dep", "1.2.1").publish();
Package::new("present_dep", "1.2.2").publish();
Package::new("present_dep", "1.2.3")
.file("Cargo.toml", &basic_manifest("present_dep", "1.2.3"))
.file(
"src/lib.rs",
r#"pub fn get_version()->&'static str {"1.2.3"}"#,
).publish();
Package::new("present_dep", "1.2.5")
.file("Cargo.toml", &basic_manifest("present_dep", "1.2.5"))
.file("src/lib.rs", r#"pub fn get_version(){"1.2.5"}"#)
.publish();
{
// make package cached
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "=1.2.3"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build").run();
}
let p2 = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
present_dep = "1.2"
"#,
).file(
"src/main.rs",
"\
extern crate present_dep;
fn main(){
println!(\"{}\", present_dep::get_version());
}",
).build();
p2.cargo("run -Zoffline")
.masquerade_as_nightly_cargo()
.with_stderr(
"\
[COMPILING] present_dep v1.2.3
[COMPILING] foo v0.1.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
Running `[..]`",
).with_stdout("1.2.3")
.run();
}
// Conflicting transitive requirements on `bad` (baz pins =1.0.0 while qux
// needs >=1.0.1) cannot be resolved; the error explains the full dependency
// chain and the previously selected version.
#[test]
fn incompatible_dependencies() {
Package::new("bad", "0.1.0").publish();
Package::new("bad", "1.0.0").publish();
Package::new("bad", "1.0.1").publish();
Package::new("bad", "1.0.2").publish();
Package::new("bar", "0.1.0").dep("bad", "0.1.0").publish();
Package::new("baz", "0.1.1").dep("bad", "=1.0.0").publish();
Package::new("baz", "0.1.0").dep("bad", "=1.0.0").publish();
Package::new("qux", "0.1.2").dep("bad", ">=1.0.1").publish();
Package::new("qux", "0.1.1").dep("bad", ">=1.0.1").publish();
Package::new("qux", "0.1.0").dep("bad", ">=1.0.1").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.1.0"
baz = "0.1.0"
qux = "0.1.0"
"#,
).file("src/main.rs", "fn main(){}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr_contains(
"\
error: failed to select a version for `bad`.
... required by package `qux v0.1.0`
... which is depended on by `foo v0.0.1 ([..])`
versions that meet the requirements `>= 1.0.1` are: 1.0.2, 1.0.1
all possible versions conflict with previously selected packages.
previously selected package `bad v1.0.0`
... which is depended on by `baz v0.1.0`
... which is depended on by `foo v0.0.1 ([..])`
failed to select a version for `bad` which could resolve this conflict",
).run();
}
// A multi-clause semver requirement (">=1.0.1, <=2.0.0") on `bad` conflicts
// with both versions already forced by bar (=1.0.0) and baz (>=2.0.1); the
// error lists every previously selected version and its dependency chain.
#[test]
fn incompatible_dependencies_with_multi_semver() {
Package::new("bad", "1.0.0").publish();
Package::new("bad", "1.0.1").publish();
Package::new("bad", "2.0.0").publish();
Package::new("bad", "2.0.1").publish();
Package::new("bar", "0.1.0").dep("bad", "=1.0.0").publish();
Package::new("baz", "0.1.0").dep("bad", ">=2.0.1").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.1.0"
baz = "0.1.0"
bad = ">=1.0.1, <=2.0.0"
"#,
).file("src/main.rs", "fn main(){}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr_contains(
"\
error: failed to select a version for `bad`.
... required by package `foo v0.0.1 ([..])`
versions that meet the requirements `>= 1.0.1, <= 2.0.0` are: 2.0.0, 1.0.1
all possible versions conflict with previously selected packages.
previously selected package `bad v2.0.1`
... which is depended on by `baz v0.1.0`
... which is depended on by `foo v0.0.1 ([..])`
previously selected package `bad v1.0.0`
... which is depended on by `bar v0.1.0`
... which is depended on by `foo v0.0.1 ([..])`
failed to select a version for `bad` which could resolve this conflict",
).run();
}
// Simulates a partially populated cache: baz's archive is truncated so the
// first (online) build fails after downloading bar, then the archive is
// restored. The subsequent `-Zoffline` build must still fail because baz
// was never fully cached, and the error explains the offline context.
// NOTE(review): "may with to retry" in the expected stderr mirrors cargo's
// own message text — see cargo_compile_offline_not_try_update.
#[test]
fn compile_offline_while_transitive_dep_not_cached() {
let baz = Package::new("baz", "1.0.0");
let baz_path = baz.archive_dst();
baz.publish();
// Save the real archive bytes, then truncate the file in place.
let mut content = Vec::new();
let mut file = File::open(baz_path.clone()).ok().unwrap();
let _ok = file.read_to_end(&mut content).ok().unwrap();
drop(file);
drop(File::create(baz_path.clone()).ok().unwrap());
Package::new("bar", "0.1.0").dep("baz", "1.0.0").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.0.1"
[dependencies]
bar = "0.1.0"
"#,
).file("src/main.rs", "fn main(){}")
.build();
// simulate download bar, but fail to download baz
p.cargo("build").with_status(101).run();
drop(File::create(baz_path).ok().unwrap().write_all(&content));
p.cargo("build -Zoffline")
.masquerade_as_nightly_cargo()
.with_status(101)
.with_stderr(
"\
error: no matching package named `baz` found
location searched: registry `[..]`
required by package `bar v0.1.0`
... which is depended on by `foo v0.0.1 ([CWD])`
As a reminder, you're using offline mode (-Z offline) \
which can sometimes cause surprising resolution failures, \
if this error is too confusing you may with to retry \
without the offline flag.",
).run();
}
// Bumping the version of a path dependency between builds must not break
// the second build (the lockfile is updated transparently).
#[test]
fn compile_path_dep_then_change_version() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#,
).file("src/lib.rs", "")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
.file("bar/src/lib.rs", "")
.build();
p.cargo("build").run();
// Rewrite bar's manifest with a new version in place.
File::create(&p.root().join("bar/Cargo.toml"))
.unwrap()
.write_all(basic_manifest("bar", "0.0.2").as_bytes())
.unwrap();
p.cargo("build").run();
}
// A lockfile rewritten with CRLF line endings (as some tools on Windows do)
// is still parsed and the rebuild succeeds.
#[test]
fn ignores_carriage_return_in_lockfile() {
let p = project()
.file("src/main.rs", r"mod a; fn main() {}")
.file("src/a.rs", "")
.build();
p.cargo("build").run();
let lockfile = p.root().join("Cargo.lock");
let mut lock = String::new();
File::open(&lockfile)
.unwrap()
.read_to_string(&mut lock)
.unwrap();
// Convert the freshly written lockfile to CRLF endings.
let lock = lock.replace("\n", "\r\n");
File::create(&lockfile)
.unwrap()
.write_all(lock.as_bytes())
.unwrap();
p.cargo("build").run();
}
#[test]
fn cargo_default_env_metadata_env_var() {
// Ensure that path dep + dylib + env_var get metadata
// (even though path_dep + dylib should not)
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#,
).file("src/lib.rs", "// hi")
.file(
"bar/Cargo.toml",
r#"
[package]
name = "bar"
version = "0.0.1"
authors = []
[lib]
name = "bar"
crate_type = ["dylib"]
"#,
).file("bar/src/lib.rs", "// hello")
.build();
// No metadata on libbar since it's a dylib path dependency
// (the --extern path below has no `-<hash>` suffix in the file name).
p.cargo("build -v")
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1 ([CWD]/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/debug/deps`
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/debug/deps \
--extern bar=[CWD]/target/debug/deps/{prefix}bar{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)).run();
p.cargo("clean").run();
// If you set the env-var, then we expect metadata on libbar
// (note the `bar-[..]` hash suffix in the --extern path).
p.cargo("build -v")
.env("__CARGO_DEFAULT_LIB_METADATA", "stable")
.with_stderr(&format!(
"\
[COMPILING] bar v0.0.1 ([CWD]/bar)
[RUNNING] `rustc --crate-name bar bar/src/lib.rs --color never --crate-type dylib \
--emit=dep-info,link \
-C prefer-dynamic -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/debug/deps`
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
-C extra-filename=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/debug/deps \
--extern bar=[CWD]/target/debug/deps/{prefix}bar-[..]{suffix}`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX,
)).run();
}
// The CARGO_PKG_* environment variables (version components, name,
// homepage, description, manifest dir) are available via env!() at compile
// time in both the bin and lib targets, and agree with the manifest.
#[test]
fn crate_env_vars() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
description = "This is foo"
homepage = "http://example.com"
authors = ["[email protected]"]
"#,
).file(
"src/main.rs",
r#"
extern crate foo;
static VERSION_MAJOR: &'static str = env!("CARGO_PKG_VERSION_MAJOR");
static VERSION_MINOR: &'static str = env!("CARGO_PKG_VERSION_MINOR");
static VERSION_PATCH: &'static str = env!("CARGO_PKG_VERSION_PATCH");
static VERSION_PRE: &'static str = env!("CARGO_PKG_VERSION_PRE");
static VERSION: &'static str = env!("CARGO_PKG_VERSION");
static CARGO_MANIFEST_DIR: &'static str = env!("CARGO_MANIFEST_DIR");
static PKG_NAME: &'static str = env!("CARGO_PKG_NAME");
static HOMEPAGE: &'static str = env!("CARGO_PKG_HOMEPAGE");
static DESCRIPTION: &'static str = env!("CARGO_PKG_DESCRIPTION");
fn main() {
let s = format!("{}-{}-{} @ {} in {}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE,
CARGO_MANIFEST_DIR);
assert_eq!(s, foo::version());
println!("{}", s);
assert_eq!("foo", PKG_NAME);
assert_eq!("http://example.com", HOMEPAGE);
assert_eq!("This is foo", DESCRIPTION);
let s = format!("{}.{}.{}-{}", VERSION_MAJOR,
VERSION_MINOR, VERSION_PATCH, VERSION_PRE);
assert_eq!(s, VERSION);
}
"#,
).file(
"src/lib.rs",
r#"
pub fn version() -> String {
format!("{}-{}-{} @ {} in {}",
env!("CARGO_PKG_VERSION_MAJOR"),
env!("CARGO_PKG_VERSION_MINOR"),
env!("CARGO_PKG_VERSION_PATCH"),
env!("CARGO_PKG_VERSION_PRE"),
env!("CARGO_MANIFEST_DIR"))
}
"#,
).build();
println!("build");
p.cargo("build -v").run();
println!("bin");
p.process(&p.bin("foo")).with_stdout("0-5-1 @ alpha.1 in [CWD]").run();
println!("test");
p.cargo("test -v").run();
}
// CARGO_PKG_AUTHORS joins the manifest's authors list with ':' and is
// consistent between the bin and lib compilations.
#[test]
fn crate_authors_env_vars() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.1-alpha.1"
authors = ["[email protected]", "[email protected]"]
"#,
).file(
"src/main.rs",
r#"
extern crate foo;
static AUTHORS: &'static str = env!("CARGO_PKG_AUTHORS");
fn main() {
let s = "[email protected]:[email protected]";
assert_eq!(AUTHORS, foo::authors());
println!("{}", AUTHORS);
assert_eq!(s, AUTHORS);
}
"#,
).file(
"src/lib.rs",
r#"
pub fn authors() -> String {
format!("{}", env!("CARGO_PKG_AUTHORS"))
}
"#,
).build();
println!("build");
p.cargo("build -v").run();
println!("bin");
p.process(&p.bin("foo"))
.with_stdout("[email protected]:[email protected]")
.run();
println!("test");
p.cargo("test -v").run();
}
// The tester may already have LD_LIBRARY_PATH=::/foo/bar which leads to a false positive error
// Helper: strip empty components from the host's dynamic-library search
// path before running `execs`, so the assertions in the tests below are not
// tripped by pre-existing empty entries in the tester's environment.
fn setenv_for_removing_empty_component(mut execs: Execs) -> Execs {
let v = dylib_path_envvar();
if let Ok(search_path) = env::var(v) {
let new_search_path =
env::join_paths(env::split_paths(&search_path).filter(|e| !e.as_os_str().is_empty()))
.expect("join_paths");
execs.env(v, new_search_path); // build_command() will override LD_LIBRARY_PATH accordingly
}
execs
}
// Regression test for #4277
// The dylib search path cargo injects when running a binary must not
// contain empty components (which would implicitly add the CWD).
#[test]
fn crate_library_path_env_var() {
let p = project()
.file(
"src/main.rs",
&format!(
r##"
fn main() {{
let search_path = env!("{}");
let paths = std::env::split_paths(&search_path).collect::<Vec<_>>();
assert!(!paths.contains(&"".into()));
}}
"##,
dylib_path_envvar()
),
).build();
setenv_for_removing_empty_component(p.cargo("run")).run();
}
// Regression test for #4277
// A bogus `libc.so.6` in the project root must not be picked up by the
// dynamic loader during the build.
#[test]
fn build_with_fake_libc_not_loading() {
let p = project()
.file("src/main.rs", "fn main() {}")
.file("src/lib.rs", r#" "#)
.file("libc.so.6", r#""#)
.build();
setenv_for_removing_empty_component(p.cargo("build")).run();
}
// this is testing that src/<pkg-name>.rs still works (for now)
// The legacy location is accepted with a deprecation warning, and both the
// rlib and the platform dylib are produced.
#[test]
fn many_crate_types_old_style_lib_location() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#,
).file("src/foo.rs", "pub fn foo() {}")
.build();
p.cargo("build")
.with_stderr_contains(
"\
[WARNING] path `[..]src/foo.rs` was erroneously implicitly accepted for library `foo`,
please rename the file to `src/lib.rs` or set lib.path in Cargo.toml",
).run();
assert!(p.root().join("target/debug/libfoo.rlib").is_file());
let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
assert!(p.root().join("target/debug").join(&fname).is_file());
}
// With the source at the conventional src/lib.rs, multiple crate-types
// (rlib + dylib) build without warnings and both artifacts exist.
#[test]
fn many_crate_types_correct() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[lib]
name = "foo"
crate_type = ["rlib", "dylib"]
"#,
).file("src/lib.rs", "pub fn foo() {}")
.build();
p.cargo("build").run();
assert!(p.root().join("target/debug/libfoo.rlib").is_file());
let fname = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
assert!(p.root().join("target/debug").join(&fname).is_file());
}
// A package depending on itself (path = ".") is a dependency cycle and is
// rejected with an explicit cycle report.
#[test]
fn self_dependency() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.test]
path = "."
[lib]
name = "test"
path = "src/test.rs"
"#,
).file("src/test.rs", "fn main() {}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] cyclic package dependency: package `test v0.0.0 ([CWD])` depends on itself. Cycle:
package `test v0.0.0 ([CWD])`",
).run();
}
// A dangling symlink in the project directory must not break the build.
#[test]
fn ignore_broken_symlinks() {
// windows and symlinks don't currently agree that well
if cfg!(windows) {
return;
}
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.symlink("Notafile", "bar")
.build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
}
// A manifest with neither src/lib.rs, src/main.rs, [lib] nor [[bin]] has no
// targets; the manifest-parse error explains all four options.
#[test]
fn missing_lib_and_bin() {
let p = project().build();
p.cargo("build")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]Cargo.toml`
Caused by:
no targets specified in the manifest
either src/lib.rs, src/main.rs, a [lib] section, or [[bin]] section must be present\n",
).run();
}
// `lto = true` under [profile.release] must translate into a `-C lto` flag
// on the rustc invocation for a release build.
#[test]
fn lto_build() {
// FIXME: currently this hits a linker bug on 32-bit MSVC
if cfg!(all(target_env = "msvc", target_pointer_width = "32")) {
return;
}
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[profile.release]
lto = true
"#,
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("build -v --release")
.with_stderr(
"\
[COMPILING] test v0.0.0 ([CWD])
[RUNNING] `rustc --crate-name test src/main.rs --color never --crate-type bin \
--emit=dep-info,link \
-C opt-level=3 \
-C lto \
-C metadata=[..] \
--out-dir [CWD]/target/release/deps \
-L dependency=[CWD]/target/release/deps`
[FINISHED] release [optimized] target(s) in [..]
",
).run();
}
// `build -v` prints the full rustc command line for a plain debug build.
#[test]
fn verbose_build() {
let p = project().file("src/lib.rs", "").build();
p.cargo("build -v")
.with_stderr(
"\
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link -C debuginfo=2 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/debug/deps`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
}
// `build -v --release` uses `-C opt-level=3`, no debuginfo, and the
// release deps directory.
#[test]
fn verbose_release_build() {
let p = project().file("src/lib.rs", "").build();
p.cargo("build -v --release")
.with_stderr(
"\
[COMPILING] foo v0.0.1 ([CWD])
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/release/deps`
[FINISHED] release [optimized] target(s) in [..]
",
).run();
}
// A release build of a crate with a dylib+rlib path dependency passes both
// artifacts via `--extern foo=...` (one per crate type) to the consumer.
#[test]
fn verbose_release_build_deps() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "test"
version = "0.0.0"
authors = []
[dependencies.foo]
path = "foo"
"#,
).file("src/lib.rs", "")
.file(
"foo/Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
crate_type = ["dylib", "rlib"]
"#,
).file("foo/src/lib.rs", "")
.build();
p.cargo("build -v --release")
.with_stderr(&format!(
"\
[COMPILING] foo v0.0.0 ([CWD]/foo)
[RUNNING] `rustc --crate-name foo foo/src/lib.rs --color never \
--crate-type dylib --crate-type rlib \
--emit=dep-info,link \
-C prefer-dynamic \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/release/deps`
[COMPILING] test v0.0.0 ([CWD])
[RUNNING] `rustc --crate-name test src/lib.rs --color never --crate-type lib \
--emit=dep-info,link \
-C opt-level=3 \
-C metadata=[..] \
--out-dir [..] \
-L dependency=[CWD]/target/release/deps \
--extern foo=[CWD]/target/release/deps/{prefix}foo{suffix} \
--extern foo=[CWD]/target/release/deps/libfoo.rlib`
[FINISHED] release [optimized] target(s) in [..]
",
prefix = env::consts::DLL_PREFIX,
suffix = env::consts::DLL_SUFFIX
)).run();
}
// [[example]] sections with explicit non-default paths are honored; the
// built example binaries run with the expected output.
#[test]
fn explicit_examples() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[lib]
name = "foo"
path = "src/lib.rs"
[[example]]
name = "hello"
path = "examples/ex-hello.rs"
[[example]]
name = "goodbye"
path = "examples/ex-goodbye.rs"
"#,
).file(
"src/lib.rs",
r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#,
).file(
"examples/ex-hello.rs",
r#"
extern crate foo;
fn main() { println!("{}, {}!", foo::get_hello(), foo::get_world()); }
"#,
).file(
"examples/ex-goodbye.rs",
r#"
extern crate foo;
fn main() { println!("{}, {}!", foo::get_goodbye(), foo::get_world()); }
"#,
).build();
p.cargo("test -v").run();
p.process(&p.bin("examples/hello"))
.with_stdout("Hello, World!\n")
.run();
p.process(&p.bin("examples/goodbye"))
.with_stdout("Goodbye, World!\n")
.run();
}
// An [[example]] entry whose source file cannot be inferred or found fails
// manifest parsing with a hint to set example.path.
#[test]
fn non_existing_example() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[lib]
name = "foo"
path = "src/lib.rs"
[[example]]
name = "hello"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("test -v")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `hello` example, specify example.path",
).run();
}
// The manifest declares bin `foo` but only src/bin/ehlo.rs exists, so the
// `foo` bin source can't be located; parsing fails with a bin.path hint.
#[test]
fn non_existing_binary() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/lib.rs", "")
.file("src/bin/ehlo.rs", "")
.build();
p.cargo("build -v")
.with_status(101)
.with_stderr(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `foo` bin, specify bin.path",
).run();
}
// Three legacy locations for a [[bin]] named `bar` (src/main.rs,
// src/bin/main.rs, src/<name>.rs) are still accepted, each with a
// deprecation warning asking for an explicit bin.path.
#[test]
fn legacy_binary_paths_warnings() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#,
).file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.build();
p.cargo("build -v")
.with_stderr_contains(
"\
[WARNING] path `[..]src/main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml",
).run();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#,
).file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.build();
p.cargo("build -v")
.with_stderr_contains(
"\
[WARNING] path `[..]src/bin/main.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml",
).run();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "1.0.0"
authors = []
[[bin]]
name = "bar"
"#,
).file("src/bar.rs", "fn main() {}")
.build();
p.cargo("build -v")
.with_stderr_contains(
"\
[WARNING] path `[..]src/bar.rs` was erroneously implicitly accepted for binary `bar`,
please set bin.path in Cargo.toml",
).run();
}
// Files under examples/ are discovered as example targets without any
// [[example]] sections; they build with `cargo test` and run correctly.
#[test]
fn implicit_examples() {
let p = project()
.file(
"src/lib.rs",
r#"
pub fn get_hello() -> &'static str { "Hello" }
pub fn get_goodbye() -> &'static str { "Goodbye" }
pub fn get_world() -> &'static str { "World" }
"#,
).file(
"examples/hello.rs",
r#"
extern crate foo;
fn main() {
println!("{}, {}!", foo::get_hello(), foo::get_world());
}
"#,
).file(
"examples/goodbye.rs",
r#"
extern crate foo;
fn main() {
println!("{}, {}!", foo::get_goodbye(), foo::get_world());
}
"#,
).build();
p.cargo("test").run();
p.process(&p.bin("examples/hello"))
.with_stdout("Hello, World!\n")
.run();
p.process(&p.bin("examples/goodbye"))
.with_stdout("Goodbye, World!\n")
.run();
}
// A default (dev-profile) build keeps debug_assertions enabled, so the
// binary takes the "slow" branch.
#[test]
fn standard_build_no_ndebug() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file(
"src/foo.rs",
r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#,
).build();
p.cargo("build").run();
p.process(&p.bin("foo")).with_stdout("slow\n").run();
}
// A `--release` build disables debug_assertions, so the same binary takes
// the "fast" branch.
#[test]
fn release_build_ndebug() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file(
"src/foo.rs",
r#"
fn main() {
if cfg!(debug_assertions) {
println!("slow")
} else {
println!("fast")
}
}
"#,
).build();
p.cargo("build --release").run();
p.process(&p.release_bin("foo")).with_stdout("fast\n").run();
}
// src/main.rs alone (no [[bin]] section) yields a bin named after the
// package, which builds and runs.
#[test]
fn inferred_main_bin() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build").run();
p.process(&p.bin("foo")).run();
}
// Removing a dependency from the manifest while the code still does
// `extern crate bar` makes the rebuild fail (status 101).
#[test]
fn deletion_causes_failure() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.bar]
path = "bar"
"#,
).file("src/main.rs", "extern crate bar; fn main() {}")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.1"))
.file("bar/src/lib.rs", "")
.build();
p.cargo("build").run();
// Replace the manifest with one that has no [dependencies] at all.
p.change_file("Cargo.toml", &basic_manifest("foo", "0.0.1"));
p.cargo("build").with_status(101).run();
}
// A broken Cargo.toml inside target/ must be ignored — the target dir is
// cargo's output area, not a package source.
#[test]
fn bad_cargo_toml_in_target_dir() {
let p = project()
.file("src/main.rs", "fn main() {}")
.file("target/Cargo.toml", "bad-toml")
.build();
p.cargo("build").run();
p.process(&p.bin("foo")).run();
}
// A package whose name collides with a well-known crate name ("syntax")
// still builds its own lib and bin normally.
#[test]
fn lib_with_standard_name() {
let p = project()
.file("Cargo.toml", &basic_manifest("syntax", "0.0.1"))
.file("src/lib.rs", "pub fn foo() {}")
.file(
"src/main.rs",
"extern crate syntax; fn main() { syntax::foo() }",
).build();
p.cargo("build")
.with_stderr(
"\
[COMPILING] syntax v0.0.1 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
}
// A staticlib-only crate builds; RUST_LOG is set as a regression check for
// issue #1381 (see inline comment).
#[test]
fn simple_staticlib() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib"]
"#,
).file("src/lib.rs", "pub fn foo() {}")
.build();
// env var is a test for #1381
p.cargo("build").env("RUST_LOG", "nekoneko=trace").run();
}
// A crate producing both staticlib and rlib plus a bin that links the rlib
// builds cleanly in verbose mode.
#[test]
fn staticlib_rlib_and_bin() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
crate-type = ["staticlib", "rlib"]
"#,
).file("src/lib.rs", "pub fn foo() {}")
.file("src/main.rs", "extern crate foo; fn main() { foo::foo(); }")
.build();
p.cargo("build -v").run();
}
// Top-level `bin = []` opts out of bin auto-discovery, so the broken
// src/main.rs is never compiled and the build succeeds.
#[test]
fn opt_out_of_bin() {
let p = project()
.file(
"Cargo.toml",
r#"
bin = []
[package]
name = "foo"
authors = []
version = "0.0.1"
"#,
).file("src/lib.rs", "")
.file("src/main.rs", "bad syntax")
.build();
p.cargo("build").run();
}
// An explicit lib.path pointing at a non-default file name builds.
#[test]
fn single_lib() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.1"
[lib]
name = "foo"
path = "src/bar.rs"
"#,
).file("src/bar.rs", "")
.build();
p.cargo("build").run();
}
// Files matched by the package's `exclude` globs do not count toward
// freshness: touching an excluded file must not trigger a rebuild.
#[test]
fn freshness_ignores_excluded() {
let foo = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = "build.rs"
exclude = ["src/b*.rs"]
"#,
).file("build.rs", "fn main() {}")
.file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
// Backdate mtimes so the first build's outputs are newer than sources.
foo.root().move_into_the_past();
foo.cargo("build")
.with_stderr(
"\
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
// Smoke test to make sure it doesn't compile again
println!("first pass");
foo.cargo("build").with_stdout("").run();
// Modify an ignored file and make sure we don't rebuild
println!("second pass");
File::create(&foo.root().join("src/bar.rs")).unwrap();
foo.cargo("build").with_stdout("").run();
}
// A rebuild triggered by a new source file re-runs the build script with
// the same OUT_DIR, so artifacts written there remain usable.
#[test]
fn rebuild_preserves_out_dir() {
let foo = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
build = 'build.rs'
"#,
).file(
"build.rs",
r#"
use std::env;
use std::fs::File;
use std::path::Path;
fn main() {
let path = Path::new(&env::var("OUT_DIR").unwrap()).join("foo");
if env::var_os("FIRST").is_some() {
File::create(&path).unwrap();
} else {
File::create(&path).unwrap();
}
}
"#,
).file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.build();
foo.root().move_into_the_past();
foo.cargo("build")
.env("FIRST", "1")
.with_stderr(
"\
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
// Adding a new source file forces a rebuild (without FIRST set).
File::create(&foo.root().join("src/bar.rs")).unwrap();
foo.cargo("build")
.with_stderr(
"\
[COMPILING] foo v0.0.0 ([CWD])
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
}
// A path dependency that provides only a bin target (no lib) does not
// break the dependent's build.
#[test]
fn dep_no_libs() {
let foo = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[dependencies.bar]
path = "bar"
"#,
).file("src/lib.rs", "pub fn bar() -> i32 { 1 }")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.0.0"))
.file("bar/src/main.rs", "")
.build();
foo.cargo("build").run();
}
// A lib.path containing a space is handled correctly, including the
// freshness check on a no-op rebuild.
#[test]
fn recompile_space_in_name() {
let foo = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.0"
authors = []
[lib]
name = "foo"
path = "src/my lib.rs"
"#,
).file("src/my lib.rs", "")
.build();
foo.cargo("build").run();
foo.root().move_into_the_past();
// Second build must be a fresh no-op (empty stdout).
foo.cargo("build").with_stdout("").run();
}
// A directory in the project that cannot be traversed (mode 0o644, no
// execute bit) must not break the build; permissions are restored after so
// test cleanup can delete the tree.
#[cfg(unix)]
#[test]
fn ignore_bad_directories() {
use std::os::unix::prelude::*;
let foo = project()
.file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
.file("src/lib.rs", "")
.build();
let dir = foo.root().join("tmp");
fs::create_dir(&dir).unwrap();
let stat = fs::metadata(&dir).unwrap();
let mut perms = stat.permissions();
perms.set_mode(0o644);
fs::set_permissions(&dir, perms.clone()).unwrap();
foo.cargo("build").run();
perms.set_mode(0o755);
fs::set_permissions(&dir, perms).unwrap();
}
// Invalid TOML in .cargo/config aborts with a layered error chain pointing
// at the parse failure.
#[test]
fn bad_cargo_config() {
let foo = project()
.file("Cargo.toml", &basic_manifest("foo", "0.0.0"))
.file("src/lib.rs", "")
.file(".cargo/config", "this is not valid toml")
.build();
foo.cargo("build -v")
.with_status(101)
.with_stderr(
"\
[ERROR] could not load Cargo configuration
Caused by:
could not parse TOML configuration in `[..]`
Caused by:
could not parse input as TOML
Caused by:
expected an equals, found an identifier at line 1
",
).run();
}
// [target.<host>] dependencies, build-dependencies, and dev-dependencies
// are all honored when the target triple matches the current host.
#[test]
fn cargo_platform_specific_dependency() {
let host = rustc_host();
let p = project()
.file(
"Cargo.toml",
&format!(
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
build = "build.rs"
[target.{host}.dependencies]
dep = {{ path = "dep" }}
[target.{host}.build-dependencies]
build = {{ path = "build" }}
[target.{host}.dev-dependencies]
dev = {{ path = "dev" }}
"#,
host = host
),
).file("src/main.rs", "extern crate dep; fn main() { dep::dep() }")
.file(
"tests/foo.rs",
"extern crate dev; #[test] fn foo() { dev::dev() }",
).file(
"build.rs",
"extern crate build; fn main() { build::build(); }",
).file("dep/Cargo.toml", &basic_manifest("dep", "0.5.0"))
.file("dep/src/lib.rs", "pub fn dep() {}")
.file("build/Cargo.toml", &basic_manifest("build", "0.5.0"))
.file("build/src/lib.rs", "pub fn build() {}")
.file("dev/Cargo.toml", &basic_manifest("dev", "0.5.0"))
.file("dev/src/lib.rs", "pub fn dev() {}")
.build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
// `cargo test` additionally exercises the dev-dependency.
p.cargo("test").run();
}
// A dependency gated on a non-matching target ("wrong-target") is not
// built, so the `extern crate baz` inside it never resolves and the main
// crate's use of `bar` fails the build (status 101).
#[test]
fn bad_platform_specific_dependency() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[target.wrong-target.dependencies.bar]
path = "bar"
"#,
).file("src/main.rs", &main_file(r#""{}", bar::gimme()"#, &["bar"]))
.file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
.file(
"bar/src/lib.rs",
r#"extern crate baz; pub fn gimme() -> String { format!("") }"#,
).build();
p.cargo("build").with_status(101).run();
}
// A dependency scoped to a non-existing target triple is skipped entirely
// (its intentionally-invalid source is never compiled), yet it still
// appears in the generated Cargo.lock.
#[test]
fn cargo_platform_specific_dependency_wrong_platform() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[target.non-existing-triplet.dependencies.bar]
path = "bar"
"#,
).file("src/main.rs", "fn main() {}")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
.file(
"bar/src/lib.rs",
"invalid rust file, should not be compiled",
).build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
p.process(&p.bin("foo")).run();
// The skipped dependency must still be recorded in the lockfile.
let loc = p.root().join("Cargo.lock");
let mut lockfile = String::new();
File::open(&loc)
.unwrap()
.read_to_string(&mut lockfile)
.unwrap();
assert!(lockfile.contains("bar"));
}
// An [[example]] with crate-type = ["lib"] builds as a library artifact.
#[test]
fn example_as_lib() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["lib"]
"#,
).file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
p.cargo("build --example=ex").run();
assert!(p.example_lib("ex", "lib").is_file());
}
// An [[example]] with crate-type = ["rlib"] builds as an rlib artifact.
#[test]
fn example_as_rlib() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["rlib"]
"#,
).file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
p.cargo("build --example=ex").run();
assert!(p.example_lib("ex", "rlib").is_file());
}
// An [[example]] with crate-type = ["dylib"] builds as a dynamic library.
#[test]
fn example_as_dylib() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["dylib"]
"#,
).file("src/lib.rs", "")
.file("examples/ex.rs", "")
.build();
p.cargo("build --example=ex").run();
assert!(p.example_lib("ex", "dylib").is_file());
}
// An [[example]] with crate-type = ["proc-macro"] builds as a proc-macro.
// Skipped on non-nightly toolchains (uses #![feature(proc_macro)]).
#[test]
fn example_as_proc_macro() {
if !is_nightly() {
return;
}
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[[example]]
name = "ex"
crate-type = ["proc-macro"]
"#,
).file("src/lib.rs", "")
.file("examples/ex.rs", "#![feature(proc_macro)]")
.build();
p.cargo("build --example=ex").run();
assert!(p.example_lib("ex", "proc-macro").is_file());
}
// A binary and an example sharing the name "foo" do not clobber each
// other: `test --no-run` leaves the uplifted bin absent while the example
// artifact exists. Run twice to check the behavior is stable when fresh.
#[test]
fn example_bin_same_name() {
let p = project()
.file("src/main.rs", "fn main() {}")
.file("examples/foo.rs", "fn main() {}")
.build();
p.cargo("test --no-run -v").run();
assert!(!p.bin("foo").is_file());
// We expect a file of the form bin/foo-{metadata_hash}
assert!(p.bin("examples/foo").is_file());
p.cargo("test --no-run -v").run();
assert!(!p.bin("foo").is_file());
// We expect a file of the form bin/foo-{metadata_hash}
assert!(p.bin("examples/foo").is_file());
}
// Deleting the produced binary after a successful `cargo run` and running
// again rebuilds/relinks it rather than erroring.
#[test]
fn compile_then_delete() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("run -v").run();
assert!(p.bin("foo").is_file());
if cfg!(windows) {
// On windows unlinking immediately after running often fails, so sleep
sleep_ms(100);
}
fs::remove_file(&p.bin("foo")).unwrap();
p.cargo("run -v").run();
}
// A transitive dependency (foo -> aaaaa -> bbbbb) is not directly visible
// to the root crate: `extern crate bbbbb` in foo's main.rs must fail with
// "can't find crate".
#[test]
fn transitive_dependencies_not_available() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.aaaaa]
path = "a"
"#,
).file(
"src/main.rs",
"extern crate bbbbb; extern crate aaaaa; fn main() {}",
).file(
"a/Cargo.toml",
r#"
[package]
name = "aaaaa"
version = "0.0.1"
authors = []
[dependencies.bbbbb]
path = "../b"
"#,
).file("a/src/lib.rs", "extern crate bbbbb;")
.file("b/Cargo.toml", &basic_manifest("bbbbb", "0.0.1"))
.file("b/src/lib.rs", "")
.build();
p.cargo("build -v")
.with_status(101)
.with_stderr_contains("[..] can't find crate for `bbbbb`[..]")
.run();
}
// A dependency cycle (foo -> a -> foo) is rejected with a "cyclic package
// dependency" error naming the cycle.
#[test]
fn cyclic_deps_rejected() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.a]
path = "a"
"#,
).file("src/lib.rs", "")
.file(
"a/Cargo.toml",
r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dependencies.foo]
path = ".."
"#,
).file("a/src/lib.rs", "")
.build();
p.cargo("build -v")
.with_status(101)
.with_stderr(
"[ERROR] cyclic package dependency: package `a v0.0.1 ([CWD]/a)` depends on itself. Cycle:
package `a v0.0.1 ([CWD]/a)`
... which is depended on by `foo v0.0.1 ([CWD])`",
).run();
}
// A lib with crate-type = ["dylib", "rlib"] produces predictably-named
// artifacts: libfoo.rlib plus the platform dylib (DLL_PREFIX/DLL_SUFFIX).
#[test]
fn predictable_filenames() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo"
crate-type = ["dylib", "rlib"]
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -v").run();
assert!(p.root().join("target/debug/libfoo.rlib").is_file());
let dylib_name = format!("{}foo{}", env::consts::DLL_PREFIX, env::consts::DLL_SUFFIX);
assert!(p.root().join("target/debug").join(dylib_name).is_file());
}
// Package "foo-bar" is importable as `foo_bar` (dashes map to underscores
// in the crate name) while the binary keeps the dashed name.
#[test]
fn dashes_to_underscores() {
let p = project()
.file("Cargo.toml", &basic_manifest("foo-bar", "0.0.1"))
.file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
p.cargo("build -v").run();
assert!(p.bin("foo-bar").is_file());
}
// An explicit [lib] name containing a dash ("foo-bar") is invalid and the
// build fails (status 101).
#[test]
fn dashes_in_crate_name_bad() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[lib]
name = "foo-bar"
"#,
).file("src/lib.rs", "")
.file("src/main.rs", "extern crate foo_bar; fn main() {}")
.build();
p.cargo("build -v").with_status(101).run();
}
// The RUSTC environment variable overrides the compiler; pointing it at a
// nonexistent program fails with "could not execute process ... -vV".
#[test]
fn rustc_env_var() {
let p = project().file("src/lib.rs", "").build();
p.cargo("build -v")
.env("RUSTC", "rustc-that-does-not-exist")
.with_status(101)
.with_stderr(
"\
[ERROR] could not execute process `rustc-that-does-not-exist -vV` ([..])
Caused by:
[..]
",
).run();
assert!(!p.bin("a").is_file());
}
// Target filtering: `--lib` builds no binaries; `--bin=a --example=a`
// builds exactly the named bin and example and nothing else.
#[test]
fn filtering() {
let p = project()
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
p.cargo("build --lib").run();
assert!(!p.bin("a").is_file());
p.cargo("build --bin=a --example=a").run();
assert!(p.bin("a").is_file());
assert!(!p.bin("b").is_file());
assert!(p.bin("examples/a").is_file());
assert!(!p.bin("examples/b").is_file());
}
// `--bins` builds all binary targets but no examples.
#[test]
fn filtering_implicit_bins() {
let p = project()
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
p.cargo("build --bins").run();
assert!(p.bin("a").is_file());
assert!(p.bin("b").is_file());
assert!(!p.bin("examples/a").is_file());
assert!(!p.bin("examples/b").is_file());
}
// `--examples` builds all examples but no binaries.
#[test]
fn filtering_implicit_examples() {
let p = project()
.file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/a.rs", "fn main() {}")
.file("examples/b.rs", "fn main() {}")
.build();
p.cargo("build --examples").run();
assert!(!p.bin("a").is_file());
assert!(!p.bin("b").is_file());
assert!(p.bin("examples/a").is_file());
assert!(p.bin("examples/b").is_file());
}
// A dotfile in src/bin (".a.rs") is not inferred as a binary target.
#[test]
fn ignore_dotfile() {
let p = project()
.file("src/bin/.a.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.build();
p.cargo("build").run();
}
// Dot-directories (.git, .pc) containing stray Cargo.toml files are
// ignored during target/package discovery.
#[test]
fn ignore_dotdirs() {
let p = project()
.file("src/bin/a.rs", "fn main() {}")
.file(".git/Cargo.toml", "")
.file(".pc/dummy-fix.patch/Cargo.toml", "")
.build();
p.cargo("build").run();
}
// A project whose root directory itself starts with a dot still builds.
#[test]
fn dotdir_root() {
let p = ProjectBuilder::new(root().join(".foo"))
.file("src/bin/a.rs", "fn main() {}")
.build();
p.cargo("build").run();
}
// CARGO_TARGET_DIR redirects build output; later, the env var takes
// precedence over a .cargo/config `build.target-dir` setting.
#[test]
fn custom_target_dir_env() {
let p = project().file("src/main.rs", "fn main() {}").build();
let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
p.cargo("build").env("CARGO_TARGET_DIR", "foo/target").run();
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(!p.root().join("target/debug").join(&exe_name).is_file());
// Without the env var, the default target dir is used again.
p.cargo("build").run();
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(p.root().join("target/debug").join(&exe_name).is_file());
fs::create_dir(p.root().join(".cargo")).unwrap();
File::create(p.root().join(".cargo/config"))
.unwrap()
.write_all(
br#"
[build]
target-dir = "foo/target"
"#,
).unwrap();
// Env var wins over the config file setting.
p.cargo("build").env("CARGO_TARGET_DIR", "bar/target").run();
assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(p.root().join("target/debug").join(&exe_name).is_file());
}
#[test]
fn custom_target_dir_line_parameter() {
let p = project().file("src/main.rs", "fn main() {}").build();
let exe_name = format!("foo{}", env::consts::EXE_SUFFIX);
p.cargo("build --target-dir foo/target").run();
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(!p.root().join("target/debug").join(&exe_name).is_file());
p.cargo("build").run();
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(p.root().join("target/debug").join(&exe_name).is_file());
fs::create_dir(p.root().join(".cargo")).unwrap();
File::create(p.root().join(".cargo/config"))
.unwrap()
.write_all(
br#"
[build]
target-dir = "foo/target"
"#,
).unwrap();
p.cargo("build --target-dir bar/target").run();
assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(p.root().join("target/debug").join(&exe_name).is_file());
p.cargo("build --target-dir foobar/target")
.env("CARGO_TARGET_DIR", "bar/target")
.run();
assert!(
p.root()
.join("foobar/target/debug")
.join(&exe_name)
.is_file()
);
assert!(p.root().join("bar/target/debug").join(&exe_name).is_file());
assert!(p.root().join("foo/target/debug").join(&exe_name).is_file());
assert!(p.root().join("target/debug").join(&exe_name).is_file());
}
// `build -p d1 -p d2 -p foo` builds all three packages; each resulting
// binary runs and prints its own name.
#[test]
fn build_multiple_packages() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[dependencies.d2]
path = "d2"
[[bin]]
name = "foo"
"#,
).file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", &basic_bin_manifest("d1"))
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.file(
"d2/Cargo.toml",
r#"
[package]
name = "d2"
version = "0.0.1"
authors = []
[[bin]]
name = "d2"
doctest = false
"#,
).file("d2/src/main.rs", "fn main() { println!(\"d2\"); }")
.build();
p.cargo("build -p d1 -p d2 -p foo").run();
assert!(p.bin("foo").is_file());
p.process(&p.bin("foo")).with_stdout("i am foo\n").run();
// Dependency binaries land directly in target/debug.
let d1_path = &p
.build_dir()
.join("debug")
.join(format!("d1{}", env::consts::EXE_SUFFIX));
let d2_path = &p
.build_dir()
.join("debug")
.join(format!("d2{}", env::consts::EXE_SUFFIX));
assert!(d1_path.is_file());
p.process(d1_path).with_stdout("d1").run();
assert!(d2_path.is_file());
p.process(d2_path).with_stdout("d2").run();
}
// A `-p` spec matching no package errors out, both alone and when mixed
// with a valid spec.
#[test]
fn invalid_spec() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[dependencies.d1]
path = "d1"
[[bin]]
name = "foo"
"#,
).file("src/bin/foo.rs", &main_file(r#""i am foo""#, &[]))
.file("d1/Cargo.toml", &basic_bin_manifest("d1"))
.file("d1/src/lib.rs", "")
.file("d1/src/main.rs", "fn main() { println!(\"d1\"); }")
.build();
p.cargo("build -p notAValidDep")
.with_status(101)
.with_stderr("[ERROR] package id specification `notAValidDep` matched no packages")
.run();
p.cargo("build -p d1 -p notAValidDep")
.with_status(101)
.with_stderr("[ERROR] package id specification `notAValidDep` matched no packages")
.run();
}
// A Cargo.toml starting with a UTF-8 byte-order mark (U+FEFF) parses fine.
#[test]
fn manifest_with_bom_is_ok() {
let p = project()
.file(
"Cargo.toml",
"\u{FEFF}
[package]
name = \"foo\"
version = \"0.0.1\"
authors = []
",
).file("src/lib.rs", "")
.build();
p.cargo("build -v").run();
}
// `[profile.dev] panic = 'abort'` propagates `-C panic=abort` to rustc.
#[test]
fn panic_abort_compiles_with_panic_abort() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[profile.dev]
panic = 'abort'
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -v")
.with_stderr_contains("[..] -C panic=abort [..]")
.run();
}
// `--color always`/`--color never` on the cargo command line is forwarded
// to the rustc invocation.
#[test]
fn explicit_color_config_is_propagated_to_rustc() {
let p = project()
.file("Cargo.toml", &basic_manifest("test", "0.0.0"))
.file("src/lib.rs", "")
.build();
p.cargo("build -v --color always")
.with_stderr_contains("[..]rustc [..] src/lib.rs --color always[..]")
.run();
// Clean so the second build actually re-invokes rustc.
p.cargo("clean").run();
p.cargo("build -v --color never")
.with_stderr(
"\
[COMPILING] test v0.0.0 ([..])
[RUNNING] `rustc [..] --color never [..]`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]
",
).run();
}
// `--message-format=json` emits the expected stream of JSON records
// (compiler-artifact, compiler-message, build-script-executed) on a first
// build, and on a fresh rebuild repeats the artifacts (fresh: true) while
// omitting compiler warnings.
#[test]
fn compiler_json_error_format() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.5.0"
authors = ["[email protected]"]
[dependencies.bar]
path = "bar"
"#,
).file(
"build.rs",
"fn main() { println!(\"cargo:rustc-cfg=xyz\") }",
).file("src/main.rs", "fn main() { let unused = 92; }")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.5.0"))
.file("bar/src/lib.rs", r#"fn dead() {}"#)
.build();
// Using jobs=1 to ensure that the order of messages is consistent.
p.cargo("build -v --message-format=json --jobs=1")
.with_json(
r#"
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["custom-build"],
"crate_types":["bin"],
"edition": "2015",
"name":"build-script-build",
"src_path":"[..]build.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": "{...}",
"fresh": false
}
{
"reason":"compiler-message",
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"edition": "2015",
"name":"bar",
"src_path":"[..]lib.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"edition": "2015",
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": false
}
{
"reason":"build-script-executed",
"package_id":"foo 0.5.0 ([..])",
"linked_libs":[],
"linked_paths":[],
"env":[],
"cfgs":["xyz"]
}
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"edition": "2015",
"name":"foo",
"src_path":"[..]main.rs"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"edition": "2015",
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": "{...}",
"fresh": false
}
"#,
).run();
// With fresh build, we should repeat the artifacts,
// but omit compiler warnings.
p.cargo("build -v --message-format=json --jobs=1")
.with_json(
r#"
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["custom-build"],
"crate_types":["bin"],
"edition": "2015",
"name":"build-script-build",
"src_path":"[..]build.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": "{...}",
"fresh": true
}
{
"reason":"compiler-artifact",
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"package_id":"bar 0.5.0 ([..])",
"target":{
"kind":["lib"],
"crate_types":["lib"],
"edition": "2015",
"name":"bar",
"src_path":"[..]lib.rs"
},
"filenames":["[..].rlib"],
"fresh": true
}
{
"reason":"build-script-executed",
"package_id":"foo 0.5.0 ([..])",
"linked_libs":[],
"linked_paths":[],
"env":[],
"cfgs":["xyz"]
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"edition": "2015",
"name":"foo",
"src_path":"[..]main.rs"
},
"profile": {
"debug_assertions": true,
"debuginfo": 2,
"opt_level": "0",
"overflow_checks": true,
"test": false
},
"features": [],
"filenames": "{...}",
"fresh": true
}
"#,
).run();
}
// An invalid `--message-format` value is rejected by CLI argument parsing
// (status 1, not 101) with the list of valid values.
#[test]
fn wrong_message_format_option() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() {}")
.build();
p.cargo("build --message-format XML")
.with_status(1)
.with_stderr_contains(
"\
error: 'XML' isn't a valid value for '--message-format <FMT>'
<tab>[possible values: human, json, short]
",
).run();
}
// `rustc --message-format JSON` (case-insensitive value) forwards the
// compiler message and artifact records; release profile fields checked.
#[test]
fn message_format_json_forward_stderr() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() { let unused = 0; }")
.build();
p.cargo("rustc --release --bin foo --message-format JSON")
.with_json(
r#"
{
"reason":"compiler-message",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"edition": "2015",
"name":"foo",
"src_path":"[..]"
},
"message":"{...}"
}
{
"reason":"compiler-artifact",
"package_id":"foo 0.5.0 ([..])",
"target":{
"kind":["bin"],
"crate_types":["bin"],
"edition": "2015",
"name":"foo",
"src_path":"[..]"
},
"profile":{
"debug_assertions":false,
"debuginfo":null,
"opt_level":"3",
"overflow_checks": false,
"test":false
},
"features":[],
"filenames": "{...}",
"fresh": false
}
"#,
).run();
}
// Arbitrary keys under [package.metadata] (and nested tables) do not
// trigger unused-manifest-key warnings.
#[test]
fn no_warn_about_package_metadata() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
[package.metadata]
foo = "bar"
a = true
b = 3
[package.metadata.another]
bar = 3
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build")
.with_stderr(
"[..] foo v0.0.1 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// An empty string passed to `--target` is rejected with "target was empty".
#[test]
fn cargo_build_empty_target() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/main.rs", "fn main() {}")
.build();
p.cargo("build --target")
.arg("")
.with_status(101)
.with_stderr_contains("[..] target was empty")
.run();
}
// `build --all` in a workspace with a root package builds every member.
#[test]
fn build_all_workspace() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#,
).file("src/main.rs", "fn main() {}")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "pub fn bar() {}")
.build();
p.cargo("build --all")
.with_stderr(
"[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// `build --all --exclude baz` skips the excluded member; `baz` would fail
// to compile (break_the_build is undefined), proving it was never built.
#[test]
fn build_all_exclude() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[workspace]
members = ["bar", "baz"]
"#,
).file("src/main.rs", "fn main() {}")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "pub fn bar() {}")
.file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
.file("baz/src/lib.rs", "pub fn baz() { break_the_build(); }")
.build();
p.cargo("build --all --exclude baz")
.with_stderr_contains("[..]Compiling foo v0.1.0 [..]")
.with_stderr_contains("[..]Compiling bar v0.1.0 [..]")
.with_stderr_does_not_contain("[..]Compiling baz v0.1.0 [..]")
.run();
}
// `build --all --examples` in a workspace builds every member's examples
// and none of their binaries.
#[test]
fn build_all_workspace_implicit_examples() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
bar = { path = "bar" }
[workspace]
"#,
).file("src/lib.rs", "")
.file("src/bin/a.rs", "fn main() {}")
.file("src/bin/b.rs", "fn main() {}")
.file("examples/c.rs", "fn main() {}")
.file("examples/d.rs", "fn main() {}")
.file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "")
.file("bar/src/bin/e.rs", "fn main() {}")
.file("bar/src/bin/f.rs", "fn main() {}")
.file("bar/examples/g.rs", "fn main() {}")
.file("bar/examples/h.rs", "fn main() {}")
.build();
p.cargo("build --all --examples")
.with_stderr(
"[..] Compiling bar v0.1.0 ([..])\n\
[..] Compiling foo v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
assert!(!p.bin("a").is_file());
assert!(!p.bin("b").is_file());
assert!(p.bin("examples/c").is_file());
assert!(p.bin("examples/d").is_file());
assert!(!p.bin("e").is_file());
assert!(!p.bin("f").is_file());
assert!(p.bin("examples/g").is_file());
assert!(p.bin("examples/h").is_file());
}
// `build --all` on a virtual manifest (workspace root with no package)
// builds every member.
#[test]
fn build_all_virtual_manifest() {
let p = project()
.file(
"Cargo.toml",
// The raw string was truncated here; restored to match the sibling
// virtual-manifest tests (members `bar` and `baz`), without which
// the workspace would have no members and the test could not pass.
r#"
[workspace]
members = ["bar", "baz"]
"#,
).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "pub fn bar() {}")
.file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
// The order in which bar and baz are built is not guaranteed
p.cargo("build --all")
.with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr(
"[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// With a virtual manifest, plain `cargo build` implies `--all` and builds
// every member.
#[test]
fn build_virtual_manifest_all_implied() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["bar", "baz"]
"#,
).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "pub fn bar() {}")
.file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
// The order in which bar and baz are built is not guaranteed
p.cargo("build")
.with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr(
"[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// `build -p bar` in a virtual workspace builds only the named member.
#[test]
fn build_virtual_manifest_one_project() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["bar", "baz"]
"#,
).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "pub fn bar() {}")
.file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
.file("baz/src/lib.rs", "pub fn baz() {}")
.build();
p.cargo("build -p bar")
.with_stderr_does_not_contain("[..]baz[..]")
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr(
"[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// `build --all --examples` in a virtual workspace builds every member's
// examples and none of their binaries.
#[test]
fn build_all_virtual_manifest_implicit_examples() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["bar", "baz"]
"#,
).file("bar/Cargo.toml", &basic_manifest("bar", "0.1.0"))
.file("bar/src/lib.rs", "")
.file("bar/src/bin/a.rs", "fn main() {}")
.file("bar/src/bin/b.rs", "fn main() {}")
.file("bar/examples/c.rs", "fn main() {}")
.file("bar/examples/d.rs", "fn main() {}")
.file("baz/Cargo.toml", &basic_manifest("baz", "0.1.0"))
.file("baz/src/lib.rs", "")
.file("baz/src/bin/e.rs", "fn main() {}")
.file("baz/src/bin/f.rs", "fn main() {}")
.file("baz/examples/g.rs", "fn main() {}")
.file("baz/examples/h.rs", "fn main() {}")
.build();
// The order in which bar and baz are built is not guaranteed
p.cargo("build --all --examples")
.with_stderr_contains("[..] Compiling baz v0.1.0 ([..])")
.with_stderr_contains("[..] Compiling bar v0.1.0 ([..])")
.with_stderr(
"[..] Compiling [..] v0.1.0 ([..])\n\
[..] Compiling [..] v0.1.0 ([..])\n\
[..] Finished dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
assert!(!p.bin("a").is_file());
assert!(!p.bin("b").is_file());
assert!(p.bin("examples/c").is_file());
assert!(p.bin("examples/d").is_file());
assert!(!p.bin("e").is_file());
assert!(!p.bin("f").is_file());
assert!(p.bin("examples/g").is_file());
assert!(p.bin("examples/h").is_file());
}
// A workspace member named "a" depending on a registry crate also named
// "a" builds both without conflict under `build --all`.
#[test]
fn build_all_member_dependency_same_name() {
let p = project()
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a"]
"#,
).file(
"a/Cargo.toml",
r#"
[project]
name = "a"
version = "0.1.0"
[dependencies]
a = "0.1.0"
"#,
).file("a/src/lib.rs", "pub fn a() {}")
.build();
// Publish the registry crate that shadows the member's name.
Package::new("a", "0.1.0").publish();
p.cargo("build --all")
.with_stderr(
"[UPDATING] `[..]` index\n\
[DOWNLOADING] crates ...\n\
[DOWNLOADED] a v0.1.0 ([..])\n\
[COMPILING] a v0.1.0\n\
[COMPILING] a v0.1.0 ([..])\n\
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]\n",
).run();
}
// `run --bin other` executes only the selected binary; `main` would panic
// if it were run.
#[test]
fn run_proper_binary() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "main"
[[bin]]
name = "other"
"#,
).file("src/lib.rs", "")
.file(
"src/bin/main.rs",
r#"fn main() { panic!("This should never be run."); }"#,
).file("src/bin/other.rs", "fn main() {}")
.build();
p.cargo("run --bin other").run();
}
// `run --bin foo` works when the binary comes from src/bin/main.rs.
#[test]
fn run_proper_binary_main_rs() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/lib.rs", "")
.file("src/bin/main.rs", "fn main() {}")
.build();
p.cargo("run --bin foo").run();
}
// Two [[bin]] targets without explicit paths resolve to src/foo.rs and
// src/bar.rs respectively, and each runs its own source.
#[test]
fn run_proper_alias_binary_from_src() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#,
).file("src/foo.rs", r#"fn main() { println!("foo"); }"#)
.file("src/bar.rs", r#"fn main() { println!("bar"); }"#)
.build();
p.cargo("build --all").run();
p.process(&p.bin("foo")).with_stdout("foo\n").run();
p.process(&p.bin("bar")).with_stdout("bar\n").run();
}
// Two [[bin]] targets with only src/main.rs available both compile from
// it, so both binaries print "main".
#[test]
fn run_proper_alias_binary_main_rs() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
authors = []
version = "0.0.0"
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#,
).file("src/main.rs", r#"fn main() { println!("main"); }"#)
.build();
p.cargo("build --all").run();
p.process(&p.bin("foo")).with_stdout("main\n").run();
p.process(&p.bin("bar")).with_stdout("main\n").run();
}
// When both src/foo.rs and src/main.rs exist, the bin named "foo" is
// built from src/main.rs (src/foo.rs would panic if run).
#[test]
fn run_proper_binary_main_rs_as_foo() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file(
"src/foo.rs",
r#" fn main() { panic!("This should never be run."); }"#,
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("run --bin foo").run();
}
// RUSTC_WRAPPER prefixes the rustc invocation with the wrapper program.
#[test]
fn rustc_wrapper() {
// We don't have /usr/bin/env on Windows.
if cfg!(windows) {
return;
}
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build -v")
.env("RUSTC_WRAPPER", "/usr/bin/env")
.with_stderr_contains("[RUNNING] `/usr/bin/env rustc --crate-name foo [..]")
.run();
}
// A cdylib's platform-specific artifacts stay in target/debug/deps (they
// are not "lifted" to target/debug).
#[test]
fn cdylib_not_lifted() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build").run();
// Expected artifact names differ per platform.
let files = if cfg!(windows) {
vec!["foo.dll.lib", "foo.dll.exp", "foo.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo.dylib"]
} else {
vec!["libfoo.so"]
};
for file in files {
println!("checking: {}", file);
assert!(p.root().join("target/debug/deps").join(&file).is_file());
}
}
// Final cdylib outputs for package "foo-bar" appear in target/debug with
// the dash converted to an underscore in the library file name.
#[test]
fn cdylib_final_outputs() {
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo-bar"
authors = []
version = "0.1.0"
[lib]
crate-type = ["cdylib"]
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build").run();
// Expected artifact names differ per platform.
let files = if cfg!(windows) {
vec!["foo_bar.dll.lib", "foo_bar.dll"]
} else if cfg!(target_os = "macos") {
vec!["libfoo_bar.dylib"]
} else {
vec!["libfoo_bar.so"]
};
for file in files {
println!("checking: {}", file);
assert!(p.root().join("target/debug").join(&file).is_file());
}
}
// Feature --cfg flags and build-script rustc-cfg flags are passed to
// rustc in a deterministic order.
#[test]
fn deterministic_cfg_flags() {
// This bug is non-deterministic
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
authors = []
build = "build.rs"
[features]
default = ["f_a", "f_b", "f_c", "f_d"]
f_a = []
f_b = []
f_c = []
f_d = []
"#,
).file(
"build.rs",
r#"
fn main() {
println!("cargo:rustc-cfg=cfg_a");
println!("cargo:rustc-cfg=cfg_b");
println!("cargo:rustc-cfg=cfg_c");
println!("cargo:rustc-cfg=cfg_d");
println!("cargo:rustc-cfg=cfg_e");
}
"#,
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("build -v")
.with_stderr(
"\
[COMPILING] foo v0.1.0 [..]
[RUNNING] [..]
[RUNNING] [..]
[RUNNING] `rustc --crate-name foo [..] \
--cfg[..]default[..]--cfg[..]f_a[..]--cfg[..]f_b[..]\
--cfg[..]f_c[..]--cfg[..]f_d[..] \
--cfg cfg_a --cfg cfg_b --cfg cfg_c --cfg cfg_d --cfg cfg_e`
[FINISHED] dev [unoptimized + debuginfo] target(s) in [..]",
).run();
}
// Explicit [[bin]] sections without `path` keys resolve to src/main.rs
// and src/bin/bar.rs via target inference.
#[test]
fn explicit_bins_without_paths() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "foo"
[[bin]]
name = "bar"
"#,
).file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.build();
p.cargo("build").run();
}
// With src/lib.rs present, src/foo.rs is NOT inferred as the "foo" bin;
// the manifest error tells the user to specify bin.path.
#[test]
fn no_bin_in_src_with_lib() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/lib.rs", "")
.file("src/foo.rs", "fn main() {}")
.build();
p.cargo("build")
.with_status(101)
.with_stderr_contains(
"\
[ERROR] failed to parse manifest at `[..]`
Caused by:
can't find `foo` bin, specify bin.path",
).run();
}
#[test]
fn inferred_bins() {
let p = project()
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.file("src/bin/baz/main.rs", "fn main() {}")
.build();
p.cargo("build").run();
assert!(p.bin("foo").is_file());
assert!(p.bin("bar").is_file());
assert!(p.bin("baz").is_file());
}
// src/bin/bar.rs and src/bin/bar/main.rs infer the same bin name "bar",
// which is rejected as a duplicate.
#[test]
fn inferred_bins_duplicate_name() {
// this should fail, because we have two binaries with the same name
let p = project()
.file("src/main.rs", "fn main() {}")
.file("src/bin/bar.rs", "fn main() {}")
.file("src/bin/bar/main.rs", "fn main() {}")
.build();
p.cargo("build").with_status(101).with_stderr_contains(
"[..]found duplicate binary name bar, but all binary targets must have a unique name[..]",
)
.run();
}
// An explicit [[bin]] without `path` still infers src/bin/bar/main.rs.
#[test]
fn inferred_bin_path() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[[bin]]
name = "bar"
# Note, no `path` key!
"#,
).file("src/bin/bar/main.rs", "fn main() {}")
.build();
p.cargo("build").run();
assert!(p.bin("bar").is_file());
}
// Examples are inferred from examples/*.rs and examples/*/main.rs.
#[test]
fn inferred_examples() {
let p = project()
.file("src/lib.rs", "fn main() {}")
.file("examples/bar.rs", "fn main() {}")
.file("examples/baz/main.rs", "fn main() {}")
.build();
p.cargo("test").run();
assert!(p.bin("examples/bar").is_file());
assert!(p.bin("examples/baz").is_file());
}
// Integration tests are inferred from tests/*.rs and tests/*/main.rs.
#[test]
fn inferred_tests() {
let p = project()
.file("src/lib.rs", "fn main() {}")
.file("tests/bar.rs", "fn main() {}")
.file("tests/baz/main.rs", "fn main() {}")
.build();
p.cargo("test --test=bar --test=baz").run();
}
// Benchmarks are inferred from benches/*.rs and benches/*/main.rs.
#[test]
fn inferred_benchmarks() {
let p = project()
.file("src/lib.rs", "fn main() {}")
.file("benches/bar.rs", "fn main() {}")
.file("benches/baz/main.rs", "fn main() {}")
.build();
p.cargo("bench --bench=bar --bench=baz").run();
}
#[test]
fn target_edition() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
[lib]
edition = "2018"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build -v")
.without_status() // passes on nightly, fails on stable, b/c --edition is nightly-only
.with_stderr_contains(
"\
[COMPILING] foo v0.0.1 ([..])
[RUNNING] `rustc [..]--edition=2018 [..]
",
).run();
}
#[test]
fn target_edition_override() {
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.0.1"
authors = []
edition = "2018"
[lib]
edition = "2015"
"#,
).file(
"src/lib.rs",
"
pub fn async() {}
pub fn try() {}
pub fn await() {}
"
)
.build();
p.cargo("build -v").run();
}
#[test]
fn same_metadata_different_directory() {
// A top-level crate built in two different workspaces should have the
// same metadata hash.
let p = project()
.at("foo1")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
let output = t!(String::from_utf8(
t!(p.cargo("build -v").exec_with_output()).stderr,
));
let metadata = output
.split_whitespace()
.find(|arg| arg.starts_with("metadata="))
.unwrap();
let p = project()
.at("foo2")
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build -v")
.with_stderr_contains(format!("[..]{}[..]", metadata))
.run();
}
#[test]
fn building_a_dependent_crate_witout_bin_should_fail() {
Package::new("testless", "0.1.0")
.file(
"Cargo.toml",
r#"
[project]
name = "testless"
version = "0.1.0"
[[bin]]
name = "a_bin"
"#,
).file("src/lib.rs", "")
.publish();
let p = project()
.file(
"Cargo.toml",
r#"
[project]
name = "foo"
version = "0.1.0"
[dependencies]
testless = "0.1.0"
"#,
).file("src/lib.rs", "")
.build();
p.cargo("build")
.with_status(101)
.with_stderr_contains("[..]can't find `a_bin` bin, specify bin.path")
.run();
}
#[test]
fn uplift_dsym_of_bin_on_mac() {
if !cfg!(any(target_os = "macos", target_os = "ios")) {
return;
}
let p = project()
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.file("tests/d.rs", "fn main() { panic!(); }")
.build();
p.cargo("build --bins --examples --tests").run();
assert!(p.bin("foo.dSYM").is_dir());
assert!(p.bin("b.dSYM").is_dir());
assert!(
p.bin("b.dSYM")
.symlink_metadata()
.expect("read metadata from b.dSYM")
.file_type()
.is_symlink()
);
assert!(!p.bin("c.dSYM").is_dir());
assert!(!p.bin("d.dSYM").is_dir());
}
#[test]
fn uplift_pdb_of_bin_on_windows() {
if !cfg!(all(target_os = "windows", target_env = "msvc")) {
return;
}
let p = project()
.file("src/main.rs", "fn main() { panic!(); }")
.file("src/bin/b.rs", "fn main() { panic!(); }")
.file("examples/c.rs", "fn main() { panic!(); }")
.file("tests/d.rs", "fn main() { panic!(); }")
.build();
p.cargo("build --bins --examples --tests").run();
assert!(p.target_debug_dir().join("foo.pdb").is_file());
assert!(p.target_debug_dir().join("b.pdb").is_file());
assert!(!p.target_debug_dir().join("c.pdb").is_file());
assert!(!p.target_debug_dir().join("d.pdb").is_file());
}
// Make sure that `cargo build` chooses the correct profile for building
// targets based on filters (assuming --profile is not specified).
#[test]
fn build_filter_infer_profile() {
let p = project()
.file("src/lib.rs", "")
.file("src/main.rs", "fn main() {}")
.file("tests/t1.rs", "")
.file("benches/b1.rs", "")
.file("examples/ex1.rs", "fn main() {}")
.build();
p.cargo("build -v")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
p.root().join("target").rm_rf();
p.cargo("build -v --test=t1")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"[RUNNING] `rustc --crate-name t1 tests/t1.rs --color never --emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
p.root().join("target").rm_rf();
p.cargo("build -v --bench=b1")
.with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/lib.rs --color never --crate-type lib \
--emit=dep-info,link[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name b1 benches/b1.rs --color never --emit=dep-info,link \
-C opt-level=3[..]",
).with_stderr_contains(
"\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]",
).run();
}
#[test]
fn targets_selected_default() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build -v")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_does_not_contain("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_does_not_contain("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}
#[test]
fn targets_selected_all() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build -v --all-targets")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}
#[test]
fn all_targets_no_lib() {
let p = project().file("src/main.rs", "fn main() {}").build();
p.cargo("build -v --all-targets")
// bin
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --crate-type bin \
--emit=dep-info,link[..]")
// bench
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C opt-level=3 --test [..]")
// unit test
.with_stderr_contains("\
[RUNNING] `rustc --crate-name foo src/main.rs --color never --emit=dep-info,link \
-C debuginfo=2 --test [..]").run();
}
#[test]
fn no_linkable_target() {
// Issue 3169. This is currently not an error as per discussion in PR #4797
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "foo"
version = "0.1.0"
authors = []
[dependencies]
the_lib = { path = "the_lib" }
"#,
).file("src/main.rs", "fn main() {}")
.file(
"the_lib/Cargo.toml",
r#"
[package]
name = "the_lib"
version = "0.1.0"
[lib]
name = "the_lib"
crate-type = ["staticlib"]
"#,
).file("the_lib/src/lib.rs", "pub fn foo() {}")
.build();
p.cargo("build")
.with_stderr_contains(
"\
[WARNING] The package `the_lib` provides no linkable [..] \
while compiling `foo`. [..] in `the_lib`'s Cargo.toml. [..]",
).run();
}
#[test]
fn avoid_dev_deps() {
Package::new("foo", "1.0.0").publish();
let p = project()
.file(
"Cargo.toml",
r#"
[package]
name = "bar"
version = "0.1.0"
authors = []
[dev-dependencies]
baz = "1.0.0"
"#,
).file("src/main.rs", "fn main() {}")
.build();
p.cargo("build").with_status(101).run();
p.cargo("build -Zavoid-dev-deps")
.masquerade_as_nightly_cargo()
.run();
}
#[test]
fn invalid_jobs() {
let p = project()
.file("Cargo.toml", &basic_bin_manifest("foo"))
.file("src/foo.rs", &main_file(r#""i am foo""#, &[]))
.build();
p.cargo("build --jobs over9000")
.with_status(1)
.with_stderr("error: Invalid value: could not parse `over9000` as a number")
.run();
}
#[test]
fn target_filters_workspace() {
let ws = project()
.at("ws")
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
).file("a/Cargo.toml", &basic_lib_manifest("a"))
.file("a/src/lib.rs", "")
.file("a/examples/ex1.rs", "fn main() {}")
.file("b/Cargo.toml", &basic_bin_manifest("b"))
.file("b/src/main.rs", "fn main() {}")
.file("b/examples/ex1.rs", "fn main() {}")
.build();
ws.cargo("build -v --example ex")
.with_status(101)
.with_stderr(
"\
[ERROR] no example target named `ex`
Did you mean `ex1`?",
).run();
ws.cargo("build -v --lib")
.with_status(0)
.with_stderr_contains("[RUNNING] `rustc [..]a/src/lib.rs[..]")
.run();
ws.cargo("build -v --example ex1")
.with_status(0)
.with_stderr_contains("[RUNNING] `rustc [..]a/examples/ex1.rs[..]")
.with_stderr_contains("[RUNNING] `rustc [..]b/examples/ex1.rs[..]")
.run();
}
#[test]
fn target_filters_workspace_not_found() {
let ws = project()
.at("ws")
.file(
"Cargo.toml",
r#"
[workspace]
members = ["a", "b"]
"#,
).file("a/Cargo.toml", &basic_bin_manifest("a"))
.file("a/src/main.rs", "fn main() {}")
.file("b/Cargo.toml", &basic_bin_manifest("b"))
.file("b/src/main.rs", "fn main() {}")
.build();
ws.cargo("build -v --lib")
.with_status(101)
.with_stderr("[ERROR] no library targets found in packages: a, b")
.run();
} | [workspace]
members = ["bar", "baz"]
"#, |
generics3.rs | // An imaginary magical school has a new report card generation system written in Rust!
// Currently the system only supports creating report cards where the student's grade
// is represented numerically (e.g. 1.0 -> 5.5).
// However, the school also issues alphabetical grades (A+ -> F-) and needs
// to be able to print both types of report card!
// Make the necessary code changes in the struct ReportCard and the impl block
// to support alphabetical report cards. Change the Grade in the second test to "A+"
// to show that your changes allow alphabetical grades.
// Execute 'rustlings hint generics3' for hints!
pub struct ReportCard<T: std::fmt::Display> {
pub grade: T,
pub student_name: String,
pub student_age: u8,
}
impl<T: std::fmt::Display> ReportCard<T> {
pub fn print(&self) -> String {
format!(
"{} ({}) - achieved a grade of {}",
&self.student_name, &self.student_age, &self.grade
)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn generate_numeric_report_card() {
let report_card = ReportCard {
grade: 2.1,
student_name: "Tom Wriggle".to_string(),
student_age: 12,
};
assert_eq!(
report_card.print(),
"Tom Wriggle (12) - achieved a grade of 2.1"
);
}
#[test]
fn | () {
let report_card = ReportCard {
grade: "A+",
student_name: "Gary Plotter".to_string(),
student_age: 11,
};
assert_eq!(
report_card.print(),
"Gary Plotter (11) - achieved a grade of A+"
);
}
}
| generate_alphabetic_report_card |
router.go | package router
import (
"context"
"errors"
"net/http"
"github.com/containous/alice"
"github.com/containous/traefik/v2/pkg/config/runtime"
"github.com/containous/traefik/v2/pkg/log"
"github.com/containous/traefik/v2/pkg/middlewares/accesslog"
"github.com/containous/traefik/v2/pkg/middlewares/recovery"
"github.com/containous/traefik/v2/pkg/middlewares/tracing"
"github.com/containous/traefik/v2/pkg/responsemodifiers"
"github.com/containous/traefik/v2/pkg/rules"
"github.com/containous/traefik/v2/pkg/server/internal"
"github.com/containous/traefik/v2/pkg/server/middleware"
"github.com/containous/traefik/v2/pkg/server/service"
)
const (
recoveryMiddlewareName = "traefik-internal-recovery"
)
// NewManager Creates a new Manager
func NewManager(conf *runtime.Configuration,
serviceManager *service.Manager,
middlewaresBuilder *middleware.Builder,
modifierBuilder *responsemodifiers.Builder,
) *Manager |
// Manager A route/router manager
type Manager struct {
routerHandlers map[string]http.Handler
serviceManager *service.Manager
middlewaresBuilder *middleware.Builder
modifierBuilder *responsemodifiers.Builder
conf *runtime.Configuration
}
func (m *Manager) getHTTPRouters(ctx context.Context, entryPoints []string, tls bool) map[string]map[string]*runtime.RouterInfo {
if m.conf != nil {
return m.conf.GetRoutersByEntryPoints(ctx, entryPoints, tls)
}
return make(map[string]map[string]*runtime.RouterInfo)
}
// BuildHandlers Builds handler for all entry points
func (m *Manager) BuildHandlers(rootCtx context.Context, entryPoints []string, tls bool) map[string]http.Handler {
entryPointHandlers := make(map[string]http.Handler)
for entryPointName, routers := range m.getHTTPRouters(rootCtx, entryPoints, tls) {
entryPointName := entryPointName
ctx := log.With(rootCtx, log.Str(log.EntryPointName, entryPointName))
handler, err := m.buildEntryPointHandler(ctx, routers)
if err != nil {
log.FromContext(ctx).Error(err)
continue
}
handlerWithAccessLog, err := alice.New(func(next http.Handler) (http.Handler, error) {
return accesslog.NewFieldHandler(next, log.EntryPointName, entryPointName, accesslog.AddOriginFields), nil
}).Then(handler)
if err != nil {
log.FromContext(ctx).Error(err)
entryPointHandlers[entryPointName] = handler
} else {
entryPointHandlers[entryPointName] = handlerWithAccessLog
}
}
m.serviceManager.LaunchHealthCheck()
return entryPointHandlers
}
func (m *Manager) buildEntryPointHandler(ctx context.Context, configs map[string]*runtime.RouterInfo) (http.Handler, error) {
router, err := rules.NewRouter()
if err != nil {
return nil, err
}
for routerName, routerConfig := range configs {
ctxRouter := log.With(internal.AddProviderInContext(ctx, routerName), log.Str(log.RouterName, routerName))
logger := log.FromContext(ctxRouter)
handler, err := m.buildRouterHandler(ctxRouter, routerName, routerConfig)
if err != nil {
routerConfig.AddError(err, true)
logger.Error(err)
continue
}
err = router.AddRoute(routerConfig.Rule, routerConfig.Priority, handler)
if err != nil {
routerConfig.AddError(err, true)
logger.Error(err)
continue
}
}
router.SortRoutes()
chain := alice.New()
chain = chain.Append(func(next http.Handler) (http.Handler, error) {
return recovery.New(ctx, next, recoveryMiddlewareName)
})
return chain.Then(router)
}
func (m *Manager) buildRouterHandler(ctx context.Context, routerName string, routerConfig *runtime.RouterInfo) (http.Handler, error) {
if handler, ok := m.routerHandlers[routerName]; ok {
return handler, nil
}
handler, err := m.buildHTTPHandler(ctx, routerConfig, routerName)
if err != nil {
return nil, err
}
handlerWithAccessLog, err := alice.New(func(next http.Handler) (http.Handler, error) {
return accesslog.NewFieldHandler(next, accesslog.RouterName, routerName, nil), nil
}).Then(handler)
if err != nil {
log.FromContext(ctx).Error(err)
m.routerHandlers[routerName] = handler
} else {
m.routerHandlers[routerName] = handlerWithAccessLog
}
return m.routerHandlers[routerName], nil
}
func (m *Manager) buildHTTPHandler(ctx context.Context, router *runtime.RouterInfo, routerName string) (http.Handler, error) {
var qualifiedNames []string
for _, name := range router.Middlewares {
qualifiedNames = append(qualifiedNames, internal.GetQualifiedName(ctx, name))
}
router.Middlewares = qualifiedNames
rm := m.modifierBuilder.Build(ctx, qualifiedNames)
if router.Service == "" {
return nil, errors.New("the service is missing on the router")
}
sHandler, err := m.serviceManager.BuildHTTP(ctx, router.Service, rm)
if err != nil {
return nil, err
}
mHandler := m.middlewaresBuilder.BuildChain(ctx, router.Middlewares)
tHandler := func(next http.Handler) (http.Handler, error) {
return tracing.NewForwarder(ctx, routerName, router.Service, next), nil
}
return alice.New().Extend(*mHandler).Append(tHandler).Then(sHandler)
}
| {
return &Manager{
routerHandlers: make(map[string]http.Handler),
serviceManager: serviceManager,
middlewaresBuilder: middlewaresBuilder,
modifierBuilder: modifierBuilder,
conf: conf,
}
} |
context.rs | use crate::data::{
primitive::{PrimitiveObject, PrimitiveType},
Client, Hold, Interval, Literal,
};
use crate::interpreter::{json_to_literal, memory_to_literal};
use nom::lib::std::collections::HashMap;
////////////////////////////////////////////////////////////////////////////////
// DATA STRUCTURE
////////////////////////////////////////////////////////////////////////////////
#[derive(Debug, Clone)]
pub struct ApiInfo {
pub client: Client,
pub fn_endpoint: String,
}
#[derive(Debug, Clone)]
pub struct | {
pub current: HashMap<String, Literal>,
pub metadata: HashMap<String, Literal>,
pub api_info: Option<ApiInfo>,
pub hold: Option<Hold>,
pub step: String,
pub flow: String,
}
////////////////////////////////////////////////////////////////////////////////
// STATIC FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
pub fn get_hashmap_from_mem(lit: &serde_json::Value, flow_name: &str) -> HashMap<String, Literal> {
match memory_to_literal(
lit,
Interval {
start_line: 0,
start_column: 0,
end_line: None,
end_column: None,
offset: 0,
},
flow_name,
) {
Ok(vars) if vars.primitive.get_type() == PrimitiveType::PrimitiveObject => {
match vars.primitive.as_any().downcast_ref::<PrimitiveObject>() {
Some(map) => map.value.clone(),
None => HashMap::new(),
}
}
_ => HashMap::new(),
}
}
pub fn get_hashmap_from_json(lit: &serde_json::Value, flow_name: &str) -> HashMap<String, Literal> {
match json_to_literal(
lit,
Interval {
start_line: 0,
start_column: 0,
end_line: None,
end_column: None,
offset: 0,
},
flow_name
) {
Ok(vars) if vars.primitive.get_type() == PrimitiveType::PrimitiveObject => {
match vars.primitive.as_any().downcast_ref::<PrimitiveObject>() {
Some(map) => map.value.clone(),
None => HashMap::new(),
}
}
_ => HashMap::new(),
}
}
impl Context {
pub fn new(
current: HashMap<String, Literal>,
metadata: HashMap<String, Literal>,
api_info: Option<ApiInfo>,
hold: Option<Hold>,
step: &str,
flow: &str,
) -> Self {
Self {
current,
metadata,
api_info,
hold,
step: step.to_owned(),
flow: flow.to_owned(),
}
}
}
////////////////////////////////////////////////////////////////////////////////
// PUBLIC FUNCTIONS
////////////////////////////////////////////////////////////////////////////////
pub fn get_hashmap(lit: &serde_json::Value, flow_name: &str) -> HashMap<String, Literal> {
match json_to_literal(
lit,
Interval {
start_line: 0,
start_column: 0,
end_line: None,
end_column: None,
offset: 0,
},
flow_name
) {
Ok(vars) if vars.primitive.get_type() == PrimitiveType::PrimitiveObject => {
match vars.primitive.as_any().downcast_ref::<PrimitiveObject>() {
Some(map) => map.value.clone(),
None => HashMap::new(),
}
}
_ => HashMap::new(),
}
}
| Context |
buscar.js | const {ObjectId} = require('mongoose').Types;
const {Usuario, Categoria, Producto, Role} = require('../models')
const coleccionesPermitidas = [
'usuarios',
'categorias',
'productos',
'roles'
];
const buscarUsuarios = async(termino = '', res) => {
const esMongoId = ObjectId.isValid(termino); // Verificar si el termino se trata de un id de MongoDB
if(esMongoId){
const usuario = await Usuario.findById(termino);
return res.json({
results: (usuario)? [usuario] : []
});
}
const regex = new RegExp(termino, 'i'); // Busqueda case insensitive
const usuarios = await Usuario.find({
$or: [{nombre: regex}, {correo: regex}],
$and: [{estado: true}]
});
res.json({
results: usuarios
});
};
const buscarCategorias = async(termino = '', res) => {
const esMongoId = ObjectId.isValid(termino); // Verificar si el termino se trata de un id de MongoDB
if(esMongoId){
const categoria = await Categoria.findById(termino);
return res.json({
results: (categoria)? [categoria] : []
});
}
const regex = new RegExp(termino, 'i'); // Busqueda case insensitive
const categorias = await Categoria.find({
$and: [{nombre: regex}, {estado: true}],
});
res.json({
results: categorias
});
};
const buscarProductos = async(termino = '', res) => {
const esMongoId = ObjectId.isValid(termino); // Verificar si el termino se trata de un id de MongoDB
if(esMongoId){
const producto = await Producto.findById(termino).populate('categoria', 'nombre');
return res.json({
results: (producto)? [producto] : []
});
}
const regex = new RegExp(termino, 'i'); // Busqueda case insensitive
const productos = await Producto.find({nombre: regex, estado: true}).populate('categoria', 'nombre').populate('usuario', 'nombre');
res.json({
results: productos
});
};
const buscarRoles = async(termino = '', res) => {
const esMongoId = ObjectId.isValid(termino); // Verificar si el termino se trata de un id de MongoDB
if(esMongoId){
const rol = await Role.findById(termino);
return res.json({
results: (rol)? [rol] : []
});
}
const regex = new RegExp(termino, 'i'); // Busqueda case insensitive
const roles = await Role.find({rol: regex});
res.json({
results: roles
}); | };
const buscar = (req, res = response) => {
const {coleccion, termino} = req.params;
if(!coleccionesPermitidas.includes(coleccion)){
return res.status(400).json({
msg: `Las colecciones permitidas son: ${coleccionesPermitidas}`
});
}
switch (coleccion) {
case 'usuarios':
buscarUsuarios(termino, res);
break;
case 'categorias':
buscarCategorias(termino, res);
break;
case 'productos':
buscarProductos(termino, res);
break;
case 'roles':
buscarRoles(termino, res);
break;
default:
// res.status(500).json({
// msg: `Se me olvidó incluir ${coleccion} en los parámetros de busqueda`
// });
res.json({
results: []
});
}
};
module.exports = {
buscar,
}; | |
write.go | package shell
import (
"github.com/client9/codegen"
)
// ShouldWriteFile reports whether the contents of the file and the given data
// represent effectively different scripts. In other words, if the file and the
// content are the same, the file should not be overwritten.
// Delegates to codegen.ShouldWriteFile using this package's Equal comparison.
func ShouldWriteFile(filename string, content []byte) bool {
	return codegen.ShouldWriteFile(filename, content, Equal)
}
} |
|
HUAWEI-CLOCK-MIB.py | #
# PySNMP MIB module HUAWEI-CLOCK-MIB (http://snmplabs.com/pysmi) | # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
PhysicalIndex, = mibBuilder.importSymbols("ENTITY-MIB", "PhysicalIndex")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
EnabledStatus, = mibBuilder.importSymbols("P-BRIDGE-MIB", "EnabledStatus")
NotificationGroup, ModuleCompliance, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance", "ObjectGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, MibIdentifier, Unsigned32, Gauge32, IpAddress, ObjectIdentity, NotificationType, ModuleIdentity, Counter32, Counter64, Integer32, Bits, iso = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "MibIdentifier", "Unsigned32", "Gauge32", "IpAddress", "ObjectIdentity", "NotificationType", "ModuleIdentity", "Counter32", "Counter64", "Integer32", "Bits", "iso")
DisplayString, TextualConvention, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "RowStatus")
# --- Module identity -------------------------------------------------------
# HUAWEI-CLOCK-MIB root OID: 1.3.6.1.4.1.2011.5.25.186.  Generated by pysmi;
# revision descriptions are only attached when the loaded mibBuilder build
# supports them (guarded by the version check below).
hwClockMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186))
hwClockMIB.setRevisions(('2014-11-29 00:00', '2014-11-03 00:00', '2014-08-13 00:00', '2014-04-21 00:00', '2014-01-07 00:00', '2013-11-12 00:00', '2013-10-31 00:00', '2013-05-23 00:00', '2013-05-14 00:00', '2013-03-20 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: hwClockMIB.setRevisionsDescriptions(('Modify alarm hwClockSourceInputBelowThreshold, hwClockSourceInputBelowThresholdResume.', 'Add alarm hwClockCesDcrMasterPwChange, hwClockCesDcrLockFail,hwClockCesDcrLockFailResume,hwClockSsmPktLos,hwClockSsmPktLosResume and add mib hwClockCesDcrSlot,hwClockCesDcrCard,hwClockCesDcrDomain,hwClockCesDcrOldMasterPwName,hwClockCesDcrNewMasterPwName,hwClockCesDcrLockState,hwClockCesMode', 'Add alarm hwClockSourceInputBelowThreshold, hwClockSourceInputBelowThresholdResume.', 'Add alarm hwClockClusterTopoFail, hwClockClusterTopoFailResume and table hwClockClusterTopoTable.', 'Edit the range of hwClockCesAcrDomianInfoDomain.', 'Add mib hwClockBitsCfgFrameFormat, hwClockAttributeLtiSquelch and hwClockAttributeInputThreshold.', 'Edit the range of hwClockCesAcrRecoveryDomain.', 'Re-edit the range of some nodes.', 'Re-edit the default values of hwClockAttributeTodProtocol node.', 'Some errors have been modified in current version and some nodes have been added into the current version.',))
if mibBuilder.loadTexts: hwClockMIB.setLastUpdated('201411290000Z')
if mibBuilder.loadTexts: hwClockMIB.setOrganization('Huawei Technologies Co.,Ltd. ')
if mibBuilder.loadTexts: hwClockMIB.setContactInfo("Huawei Industrial Base Bantian, Longgang Shenzhen 518129 People's Republic of China Website: http://www.huawei.com Email: [email protected] ")
if mibBuilder.loadTexts: hwClockMIB.setDescription('The MIB contains objects of module clock management and 1588 interface.')
# --- Global clock-management scalars (hwClockGlobalObjects subtree) --------
# Generated code: each scalar is declared, then its status/description are
# attached under the loadTexts guard.  Do not reorder these statements.
hwClockManageObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1))
hwClockGlobalObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1))
hwClockSourceEthClkEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 1), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceEthClkEnable.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceEthClkEnable.setDescription('The flag indicates that the ethernet clock is globally enabled.')
hwClockSourceSsmUnknown = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 4, 8, 11, 15))).clone(namedValues=NamedValues(("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15))).clone('dnu')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceSsmUnknown.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSsmUnknown.setDescription('The quality level of unknown SSM.')
hwClockSourceSysClkWorkMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("trace", 1), ("hold", 2), ("freeoscillate", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSourceSysClkWorkMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSysClkWorkMode.setDescription('The work mode of system clock.')
hwClockSourceForceCloseEnableStatus = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 4), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceForceCloseEnableStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceForceCloseEnableStatus.setDescription('The enable status of export forced close.')
hwClockSourceSsmControl = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("extend", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceSsmControl.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSsmControl.setDescription('The flag whether SSM is concerned with the clock source selection.')
hwClockSourceHoldMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("hold24Hours", 1), ("holdForever", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSourceHoldMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceHoldMode.setDescription('The hold mode of clock source.')
hwClockSourceFreqCheckEnable = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 7), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceFreqCheckEnable.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFreqCheckEnable.setDescription('The enable flag of frequency check.')
hwClockSourceFreqCheckLeftRange = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceFreqCheckLeftRange.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFreqCheckLeftRange.setDescription('The left range of frequency check, unit in 0.01ppm.')
hwClockSourceFreqCheckRightRange = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 1000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceFreqCheckRightRange.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFreqCheckRightRange.setDescription('The right range of frequency check, unit in 0.01ppm.')
hwClockSourceRetrieveMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("retrieve", 1), ("noRetrieve", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSourceRetrieveMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceRetrieveMode.setDescription('The retrieve mode of clock source.')
hwClockTimeUsedSource = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("srcDclsTimeBit0", 1), ("srcDclsTimeBit1", 2), ("src1ppsTodBit0", 3), ("src1ppsTodBit1", 4), ("srcPtp", 5), ("srcFreeRun", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockTimeUsedSource.setStatus('current')
if mibBuilder.loadTexts: hwClockTimeUsedSource.setDescription('The clock time used source.')
hwClockExtTimeInputType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("typeDclsTime", 1), ("type1ppsTodRs232", 2), ("type1ppsTodGps", 3), ("typeNone", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockExtTimeInputType.setStatus('current')
if mibBuilder.loadTexts: hwClockExtTimeInputType.setDescription('The input time type of clock extern time.')
hwClockExtTimeOutputType = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("typeDclsTime", 1), ("type1ppsTodRs232", 2), ("type1ppsTodGps", 3), ("typeNone", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockExtTimeOutputType.setStatus('current')
if mibBuilder.loadTexts: hwClockExtTimeOutputType.setDescription('The output time type of clock extern time.')
# Frequency-offset alarm threshold and read-only offset statistics.
hwClockAlarmThresholdFrequencyOffset = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(10, 92)).clone(92)).setUnits('100ppb').setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAlarmThresholdFrequencyOffset.setStatus('current')
if mibBuilder.loadTexts: hwClockAlarmThresholdFrequencyOffset.setDescription('The Threshold of clock alarm.')
hwClockFrequencyOffsetMax = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 15), Integer32()).setUnits('ppb').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockFrequencyOffsetMax.setStatus('current')
if mibBuilder.loadTexts: hwClockFrequencyOffsetMax.setDescription('The max offset of clock frequency.')
hwClockFrequencyOffsetMin = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 16), Integer32()).setUnits('ppb').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockFrequencyOffsetMin.setStatus('current')
if mibBuilder.loadTexts: hwClockFrequencyOffsetMin.setDescription('The min offset of clock frequency.')
hwClockFrequencyOffsetMean = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 17), Integer32()).setUnits('ppb').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockFrequencyOffsetMean.setStatus('current')
if mibBuilder.loadTexts: hwClockFrequencyOffsetMean.setDescription('The mean offset of clock frequency.')
hwClockFrequencyOffset = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 1, 18), Integer32()).setUnits('ppb').setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockFrequencyOffset.setStatus('current')
if mibBuilder.loadTexts: hwClockFrequencyOffset.setDescription('The current offset of clock frequency.')
# --- hwClockSourceSelTable -------------------------------------------------
# System clock source selection table, indexed by chassis index + select type.
hwClockSourceSelTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2), )
if mibBuilder.loadTexts: hwClockSourceSelTable.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelTable.setDescription('The system clock source selection table.')
hwClockSourceSelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockSourceSelChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockSourceSelType"))
if mibBuilder.loadTexts: hwClockSourceSelEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelEntry.setDescription('The entry of system clock source selection table.')
hwClockSourceSelChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockSourceSelChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelChassisIndex.setDescription('The chassis index.')
hwClockSourceSelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)))
if mibBuilder.loadTexts: hwClockSourceSelType.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelType.setDescription('The select type.')
hwClockSourceSelMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("auto", 1), ("manual", 2), ("force", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSourceSelMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelMode.setDescription('The mode of clock source selection.')
hwClockSourceSelSourceId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 2, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSourceSelSourceId.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSelSourceId.setDescription('The source ID of the clock traced.')
# --- hwClockSourceCfgTable -------------------------------------------------
# Per-source clock configuration table, indexed by chassis index + source
# index.  Several generated description strings below contain typos
# ("whitch", "he enable status") that mirror the upstream MIB text; they are
# runtime literals and are intentionally left untouched — fix in the MIB
# source if desired.
hwClockSourceCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3), )
if mibBuilder.loadTexts: hwClockSourceCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceCfgTable.setDescription('The clock source config table.')
hwClockSourceCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockCfgChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockCfgSourceIndex"))
if mibBuilder.loadTexts: hwClockSourceCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceCfgEntry.setDescription('The entry of clock source config table.')
hwClockCfgChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockCfgChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgChassisIndex.setDescription('The index of the chassis whitch the clock source belongs to.')
hwClockCfgSourceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 20)))
if mibBuilder.loadTexts: hwClockCfgSourceIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceIndex.setDescription('The clock source index.')
hwClockCfgSourceId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCfgSourceId.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceId.setDescription('The clock source ID.')
hwClockCfgSourceDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCfgSourceDescr.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceDescr.setDescription('The clock source description.')
hwClockCfgWtrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 12))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgWtrTime.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgWtrTime.setDescription('The waiting for restore time of clock source.')
hwClockCfgBadDetect = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 6), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgBadDetect.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgBadDetect.setDescription('The enable status of clock source bad detecting.')
hwClockCfgSystemPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSystemPriority.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSystemPriority.setDescription('The priority of system clock source.')
hwClockCfgBits0Priority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgBits0Priority.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgBits0Priority.setDescription('The priority of BITS0 clock source.')
hwClockCfgBits1Priority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgBits1Priority.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgBits1Priority.setDescription('The priority of BITS1 clock source.')
hwClockCfgSystemLockOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 10), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSystemLockOut.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSystemLockOut.setDescription('The lock out of system clock source.')
hwClockCfgBits0LockOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 11), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgBits0LockOut.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgBits0LockOut.setDescription('The lock out of BITS0 clock source.')
hwClockCfgBits1LockOut = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 12), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgBits1LockOut.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgBits1LockOut.setDescription('The lock out of BITS1 clock source.')
hwClockCfgSourceSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("ssmPrc", 1), ("ssmSsut", 2), ("ssmSsul", 3), ("ssmSec", 4), ("ssmDnu", 5), ("ssmUnknown", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSourceSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceSsm.setDescription('The SSM quality of clock source.')
hwClockCfgSourceSsmSetMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("manual", 1), ("auto", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSourceSsmSetMode.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceSsmSetMode.setDescription('The set mode of SSM.')
hwClockCfgExportEnableStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 15), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgExportEnableStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgExportEnableStatus.setDescription('The enable status of clock source export.')
hwClockCfgSwiEnableStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 16), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSwiEnableStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSwiEnableStatus.setDescription('he enable status of clock source switch.')
hwClockCfgSourceState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("normal", 1), ("abnormal", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSourceState.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceState.setDescription('The state of clock source.')
hwClockCfgSsmThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("qlDnu", 1), ("qlSec", 2), ("qlSsub", 3), ("qlSsua", 4), ("qlPrc", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSsmThreshold.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSsmThreshold.setDescription('The SSM quality level threshold of clock source.')
hwClockCfgSourceS1Id = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCfgSourceS1Id.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSourceS1Id.setDescription('The S1 byte of the clock.')
hwClockCfgFreqCheckResult = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 20), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgFreqCheckResult.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgFreqCheckResult.setDescription('The result of frequency check, unit in 0.01ppm.')
hwClockCfgHoldOffTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 18))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgHoldOffTime.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgHoldOffTime.setDescription('The hold off time of clock, unit in 100ms.')
hwClockCfgPriRvtEnableStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 22), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgPriRvtEnableStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgPriRvtEnableStatus.setDescription('The enable status of switch according priority.')
hwClockCfgSwitchCondition = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("noSwitch", 1), ("switch", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockCfgSwitchCondition.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgSwitchCondition.setDescription('The condition of clock switch.')
hwClockCfgClkSourceType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 3, 1, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("bits", 1), ("line", 2), ("inner", 3), ("system", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCfgClkSourceType.setStatus('current')
if mibBuilder.loadTexts: hwClockCfgClkSourceType.setDescription('The type of clock source.')
# --- hwClockBitsCfgTable ---------------------------------------------------
# BITS (external timing input) clock configuration table, indexed by chassis
# index + BITS index.  Typos in the generated description strings
# ("congfig", "whitch") mirror the upstream MIB and are left untouched.
hwClockBitsCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4), )
if mibBuilder.loadTexts: hwClockBitsCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgTable.setDescription('The clock bits congfig table.')
hwClockBitsCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockBitsCfgChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockBitsCfgBitsIndex"))
if mibBuilder.loadTexts: hwClockBitsCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgEntry.setDescription('The entry of clock bits congfig table.')
hwClockBitsCfgChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockBitsCfgChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgChassisIndex.setDescription('The index of the chassis whitch the clock source belongs to.')
hwClockBitsCfgBitsIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)))
if mibBuilder.loadTexts: hwClockBitsCfgBitsIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgBitsIndex.setDescription('The index of BITS clock.')
hwClockBitsCfgName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 3), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockBitsCfgName.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgName.setDescription('The name of clock.')
hwClockBitsCfgBitsPortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("portRj45", 1), ("portSMB", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockBitsCfgBitsPortType.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgBitsPortType.setDescription('The BITS port type.')
hwClockBitsCfgBitsType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("type2Mbps", 0), ("type2Mhz", 1), ("typeDclsTime", 2), ("type1ppsTod", 3), ("none", 4), ("type1544Mbps", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgBitsType.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgBitsType.setDescription('The BITS type.')
hwClockBitsCfgDirection = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("in", 1), ("out", 2), ("inAndOut", 3), ("none", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgDirection.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgDirection.setDescription('The direction of BITS.')
hwClockBitsCfgRecvSaBit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("sa4", 4), ("sa5", 5), ("sa6", 6), ("sa7", 7), ("sa8", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgRecvSaBit.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgRecvSaBit.setDescription('The received SA bit.')
hwClockBitsCfgSendSaBit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("sa4", 4), ("sa5", 5), ("sa6", 6), ("sa7", 7), ("sa8", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgSendSaBit.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgSendSaBit.setDescription('The sent SA bit.')
hwClockBitsCfgForceOutS1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11, 15))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgForceOutS1.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgForceOutS1.setDescription('The S1 byte of forcing out.')
hwClockBitsCfgSaBit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("sa4", 4), ("sa5", 5), ("sa6", 6), ("sa7", 7), ("sa8", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgSaBit.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgSaBit.setDescription('The SA bit of SSM information.')
hwClockBitsCfgInputMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("clk2MBits", 0), ("clk2MHz", 1), ("dclsTime", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgInputMode.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgInputMode.setDescription('The input mode of clock source.')
hwClockBitsCfgOutputMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("clk2MBits", 0), ("clk2MHz", 1), ("dclsTime", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgOutputMode.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgOutputMode.setDescription('The output mode of clock source.')
hwClockBitsCfgInvalidCond = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("no", 1), ("ais", 2), ("lof", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgInvalidCond.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgInvalidCond.setDescription('The invalid condition of clock source.')
hwClockBitsCfgSourceId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockBitsCfgSourceId.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgSourceId.setDescription('The clock source ID.')
hwClockBitsCfgTodSignal = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("nmea", 1), ("ubx", 2), ("none", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgTodSignal.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgTodSignal.setDescription('The tod signal of clock source.')
hwClockBitsCfgFrameFormat = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 4, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 0), ("pcm30nocrc", 1), ("pcm30crc", 2), ("pcm31nocrc", 3), ("pcm31crc", 4))).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockBitsCfgFrameFormat.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgFrameFormat.setDescription('Encoding type and frame check format of the extern clock port.')
# --- hwClockPortCfgTable ---------------------------------------------------
# Clock port configuration table, indexed by interface index (IF-MIB ifIndex).
hwClockPortCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5), )
if mibBuilder.loadTexts: hwClockPortCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgTable.setDescription('The clock port config table.')
hwClockPortCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockPortCfgIfIndex"))
if mibBuilder.loadTexts: hwClockPortCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgEntry.setDescription('The entry of clock port config table.')
hwClockPortCfgIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwClockPortCfgIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgIfIndex.setDescription('The interface index.')
hwClockPortCfgLeftFramePri = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockPortCfgLeftFramePri.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgLeftFramePri.setDescription('The clock priority of left frame.')
hwClockPortCfgRightFramePri = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockPortCfgRightFramePri.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgRightFramePri.setDescription('The clock priority of right frame.')
hwClockPortCfgForceOutS1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockPortCfgForceOutS1.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgForceOutS1.setDescription('The S1 byte of forcing out.')
# --- hwClockLineClkCfgTable ------------------------------------------------
# Line clock configuration table, indexed by chassis index + slot index.
# Typos in the generated description strings ("witch", "seleced") mirror the
# upstream MIB text and are left untouched.
hwClockLineClkCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6), )
if mibBuilder.loadTexts: hwClockLineClkCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgTable.setDescription('The line clock config table.')
hwClockLineClkCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockLineClkCfgChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockLineClkCfgSlotIndex"))
if mibBuilder.loadTexts: hwClockLineClkCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgEntry.setDescription('The entry of line clock config table.')
hwClockLineClkCfgChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockLineClkCfgChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgChassisIndex.setDescription('The chassis index.')
hwClockLineClkCfgSlotIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 200)))
if mibBuilder.loadTexts: hwClockLineClkCfgSlotIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgSlotIndex.setDescription('The slot index of the line clock.')
hwClockLineClkCfgCardId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 3), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockLineClkCfgCardId.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgCardId.setDescription('The card index witch is seleced to provide line clock.')
hwClockLineClkCfgPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 4), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockLineClkCfgPortId.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgPortId.setDescription('The port index witch is seleced to provide line clock.')
hwClockLineClkCfgRecvS1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockLineClkCfgRecvS1.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgRecvS1.setDescription('The S1 byte value received.')
hwClockLineClkCfgSendS1 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 6), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockLineClkCfgSendS1.setStatus('current')
if mibBuilder.loadTexts: hwClockLineClkCfgSendS1.setDescription('The S1 byte value sent.')
hwClockLineCfgSoureId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 6, 1, 7), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockLineCfgSoureId.setStatus('current')
if mibBuilder.loadTexts: hwClockLineCfgSoureId.setDescription('Description.')
# --- hwClockTrapOid subtree (OID 1.3.6.1.4.1.2011.5.25.186.1.7) ---
# Scalars carried only as notification varbinds (max-access
# accessible-for-notify); they are not readable via GET.
# Auto-generated by pysmi from HUAWEI-CLOCK-MIB.
hwClockTrapOid = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7))
# Previous / current selected clock-source names.
hwClockLastSourceName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 1), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockLastSourceName.setStatus('current')
if mibBuilder.loadTexts: hwClockLastSourceName.setDescription('The last clock source name.')
hwClockCurSourceName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 2), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCurSourceName.setStatus('current')
if mibBuilder.loadTexts: hwClockCurSourceName.setDescription('The current clock source name.')
# Enumerations below mirror the MIB's named-values exactly.
hwClockSourceOldLockMode = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 16, 19))).clone(namedValues=NamedValues(("freeRun", 0), ("fastLock", 1), ("lock", 2), ("hold", 3), ("freeRunJudge", 16), ("holdJudge", 19)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockSourceOldLockMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceOldLockMode.setDescription('The old lock mode of clock source.')
hwClockChassisId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 4), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockChassisId.setStatus('current')
if mibBuilder.loadTexts: hwClockChassisId.setDescription('The chassis ID.')
hwClockOldSourceState = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("initial", 0), ("normal", 1), ("abnormal", 2), ("wtr", 3), ("holdoff", 4)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockOldSourceState.setStatus('current')
if mibBuilder.loadTexts: hwClockOldSourceState.setDescription('The old state of clock source.')
hwClockPllId = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("system", 1), ("sync2M1", 2), ("sync2M2", 3)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockPllId.setStatus('current')
if mibBuilder.loadTexts: hwClockPllId.setDescription('The id of pll.')
# SSM quality-level enumeration (G.781-style codes: prc/ssua/ssub/sec/dnu).
hwClockAttributeOutValue = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11, 15))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockAttributeOutValue.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeOutValue.setDescription('The current output value.')
# CES ACR (adaptive clock recovery) notification payload scalars.
hwClockCesAcrSlot = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 8), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrSlot.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrSlot.setDescription('The slot ID of CES ACR clock source.')
hwClockCesAcrCard = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 9), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrCard.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCard.setDescription('The card ID of CES ACR clock source.')
hwClockCesAcrDomain = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 10), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomain.setDescription('The recovery domain value of CES ACR clock source.')
hwClockCesAcrOldMasterPwName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 11), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrOldMasterPwName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrOldMasterPwName.setDescription('The master pw SerialPort name of CES ACR old clock source.')
hwClockCesAcrNewMasterPwName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 12), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrNewMasterPwName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrNewMasterPwName.setDescription('The master pw SerialPort name of CES ACR new clock source.')
hwClockCesAcrLockState = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 13), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesAcrLockState.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrLockState.setDescription('The lock state of the CES ACR.')
# CES DCR (differential clock recovery) notification payload scalars.
hwClockCesDcrSlot = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 14), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrSlot.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrSlot.setDescription('The slot ID of CES DCR clock source.')
hwClockCesDcrCard = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 15), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrCard.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrCard.setDescription('The card ID of CES DCR clock source.')
hwClockCesDcrDomain = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 16), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrDomain.setDescription('The recovery domain value of CES DCR clock source.')
hwClockCesDcrOldMasterPwName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 17), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrOldMasterPwName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrOldMasterPwName.setDescription('The master pw SerialPort name of CES DCR old clock source.')
hwClockCesDcrNewMasterPwName = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 18), OctetString()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrNewMasterPwName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrNewMasterPwName.setDescription('The master pw SerialPort name of CES DCR new clock source.')
hwClockCesDcrLockState = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 7, 19), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockCesDcrLockState.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrLockState.setDescription('The lock state of the CES DCR.')
# --- hwClockNotifications (OID 1.3.6.1.4.1.2011.5.25.186.1.8) ---
# NOTIFICATION-TYPE definitions; each setObjects() tuple lists the varbinds
# the trap carries, in order. Sub-ids are sparse (9, 10, 18-21 unused here —
# presumably defined elsewhere or reserved; confirm against the full MIB).
# Auto-generated by pysmi from HUAWEI-CLOCK-MIB.
hwClockNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8))
hwClockSourceSwitch = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 1)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockLastSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockSrcSelMode"))
if mibBuilder.loadTexts: hwClockSourceSwitch.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSwitch.setDescription('Clock source switch notification.')
hwClockSourceSysClkLockModeChange = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 2)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockSourceOldLockMode"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeSysClkLockMode"))
if mibBuilder.loadTexts: hwClockSourceSysClkLockModeChange.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceSysClkLockModeChange.setDescription('The lock mode of system clock source change notification.')
# State-change / resume pair (same varbinds, opposite direction).
hwClockSourceStateChange = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 3)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockOldSourceState"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceState"))
if mibBuilder.loadTexts: hwClockSourceStateChange.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceStateChange.setDescription('The state of clock source change notification.')
hwClockSourceStateResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 4)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockOldSourceState"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceState"))
if mibBuilder.loadTexts: hwClockSourceStateResume.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceStateResume.setDescription('The state of clock source resume notification.')
hwClockSourceFreqCheck = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 5)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceDescr"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgFreqCheckResult"))
if mibBuilder.loadTexts: hwClockSourceFreqCheck.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFreqCheck.setDescription('The result of clock source frequnce check abnormal notification.')
hwClockSourceOutputBelowThreshold = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 6)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeOutThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeOutValue"))
if mibBuilder.loadTexts: hwClockSourceOutputBelowThreshold.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceOutputBelowThreshold.setDescription('The SSM of output below threshold notification.')
# Locked-mode enter/leave pair.
hwClockNotInLockedMode = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 7)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeSysClkLockMode"))
if mibBuilder.loadTexts: hwClockNotInLockedMode.setStatus('current')
if mibBuilder.loadTexts: hwClockNotInLockedMode.setDescription('The work mode of system clock is not in locked mode.')
hwClockInLockedMode = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 8)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeSysClkLockMode"))
if mibBuilder.loadTexts: hwClockInLockedMode.setStatus('current')
if mibBuilder.loadTexts: hwClockInLockedMode.setDescription('The work mode of system clock is in locked mode.')
# Source failed/valid pair (sub-ids jump to 11/12).
hwClockSourceFailed = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 11)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceState"))
if mibBuilder.loadTexts: hwClockSourceFailed.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFailed.setDescription('The state of clock source is failed.')
hwClockSourceValid = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 12)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceState"))
if mibBuilder.loadTexts: hwClockSourceValid.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceValid.setDescription('The state of clock source is valid.')
hwClockSourceFreqCheckResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 13)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceDescr"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgFreqCheckResult"))
if mibBuilder.loadTexts: hwClockSourceFreqCheckResume.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceFreqCheckResume.setDescription('The result of clock source frequnce check normal notification.')
# NOTE(review): varbind order here differs from hwClockSourceOutputBelowThreshold
# (hwClockCurSourceName last) — matches the MIB as generated.
hwClockSourceOutputBelowThresholdResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 14)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeOutThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeOutValue"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"))
if mibBuilder.loadTexts: hwClockSourceOutputBelowThresholdResume.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceOutputBelowThresholdResume.setDescription('The SSM of output above threshold notification.')
# CES ACR notifications.
hwClockCesAcrMasterPwChange = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 15)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesAcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrOldMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrNewMasterPwName"))
if mibBuilder.loadTexts: hwClockCesAcrMasterPwChange.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrMasterPwChange.setDescription('CES ACR master PW status change.')
hwClockCesAcrLockFail = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 16)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesAcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrLockState"))
if mibBuilder.loadTexts: hwClockCesAcrLockFail.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrLockFail.setDescription('CES ACR clock source lock fail.')
hwClockCesAcrLockFailResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 17)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesAcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrLockState"))
if mibBuilder.loadTexts: hwClockCesAcrLockFailResume.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrLockFailResume.setDescription('CES ACR clock source lock fail resume.')
# Cluster topology notifications (sub-ids jump to 22/23).
hwClockClusterTopoFail = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 22)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockClusterSyncType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoLinkType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoStatus"))
if mibBuilder.loadTexts: hwClockClusterTopoFail.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoFail.setDescription('Clock cluster inter-chassis synchronization topo compute failed.')
hwClockClusterTopoFailResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 23)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockClusterSyncType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoLinkType"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoStatus"))
if mibBuilder.loadTexts: hwClockClusterTopoFailResume.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoFailResume.setDescription('Clock inter-chassis synchronization topo compute successfully.')
hwClockSourceInputBelowThreshold = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 24)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeInputThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceSsm"))
if mibBuilder.loadTexts: hwClockSourceInputBelowThreshold.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceInputBelowThreshold.setDescription('The SSM of input below threshold notification.')
hwClockSourceInputBelowThresholdResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 25)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeInputThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceSsm"))
if mibBuilder.loadTexts: hwClockSourceInputBelowThresholdResume.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceInputBelowThresholdResume.setDescription('The SSM of input above or equal threshold notification.')
hwClockSsmPktLos = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 26)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"))
if mibBuilder.loadTexts: hwClockSsmPktLos.setStatus('current')
if mibBuilder.loadTexts: hwClockSsmPktLos.setDescription('The ssm packet of clock source is lost.')
hwClockSsmPktLosResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 27)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"))
if mibBuilder.loadTexts: hwClockSsmPktLosResume.setStatus('current')
if mibBuilder.loadTexts: hwClockSsmPktLosResume.setDescription('The ssm packet of clock source is normal.')
# CES DCR notifications.
hwClockCesDcrMasterPwChange = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 28)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesDcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrOldMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrNewMasterPwName"))
if mibBuilder.loadTexts: hwClockCesDcrMasterPwChange.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrMasterPwChange.setDescription('CES DCR master PW status change.')
hwClockCesDcrLockFail = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 29)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesDcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrLockState"))
if mibBuilder.loadTexts: hwClockCesDcrLockFail.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrLockFail.setDescription('CES DCR clock source lock fail.')
hwClockCesDcrLockFailResume = NotificationType((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 8, 30)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCesDcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrLockState"))
if mibBuilder.loadTexts: hwClockCesDcrLockFailResume.setStatus('current')
if mibBuilder.loadTexts: hwClockCesDcrLockFailResume.setDescription('CES DCR clock source lock fail resume.')
# --- hwClockAttributeTable (OID 1.3.6.1.4.1.2011.5.25.186.1.9) ---
# Per-chassis clock attribute/configuration table, indexed by chassis.
# Auto-generated by pysmi from HUAWEI-CLOCK-MIB.
hwClockAttributeTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9), )
if mibBuilder.loadTexts: hwClockAttributeTable.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeTable.setDescription('The clock Attribute table.')
hwClockAttributeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockAttributeChassisIndex"))
if mibBuilder.loadTexts: hwClockAttributeEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeEntry.setDescription('The entry of clock Attribute table.')
# Index column.
hwClockAttributeChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockAttributeChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeChassisIndex.setDescription('The chassis index.')
hwClockAttributeSysClkRunMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("normal", 0), ("freeRun", 1), ("hold", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeSysClkRunMode.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeSysClkRunMode.setDescription('The run mode of system clock.')
# NOTE: on(0)/off(1) — zero means enabled for this object.
hwClockAttributeSsmControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("on", 0), ("off", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeSsmControl.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeSsmControl.setDescription('The flag whether SSM is concerned with the clock source selection.')
hwClockAttributeFreqCheckEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 4), EnabledStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeFreqCheckEnable.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeFreqCheckEnable.setDescription('The enable flag of frequency check.')
# .clone('retrieve') sets the DEFVAL.
hwClockAttributeRetrieveMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("retrieve", 0), ("noRetrieve", 1))).clone('retrieve')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeRetrieveMode.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeRetrieveMode.setDescription('The retrieve mode of system clock.')
# Wait-to-restore time, 0..12 (units not stated here — presumably minutes; confirm in MIB).
hwClockAttributeWtrTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 12)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeWtrTime.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeWtrTime.setDescription('The time waiting for retrieve.')
# Hold-off time, 300..1800, DEFVAL 1000 (units not stated here — presumably ms; confirm in MIB).
hwClockAttributeHoldOffTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(300, 1800)).clone(1000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeHoldOffTime.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeHoldOffTime.setDescription('The holdoff-time when the system source is lost.')
# SSM quality thresholds / maxima (G.781-style codes).
hwClockAttributeOutThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 4, 8, 11, 15))).clone(namedValues=NamedValues(("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeOutThreshold.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeOutThreshold.setDescription('The Threshold of out put.')
hwClockAttributeSysMaxOutSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeSysMaxOutSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeSysMaxOutSsm.setDescription('The max ssm of system out put.')
hwClockAttribute2M1MaxOutSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttribute2M1MaxOutSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockAttribute2M1MaxOutSsm.setDescription('The max ssm of 2msync-1 out put.')
hwClockAttribute2M2MaxOutSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttribute2M2MaxOutSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockAttribute2M2MaxOutSsm.setDescription('The max ssm of 2msync-2 out put.')
# Read-only current lock mode of the system clock.
hwClockAttributeSysClkLockMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 16, 19))).clone(namedValues=NamedValues(("freeRun", 0), ("fastLock", 1), ("lock", 2), ("hold", 3), ("freeRunJudge", 16), ("holdJudge", 19)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockAttributeSysClkLockMode.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeSysClkLockMode.setDescription('The Lock mode of system clock.')
hwClockAttributeExtendSsmControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("on", 0), ("off", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeExtendSsmControl.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeExtendSsmControl.setDescription('The flag whether Extend SSM is concerned with the clock source selection.')
hwClockAttributeInternalClockId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeInternalClockId.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeInternalClockId.setDescription('The internal clockid of the device.')
# 1PPS/TOD protocol selection; DEFVAL ubx(2).
hwClockAttributeTodProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("nmea", 1), ("ubx", 2), ("none", 3), ("ccsa", 4))).clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeTodProtocol.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeTodProtocol.setDescription('1pps bits tod protocol.')
# EnabledStatus with DEFVAL 2 (disabled per the EnabledStatus convention — confirm TC definition).
hwClockAttributeLtiSquelch = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 16), EnabledStatus().clone(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeLtiSquelch.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeLtiSquelch.setDescription('The frequency signal output squelch flag upon the frequency loss.')
hwClockAttributeInputThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 9, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 4, 8, 11, 15))).clone(namedValues=NamedValues(("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15))).clone(15)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockAttributeInputThreshold.setStatus('current')
if mibBuilder.loadTexts: hwClockAttributeInputThreshold.setDescription('The squelch threshold of the external input source.')
# --- hwClockSrcSelTable (OID 1.3.6.1.4.1.2011.5.25.186.1.10) ---
# Source-selection state per (chassis, PLL). Auto-generated by pysmi
# from HUAWEI-CLOCK-MIB.
hwClockSrcSelTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10), )
if mibBuilder.loadTexts: hwClockSrcSelTable.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelTable.setDescription('The system clock source selection table.')
hwClockSrcSelEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockSrcSelChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockSrcSelType"))
if mibBuilder.loadTexts: hwClockSrcSelEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelEntry.setDescription('The entry of system clock source selection table.')
# Index columns: chassis and PLL type (system / 2M sync 1 / 2M sync 2).
hwClockSrcSelChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockSrcSelChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelChassisIndex.setDescription('The chassis index.')
hwClockSrcSelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("system", 1), ("sync2M1", 2), ("sync2M2", 3))))
if mibBuilder.loadTexts: hwClockSrcSelType.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelType.setDescription('The PLL Id.')
# Selection mode (auto/manual/force) and the source name it applies to.
hwClockSrcSelMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("auto", 0), ("manual", 1), ("force", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSrcSelMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelMode.setDescription('The mode of clock source selection.')
hwClockSrcSelSrcName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1, 4), OctetString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSrcSelSrcName.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcSelSrcName.setDescription('The name of clock source for selection.')
# Read-only: the source currently being traced.
hwClockSrcTraceSrcName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 10, 1, 5), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcTraceSrcName.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcTraceSrcName.setDescription('The name of trace source.')
# --- hwClockSrcCfgTable (OID 1.3.6.1.4.1.2011.5.25.186.1.11) ---
# Per-source configuration, indexed by (chassis, source type, source index).
# Auto-generated by pysmi from HUAWEI-CLOCK-MIB.
hwClockSrcCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11), )
if mibBuilder.loadTexts: hwClockSrcCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgTable.setDescription('The clock source config table.')
hwClockSrcCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockSrcCfgChassisIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceTypeIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockSrcCfgSourceIndex"))
if mibBuilder.loadTexts: hwClockSrcCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgEntry.setDescription('The entry of clock source config table.')
# Index columns. NOTE(review): hwClockSrcCfgSourceTypeIndex is an index column
# yet carries max-access accessible-for-notify — unusual, but kept as generated.
hwClockSrcCfgChassisIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 1), PhysicalIndex())
if mibBuilder.loadTexts: hwClockSrcCfgChassisIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgChassisIndex.setDescription('The chassis index.')
hwClockSrcCfgSourceTypeIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("bits", 1), ("ptp", 2), ("interface", 3)))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: hwClockSrcCfgSourceTypeIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSourceTypeIndex.setDescription('The type of clock source.')
hwClockSrcCfgSourceIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 3), Integer32())
if mibBuilder.loadTexts: hwClockSrcCfgSourceIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSourceIndex.setDescription('The index of clock source.')
hwClockSrcCfgSourceDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgSourceDescr.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSourceDescr.setDescription('The description of clock source.')
hwClockSrcCfgClkEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 5), EnabledStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgClkEnable.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgClkEnable.setDescription('The enable flag of clock source.')
# Per-PLL priorities (0..255) for system / 2msync-1 / 2msync-2 selection.
hwClockSrcCfgSystemPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgSystemPriority.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSystemPriority.setDescription('The priority of system clock source.')
hwClockSrcCfg2M1Priority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfg2M1Priority.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfg2M1Priority.setDescription('The priority of 2msync-1 clock source.')
hwClockSrcCfg2M2Priority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfg2M2Priority.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfg2M2Priority.setDescription('The priority of 2msync-2 clock source.')
# Configured SSM quality of the source (G.781-style codes plus unknown(16)).
hwClockSrcCfgSourceSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11, 15, 16))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15), ("unknown", 16)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgSourceSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSourceSsm.setDescription('The SSM quality of clock source.')
hwClockSrcCfgSsmSetMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("manual", 1), ("auto", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgSsmSetMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSsmSetMode.setDescription('The set mode of SSM.')
hwClockSrcCfgSourceState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("initial", 0), ("normal", 1), ("abnormal", 2), ("waitwtr", 3), ("holdoff", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgSourceState.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSourceState.setDescription('The state of clock source.')
hwClockSrcCfgFreqCheckResult = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("abnormal", 0), ("normal", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgFreqCheckResult.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgFreqCheckResult.setDescription('The result of frequency check.')
hwClockSrcCfgSsmInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(512, 8000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgSsmInterval.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSsmInterval.setDescription('Description.')
hwClockSrcCfgSsmTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(2000, 32000))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgSsmTimeout.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSsmTimeout.setDescription('Description.')
hwClockSrcCfgSabit = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(4, 5, 6, 7, 8, 99))).clone(namedValues=NamedValues(("sa4", 4), ("sa5", 5), ("sa6", 6), ("sa7", 7), ("sa8", 8), ("invalid", 99))).clone(4)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSrcCfgSabit.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgSabit.setDescription('The SA bit of E1 Port SSM information.')
hwClockSrcCfgClockId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwClockSrcCfgClockId.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgClockId.setDescription('The clockid of clock source.')
hwClockSrcCfgClockIdSetMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("manual", 1), ("auto", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgClockIdSetMode.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgClockIdSetMode.setDescription('The set mode of clockid.')
hwClockSrcCfgOutSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11, 15, 16, 99))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15), ("unknown", 16), ("invalid", 99)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgOutSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgOutSsm.setDescription('Current output ssm.')
hwClockSrcCfgOutClockId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 99))).clone(namedValues=NamedValues(("clockid0", 0), ("clockid1", 1), ("clockid2", 2), ("clockid3", 3), ("clockid4", 4), ("clockid5", 5), ("clockid6", 6), ("clockid7", 7), ("clockid8", 8), ("clockid9", 9), ("clockid10", 10), ("clockid11", 11), ("clockid12", 12), ("clockid13", 13), ("clockid14", 14), ("clockid15", 15), ("notsupport", 99)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgOutClockId.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgOutClockId.setDescription('Current output clockid.')
hwClockSrcCfgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 20), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgRowStatus.setDescription('The row status.')
hwClockSrcCfgFreqDeviation = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 21), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgFreqDeviation.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgFreqDeviation.setDescription('Freqdeviation value of clock source.')
hwClockSrcCfgPhyState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("cardTypeNotSupport", 0), ("slave", 1), ("master", 2), ("speedNotSupport", 3), ("portDown", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockSrcCfgPhyState.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgPhyState.setDescription('The PHY clock state of ports.')
hwClockSrcCfgNegotiationSlave = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 11, 1, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("notSupport", 0), ("enable", 1), ("disable", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockSrcCfgNegotiationSlave.setStatus('current')
if mibBuilder.loadTexts: hwClockSrcCfgNegotiationSlave.setDescription('Set PHY clock state to slave.')
# --- hwClockCesAcrPortCfgTable (enterprises.2011.5.25.186.1.12) --------------
# CES ACR (adaptive clock recovery) per-port configuration. Rows are indexed
# by (parent ifIndex, channel id, ifIndex); columns select clock mode and the
# recovery / PW / master domains for the interface.
hwClockCesAcrPortCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12), )
if mibBuilder.loadTexts: hwClockCesAcrPortCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrPortCfgTable.setDescription('The CES ACR clock port config table.')
hwClockCesAcrPortCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrParentIfIndex"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrChannelId"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrIfIndex"))
if mibBuilder.loadTexts: hwClockCesAcrPortCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrPortCfgEntry.setDescription('The entry of CES ACR clock port config table.')
# Index columns: parent interface, channel, interface.
hwClockCesAcrParentIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: hwClockCesAcrParentIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrParentIfIndex.setDescription('Indicates the index of the parent interface.')
hwClockCesAcrChannelId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 2), Integer32())
if mibBuilder.loadTexts: hwClockCesAcrChannelId.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrChannelId.setDescription('Indicates the channel ID.')
hwClockCesAcrIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 3), InterfaceIndex())
if mibBuilder.loadTexts: hwClockCesAcrIfIndex.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrIfIndex.setDescription('Indicates the interface index.')
hwClockCesAcrPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrPortName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrPortName.setDescription('Port name.')
hwClockCesAcrChannelType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("t1", 1), ("e1", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrChannelType.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrChannelType.setDescription('Indicates the interface type. The type can be E1/CE1 or T1/CT1.')
# Clock mode: master = internal clock, slave = line clock,
# recoveryDomain = slave driven by a recovery-domain clock.
hwClockCesAcrSourceMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("master", 1), ("slave", 2), ("recoveryDomain", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrSourceMode.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrSourceMode.setDescription('Indicates the clock mode of the interface. master: indicates that the clock works in master mode and uses the internal clock signal. slave: indicates that the clock works in slave mode and uses the line clock signal. recovery-domain: indicates that the clock works in slave mode and uses the recovery domain clock signal. ')
hwClockCesAcrRecoveryDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrRecoveryDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrRecoveryDomain.setDescription('Indicates the clock recovery domain of the interface. DEFVAL is 0.')
hwClockCesAcrPwDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 8))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrPwDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrPwDomain.setDescription('Indicates the clock PW domain of the interface. DEFVAL is 0.')
hwClockCesAcrPortCfgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrPortCfgRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrPortCfgRowStatus.setDescription('The row status.')
hwClockCesAcrMasterDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrMasterDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrMasterDomain.setDescription('Indicates the clock master domain of the interface. DEFVAL is 0.')
# Recovery algorithm selector: ACR (adaptive) vs DCR (differential).
hwClockCesMode = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 12, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("acr", 1), ("dcr", 2)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesMode.setStatus('current')
if mibBuilder.loadTexts: hwClockCesMode.setDescription('Indicates the clock CES recovery mode of the interface. DEFVAL is 0.')
# --- hwClockCesAcrCfgTable (enterprises.2011.5.25.186.1.13) ------------------
# CES ACR clock-source configuration per (slot, card, recovery domain):
# enable flag, system priority, SSM quality, clock-id, and read-only state.
hwClockCesAcrCfgTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13), )
if mibBuilder.loadTexts: hwClockCesAcrCfgTable.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgTable.setDescription('The CES ACR clock source config table.')
hwClockCesAcrCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrCfgSlot"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrCfgCard"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrCfgDomain"))
if mibBuilder.loadTexts: hwClockCesAcrCfgEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgEntry.setDescription('The entry of CES ACR clock source config table.')
# Index columns: slot, card, recovery domain (1..8).
hwClockCesAcrCfgSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 1), Integer32())
if mibBuilder.loadTexts: hwClockCesAcrCfgSlot.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgSlot.setDescription('The slot ID of CES ACR clock source.')
hwClockCesAcrCfgCard = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 2), Integer32())
if mibBuilder.loadTexts: hwClockCesAcrCfgCard.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgCard.setDescription('The card ID of CES ACR clock source.')
hwClockCesAcrCfgDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8)))
if mibBuilder.loadTexts: hwClockCesAcrCfgDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgDomain.setDescription('The recovery domain value of CES ACR clock source.')
hwClockCesAcrCfgDescr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrCfgDescr.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgDescr.setDescription('The description of clock source.')
hwClockCesAcrCfgSyncEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 5), EnabledStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrCfgSyncEnable.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgSyncEnable.setDescription('The enable flag of CES ACR clock source.')
hwClockCesAcrCfgSystemPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrCfgSystemPriority.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgSystemPriority.setDescription('The priority of system CES ACR clock source. DEFVAL is 0.')
# Same G.781-style SSM value set as hwClockSrcCfgSourceSsm.
hwClockCesAcrCfgSsm = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 2, 4, 8, 11, 15, 16))).clone(namedValues=NamedValues(("unk", 0), ("prc", 2), ("ssua", 4), ("ssub", 8), ("sec", 11), ("dnu", 15), ("unknown", 16)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrCfgSsm.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgSsm.setDescription('The SSM quality of CES ACR clock source.')
hwClockCesAcrCfgClockId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 15))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrCfgClockId.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgClockId.setDescription('The clockid of clock source. DEFVAL is 0.')
# Read-only status columns.
hwClockCesAcrCfgSourceState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(namedValues=NamedValues(("initial", 0), ("normal", 1), ("abnormal", 2), ("waitwtr", 3), ("holdoff", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrCfgSourceState.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgSourceState.setDescription('The state of CES ACR clock source.')
hwClockCesAcrCfgFreqCheckResult = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("abnormal", 0), ("normal", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrCfgFreqCheckResult.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgFreqCheckResult.setDescription('The result of CES ACR clock source frequency check.')
hwClockCesAcrCfgRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 13, 1, 11), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwClockCesAcrCfgRowStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrCfgRowStatus.setDescription('The row status.')
# --- hwClockCesAcrDomainInfoTable (enterprises.2011.5.25.186.1.14) -----------
# Read-only CES ACR recovery-domain status, indexed by (slot, card, domain).
# FIX: corrected the "infomation" typo in the table/entry DESCRIPTION strings.
# NOTE: the "Domian" spelling in the column identifiers is wrong in the source
# MIB but is the exported symbol name, so it must be kept for compatibility.
hwClockCesAcrDomainInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14), )
if mibBuilder.loadTexts: hwClockCesAcrDomainInfoTable.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomainInfoTable.setDescription('The CES ACR domain information table.')
hwClockCesAcrDomainInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrDomianInfoSlot"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrDomianInfoCard"), (0, "HUAWEI-CLOCK-MIB", "hwClockCesAcrDomianInfoDomain"))
if mibBuilder.loadTexts: hwClockCesAcrDomainInfoEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomainInfoEntry.setDescription('The entry of CES ACR domain information table.')
# Index columns: slot, card, recovery domain (1..16).
hwClockCesAcrDomianInfoSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 1), Integer32())
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoSlot.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoSlot.setDescription('The slot ID of CES ACR clock source.')
hwClockCesAcrDomianInfoCard = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 2), Integer32())
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoCard.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoCard.setDescription('The card ID of CES ACR clock source.')
hwClockCesAcrDomianInfoDomain = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16)))
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoDomain.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoDomain.setDescription('The recovery domain value of CES ACR clock source.')
# Status columns: master PW name/channel and current lock state.
hwClockCesAcrDomianInfoMasterPwName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 4), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoMasterPwName.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoMasterPwName.setDescription('Port name.')
hwClockCesAcrDomianInfoChannelId = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoChannelId.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoChannelId.setDescription('Indicates the channel ID.')
hwClockCesAcrDomianInfoState = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 14, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("wait", 2), ("lock", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoState.setStatus('current')
if mibBuilder.loadTexts: hwClockCesAcrDomianInfoState.setDescription('The state of CES ACR clock source.')
# --- hwClockClusterTopoTable (enterprises.2011.5.25.186.1.15) ----------------
# Read-only inter-chassis (cluster) clock-sync topology status, indexed by
# (sync type, topo type, link type).
# FIX: the table DESCRIPTION was a copy-paste of the CES ACR domain table's
# text ('The CES ACR domain infomation table.'); the entry DESCRIPTION was the
# placeholder 'Description.'; and the topo-type DESCRIPTION ended with a
# doubled period. All three strings are corrected below.
hwClockClusterTopoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15), )
if mibBuilder.loadTexts: hwClockClusterTopoTable.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoTable.setDescription('The clock inter-chassis topology table.')
hwClockClusterTopoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15, 1), ).setIndexNames((0, "HUAWEI-CLOCK-MIB", "hwClockClusterSyncType"), (0, "HUAWEI-CLOCK-MIB", "hwClockClusterTopoType"), (0, "HUAWEI-CLOCK-MIB", "hwClockClusterTopoLinkType"))
if mibBuilder.loadTexts: hwClockClusterTopoEntry.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoEntry.setDescription('The entry of clock inter-chassis topology table.')
# Index column 1: what is being synchronized across the cluster.
hwClockClusterSyncType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("frequency", 1), ("time", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockClusterSyncType.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterSyncType.setDescription('The type of clock inter-chassis sync.')
# Index column 2: topology type (only interlink defined).
hwClockClusterTopoType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("interlink", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockClusterTopoType.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoType.setDescription('The type of clock inter-chassis topo.')
# Index column 3: link type (only bits defined).
hwClockClusterTopoLinkType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1))).clone(namedValues=NamedValues(("bits", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockClusterTopoLinkType.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoLinkType.setDescription('The type of clock inter-chassis link.')
# Status column: whether the inter-chassis topology is established.
hwClockClusterTopoStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 1, 15, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("fail", 1), ("success", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwClockClusterTopoStatus.setStatus('current')
if mibBuilder.loadTexts: hwClockClusterTopoStatus.setDescription('The status of clock inter-chassis topo.')
# --- Conformance section (enterprises.2011.5.25.186.10) ----------------------
# SMIv2 MODULE-COMPLIANCE and OBJECT-GROUP / NOTIFICATION-GROUP definitions.
# The `getattr(mibBuilder, 'version', ...) > (4, 4, 0)` guards keep the module
# loadable on older pysnmp releases where setStatus on these group objects is
# unavailable.
hwClockConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10))
hwClockSourceCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 1))
# Module compliance: the set of groups an implementation must support.
hwClockSourceCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 1, 1)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockManageSysGroup"), ("HUAWEI-CLOCK-MIB", "hwClockSourceCfgGroup"), ("HUAWEI-CLOCK-MIB", "hwClockPortCfgGroup"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgGroup"), ("HUAWEI-CLOCK-MIB", "hwClockNotificationsGroup"), ("HUAWEI-CLOCK-MIB", "hwClockSysSelGroup"), ("HUAWEI-CLOCK-MIB", "hwClockTrapOidGroup"), ("HUAWEI-CLOCK-MIB", "hwClockLineCfgGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockSourceCompliance = hwClockSourceCompliance.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceCompliance.setDescription('The compliance of clock MIB.')
hwClockSourceGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2))
# Object group: system-wide clock management scalars.
hwClockManageSysGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 8)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockSourceSysClkWorkMode"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFreqCheckEnable"), ("HUAWEI-CLOCK-MIB", "hwClockSourceHoldMode"), ("HUAWEI-CLOCK-MIB", "hwClockSourceSsmControl"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFreqCheckRightRange"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFreqCheckLeftRange"), ("HUAWEI-CLOCK-MIB", "hwClockSourceRetrieveMode"), ("HUAWEI-CLOCK-MIB", "hwClockSourceForceCloseEnableStatus"), ("HUAWEI-CLOCK-MIB", "hwClockSourceSsmUnknown"), ("HUAWEI-CLOCK-MIB", "hwClockExtTimeOutputType"), ("HUAWEI-CLOCK-MIB", "hwClockExtTimeInputType"), ("HUAWEI-CLOCK-MIB", "hwClockTimeUsedSource"), ("HUAWEI-CLOCK-MIB", "hwClockSourceEthClkEnable"), ("HUAWEI-CLOCK-MIB", "hwClockAlarmThresholdFrequencyOffset"), ("HUAWEI-CLOCK-MIB", "hwClockFrequencyOffsetMax"), ("HUAWEI-CLOCK-MIB", "hwClockFrequencyOffsetMin"), ("HUAWEI-CLOCK-MIB", "hwClockFrequencyOffsetMean"), ("HUAWEI-CLOCK-MIB", "hwClockFrequencyOffset"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockManageSysGroup = hwClockManageSysGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockManageSysGroup.setDescription('The manage group.')
# Object group: source-selection and ACR/DCR lock state objects.
hwClockSysSelGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 9)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockSourceSelMode"), ("HUAWEI-CLOCK-MIB", "hwClockSourceSelSourceId"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockLastSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockPllId"), ("HUAWEI-CLOCK-MIB", "hwClockSourceOldLockMode"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrOldMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrNewMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockAttributeOutValue"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrLockState"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrSlot"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrCard"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrDomain"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrOldMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrNewMasterPwName"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrLockState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockSysSelGroup = hwClockSysSelGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockSysSelGroup.setDescription('The system selection group.')
# Object group: per-source configuration columns.
hwClockSourceCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 10)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockCfgSourceId"), ("HUAWEI-CLOCK-MIB", "hwClockCfgPriRvtEnableStatus"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSwitchCondition"), ("HUAWEI-CLOCK-MIB", "hwClockCfgWtrTime"), ("HUAWEI-CLOCK-MIB", "hwClockCfgBadDetect"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSourceSsm"), ("HUAWEI-CLOCK-MIB", "hwClockCfgExportEnableStatus"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSwiEnableStatus"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSourceState"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSourceDescr"), ("HUAWEI-CLOCK-MIB", "hwClockCfgFreqCheckResult"), ("HUAWEI-CLOCK-MIB", "hwClockCfgHoldOffTime"), ("HUAWEI-CLOCK-MIB", "hwClockCfgBits0Priority"), ("HUAWEI-CLOCK-MIB", "hwClockCfgBits1Priority"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSystemPriority"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSourceSsmSetMode"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSourceS1Id"), ("HUAWEI-CLOCK-MIB", "hwClockCfgClkSourceType"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSsmThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockCfgSystemLockOut"), ("HUAWEI-CLOCK-MIB", "hwClockCfgBits0LockOut"), ("HUAWEI-CLOCK-MIB", "hwClockCfgBits1LockOut"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgTodSignal"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockSourceCfgGroup = hwClockSourceCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockSourceCfgGroup.setDescription('The clock source group.')
# Object group: per-port clock configuration.
hwClockPortCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 13)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockPortCfgLeftFramePri"), ("HUAWEI-CLOCK-MIB", "hwClockPortCfgRightFramePri"), ("HUAWEI-CLOCK-MIB", "hwClockPortCfgForceOutS1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockPortCfgGroup = hwClockPortCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockPortCfgGroup.setDescription('The port config of clock source group.')
# Object group: BITS interface configuration.
hwClockBitsCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 14)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockBitsCfgRecvSaBit"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgSendSaBit"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgForceOutS1"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgName"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgBitsType"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgDirection"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgSaBit"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgInputMode"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgOutputMode"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgSourceId"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgInvalidCond"), ("HUAWEI-CLOCK-MIB", "hwClockBitsCfgBitsPortType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockBitsCfgGroup = hwClockBitsCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockBitsCfgGroup.setDescription('The BITS clock source group.')
# Object group: varbinds carried by the notifications below.
hwClockTrapOidGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 15)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockLastSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockCurSourceName"), ("HUAWEI-CLOCK-MIB", "hwClockSourceOldLockMode"), ("HUAWEI-CLOCK-MIB", "hwClockChassisId"), ("HUAWEI-CLOCK-MIB", "hwClockOldSourceState"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockTrapOidGroup = hwClockTrapOidGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockTrapOidGroup.setDescription('The clock trap group.')
# Notification group: all traps defined by this MIB.
hwClockNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 16)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockSourceSwitch"), ("HUAWEI-CLOCK-MIB", "hwClockSourceStateChange"), ("HUAWEI-CLOCK-MIB", "hwClockSourceStateResume"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFreqCheck"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFreqCheckResume"), ("HUAWEI-CLOCK-MIB", "hwClockSourceOutputBelowThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockSourceOutputBelowThresholdResume"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrLockFail"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrLockFailResume"), ("HUAWEI-CLOCK-MIB", "hwClockCesAcrMasterPwChange"), ("HUAWEI-CLOCK-MIB", "hwClockSourceValid"), ("HUAWEI-CLOCK-MIB", "hwClockInLockedMode"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoFailResume"), ("HUAWEI-CLOCK-MIB", "hwClockClusterTopoFail"), ("HUAWEI-CLOCK-MIB", "hwClockNotInLockedMode"), ("HUAWEI-CLOCK-MIB", "hwClockSourceSysClkLockModeChange"), ("HUAWEI-CLOCK-MIB", "hwClockSourceFailed"), ("HUAWEI-CLOCK-MIB", "hwClockSourceInputBelowThreshold"), ("HUAWEI-CLOCK-MIB", "hwClockSourceInputBelowThresholdResume"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrMasterPwChange"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrLockFail"), ("HUAWEI-CLOCK-MIB", "hwClockCesDcrLockFailResume"), ("HUAWEI-CLOCK-MIB", "hwClockSsmPktLos"), ("HUAWEI-CLOCK-MIB", "hwClockSsmPktLosResume"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockNotificationsGroup = hwClockNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockNotificationsGroup.setDescription('This is the group of clock notification.')
# Object group: line-clock (S1 byte) configuration.
hwClockLineCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 186, 10, 2, 17)).setObjects(("HUAWEI-CLOCK-MIB", "hwClockLineClkCfgRecvS1"), ("HUAWEI-CLOCK-MIB", "hwClockLineClkCfgSendS1"), ("HUAWEI-CLOCK-MIB", "hwClockLineClkCfgCardId"), ("HUAWEI-CLOCK-MIB", "hwClockLineClkCfgPortId"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwClockLineCfgGroup = hwClockLineCfgGroup.setStatus('current')
if mibBuilder.loadTexts: hwClockLineCfgGroup.setDescription('The line clock group..')
mibBuilder.exportSymbols("HUAWEI-CLOCK-MIB", PYSNMP_MODULE_ID=hwClockMIB, hwClockCfgSourceS1Id=hwClockCfgSourceS1Id, hwClockBitsCfgFrameFormat=hwClockBitsCfgFrameFormat, hwClockCfgSourceId=hwClockCfgSourceId, hwClockAttributeSsmControl=hwClockAttributeSsmControl, hwClockCesAcrDomianInfoDomain=hwClockCesAcrDomianInfoDomain, hwClockSrcCfgNegotiationSlave=hwClockSrcCfgNegotiationSlave, hwClockCurSourceName=hwClockCurSourceName, hwClockSourceInputBelowThresholdResume=hwClockSourceInputBelowThresholdResume, hwClockSrcCfgFreqCheckResult=hwClockSrcCfgFreqCheckResult, hwClockBitsCfgTodSignal=hwClockBitsCfgTodSignal, hwClockSrcCfgSabit=hwClockSrcCfgSabit, hwClockSrcSelSrcName=hwClockSrcSelSrcName, hwClockCesAcrCfgCard=hwClockCesAcrCfgCard, hwClockCesAcrCfgSystemPriority=hwClockCesAcrCfgSystemPriority, hwClockAttributeSysClkRunMode=hwClockAttributeSysClkRunMode, hwClockCesAcrParentIfIndex=hwClockCesAcrParentIfIndex, hwClockCesAcrPortCfgTable=hwClockCesAcrPortCfgTable, hwClockSourceEthClkEnable=hwClockSourceEthClkEnable, hwClockLineClkCfgSlotIndex=hwClockLineClkCfgSlotIndex, hwClockInLockedMode=hwClockInLockedMode, hwClockCesAcrMasterDomain=hwClockCesAcrMasterDomain, hwClockCesAcrCfgSyncEnable=hwClockCesAcrCfgSyncEnable, hwClockPortCfgLeftFramePri=hwClockPortCfgLeftFramePri, hwClockCfgBadDetect=hwClockCfgBadDetect, hwClockSrcCfgSourceTypeIndex=hwClockSrcCfgSourceTypeIndex, hwClockSrcCfgSystemPriority=hwClockSrcCfgSystemPriority, hwClockCesAcrCfgRowStatus=hwClockCesAcrCfgRowStatus, hwClockCfgSourceIndex=hwClockCfgSourceIndex, hwClockSrcCfgClockId=hwClockSrcCfgClockId, hwClockSourceSwitch=hwClockSourceSwitch, hwClockLineClkCfgTable=hwClockLineClkCfgTable, hwClockSrcCfg2M2Priority=hwClockSrcCfg2M2Priority, hwClockSourceValid=hwClockSourceValid, hwClockCesMode=hwClockCesMode, hwClockCfgClkSourceType=hwClockCfgClkSourceType, hwClockBitsCfgDirection=hwClockBitsCfgDirection, hwClockBitsCfgInvalidCond=hwClockBitsCfgInvalidCond, hwClockCfgSwitchCondition=hwClockCfgSwitchCondition, 
hwClockCesAcrCfgDescr=hwClockCesAcrCfgDescr, hwClockAttribute2M1MaxOutSsm=hwClockAttribute2M1MaxOutSsm, hwClockCesAcrDomianInfoMasterPwName=hwClockCesAcrDomianInfoMasterPwName, hwClockAlarmThresholdFrequencyOffset=hwClockAlarmThresholdFrequencyOffset, hwClockCesAcrCfgSlot=hwClockCesAcrCfgSlot, hwClockChassisId=hwClockChassisId, hwClockGlobalObjects=hwClockGlobalObjects, hwClockBitsCfgSendSaBit=hwClockBitsCfgSendSaBit, hwClockSourceFreqCheckLeftRange=hwClockSourceFreqCheckLeftRange, hwClockSrcCfgFreqDeviation=hwClockSrcCfgFreqDeviation, hwClockSourceCompliances=hwClockSourceCompliances, hwClockClusterTopoType=hwClockClusterTopoType, hwClockSrcCfgSourceSsm=hwClockSrcCfgSourceSsm, hwClockCesAcrDomianInfoSlot=hwClockCesAcrDomianInfoSlot, hwClockSourceCfgGroup=hwClockSourceCfgGroup, hwClockCesDcrOldMasterPwName=hwClockCesDcrOldMasterPwName, hwClockOldSourceState=hwClockOldSourceState, hwClockSourceCompliance=hwClockSourceCompliance, hwClockMIB=hwClockMIB, hwClockLineClkCfgRecvS1=hwClockLineClkCfgRecvS1, hwClockPortCfgIfIndex=hwClockPortCfgIfIndex, hwClockCfgSourceDescr=hwClockCfgSourceDescr, hwClockExtTimeInputType=hwClockExtTimeInputType, hwClockCfgSwiEnableStatus=hwClockCfgSwiEnableStatus, hwClockLineCfgGroup=hwClockLineCfgGroup, hwClockManageObjects=hwClockManageObjects, hwClockBitsCfgSaBit=hwClockBitsCfgSaBit, hwClockSourceFreqCheckRightRange=hwClockSourceFreqCheckRightRange, hwClockSrcSelMode=hwClockSrcSelMode, hwClockClusterTopoTable=hwClockClusterTopoTable, hwClockFrequencyOffset=hwClockFrequencyOffset, hwClockManageSysGroup=hwClockManageSysGroup, hwClockSourceFreqCheckEnable=hwClockSourceFreqCheckEnable, hwClockAttribute2M2MaxOutSsm=hwClockAttribute2M2MaxOutSsm, hwClockCesAcrCfgFreqCheckResult=hwClockCesAcrCfgFreqCheckResult, hwClockCesAcrDomainInfoTable=hwClockCesAcrDomainInfoTable, hwClockCesAcrDomianInfoChannelId=hwClockCesAcrDomianInfoChannelId, hwClockSrcCfgClockIdSetMode=hwClockSrcCfgClockIdSetMode, hwClockSourceSelType=hwClockSourceSelType, 
hwClockCfgBits0Priority=hwClockCfgBits0Priority, hwClockSrcCfgSsmSetMode=hwClockSrcCfgSsmSetMode, hwClockClusterTopoFail=hwClockClusterTopoFail, hwClockPllId=hwClockPllId, hwClockSrcCfg2M1Priority=hwClockSrcCfg2M1Priority, hwClockSourceHoldMode=hwClockSourceHoldMode, hwClockSrcSelTable=hwClockSrcSelTable, hwClockLineClkCfgCardId=hwClockLineClkCfgCardId, hwClockSsmPktLosResume=hwClockSsmPktLosResume, hwClockSourceSelChassisIndex=hwClockSourceSelChassisIndex, hwClockAttributeExtendSsmControl=hwClockAttributeExtendSsmControl, hwClockSourceOldLockMode=hwClockSourceOldLockMode, hwClockPortCfgRightFramePri=hwClockPortCfgRightFramePri, hwClockCesAcrChannelId=hwClockCesAcrChannelId, hwClockCesAcrCfgSsm=hwClockCesAcrCfgSsm, hwClockSourceSelMode=hwClockSourceSelMode, hwClockSrcCfgSourceDescr=hwClockSrcCfgSourceDescr, hwClockTrapOid=hwClockTrapOid, hwClockAttributeEntry=hwClockAttributeEntry, hwClockCesAcrRecoveryDomain=hwClockCesAcrRecoveryDomain, hwClockCesAcrSlot=hwClockCesAcrSlot, hwClockFrequencyOffsetMax=hwClockFrequencyOffsetMax, hwClockSrcCfgRowStatus=hwClockSrcCfgRowStatus, hwClockCfgSourceState=hwClockCfgSourceState, hwClockBitsCfgOutputMode=hwClockBitsCfgOutputMode, hwClockBitsCfgBitsIndex=hwClockBitsCfgBitsIndex, hwClockFrequencyOffsetMin=hwClockFrequencyOffsetMin, hwClockCfgChassisIndex=hwClockCfgChassisIndex, hwClockLastSourceName=hwClockLastSourceName, hwClockCesAcrNewMasterPwName=hwClockCesAcrNewMasterPwName, hwClockAttributeHoldOffTime=hwClockAttributeHoldOffTime, hwClockClusterTopoLinkType=hwClockClusterTopoLinkType, hwClockCesAcrPortName=hwClockCesAcrPortName, hwClockPortCfgForceOutS1=hwClockPortCfgForceOutS1, hwClockSourceInputBelowThreshold=hwClockSourceInputBelowThreshold, hwClockSrcCfgTable=hwClockSrcCfgTable, hwClockCesAcrChannelType=hwClockCesAcrChannelType, hwClockBitsCfgSourceId=hwClockBitsCfgSourceId, hwClockSourceSelSourceId=hwClockSourceSelSourceId, hwClockAttributeLtiSquelch=hwClockAttributeLtiSquelch, 
hwClockSourceSysClkWorkMode=hwClockSourceSysClkWorkMode, hwClockCesDcrSlot=hwClockCesDcrSlot, hwClockCfgBits1LockOut=hwClockCfgBits1LockOut, hwClockSrcCfgClkEnable=hwClockSrcCfgClkEnable, hwClockConformance=hwClockConformance, hwClockSysSelGroup=hwClockSysSelGroup, hwClockNotifications=hwClockNotifications, hwClockSourceSelEntry=hwClockSourceSelEntry, hwClockCesAcrDomain=hwClockCesAcrDomain, hwClockCesDcrMasterPwChange=hwClockCesDcrMasterPwChange, hwClockCesAcrCard=hwClockCesAcrCard, hwClockSrcCfgPhyState=hwClockSrcCfgPhyState, hwClockSourceCfgTable=hwClockSourceCfgTable, hwClockNotInLockedMode=hwClockNotInLockedMode, hwClockSourceSsmUnknown=hwClockSourceSsmUnknown, hwClockBitsCfgChassisIndex=hwClockBitsCfgChassisIndex, hwClockCesDcrLockFail=hwClockCesDcrLockFail, hwClockCesAcrPortCfgEntry=hwClockCesAcrPortCfgEntry, hwClockPortCfgTable=hwClockPortCfgTable, hwClockSourceSsmControl=hwClockSourceSsmControl, hwClockCesDcrCard=hwClockCesDcrCard, hwClockSrcTraceSrcName=hwClockSrcTraceSrcName, hwClockSrcCfgSourceState=hwClockSrcCfgSourceState, hwClockBitsCfgForceOutS1=hwClockBitsCfgForceOutS1, hwClockCfgSourceSsm=hwClockCfgSourceSsm, hwClockBitsCfgBitsPortType=hwClockBitsCfgBitsPortType, hwClockLineClkCfgPortId=hwClockLineClkCfgPortId, hwClockCesAcrLockFail=hwClockCesAcrLockFail, hwClockSrcSelChassisIndex=hwClockSrcSelChassisIndex, hwClockAttributeWtrTime=hwClockAttributeWtrTime, hwClockAttributeFreqCheckEnable=hwClockAttributeFreqCheckEnable, hwClockCfgPriRvtEnableStatus=hwClockCfgPriRvtEnableStatus, hwClockLineClkCfgSendS1=hwClockLineClkCfgSendS1, hwClockSourceStateResume=hwClockSourceStateResume, hwClockSrcCfgChassisIndex=hwClockSrcCfgChassisIndex, hwClockCesAcrLockFailResume=hwClockCesAcrLockFailResume, hwClockCesAcrDomianInfoState=hwClockCesAcrDomianInfoState, hwClockExtTimeOutputType=hwClockExtTimeOutputType, hwClockSourceOutputBelowThreshold=hwClockSourceOutputBelowThreshold, hwClockCesAcrMasterPwChange=hwClockCesAcrMasterPwChange, 
hwClockAttributeInputThreshold=hwClockAttributeInputThreshold, hwClockCesAcrCfgSourceState=hwClockCesAcrCfgSourceState, hwClockSrcCfgEntry=hwClockSrcCfgEntry, hwClockCfgHoldOffTime=hwClockCfgHoldOffTime, hwClockSourceCfgEntry=hwClockSourceCfgEntry, hwClockPortCfgEntry=hwClockPortCfgEntry, hwClockAttributeRetrieveMode=hwClockAttributeRetrieveMode, hwClockCfgSsmThreshold=hwClockCfgSsmThreshold, hwClockSourceFreqCheck=hwClockSourceFreqCheck, hwClockSourceFailed=hwClockSourceFailed, hwClockClusterSyncType=hwClockClusterSyncType, hwClockCesAcrDomianInfoCard=hwClockCesAcrDomianInfoCard, hwClockCfgSystemLockOut=hwClockCfgSystemLockOut, hwClockCesAcrLockState=hwClockCesAcrLockState, hwClockCesAcrCfgClockId=hwClockCesAcrCfgClockId, hwClockLineClkCfgEntry=hwClockLineClkCfgEntry, hwClockSrcSelEntry=hwClockSrcSelEntry, hwClockAttributeSysMaxOutSsm=hwClockAttributeSysMaxOutSsm, hwClockCesAcrPortCfgRowStatus=hwClockCesAcrPortCfgRowStatus, hwClockSourceSysClkLockModeChange=hwClockSourceSysClkLockModeChange, hwClockTrapOidGroup=hwClockTrapOidGroup, hwClockSsmPktLos=hwClockSsmPktLos, hwClockAttributeTable=hwClockAttributeTable, hwClockSourceOutputBelowThresholdResume=hwClockSourceOutputBelowThresholdResume, hwClockSrcCfgOutClockId=hwClockSrcCfgOutClockId, hwClockLineClkCfgChassisIndex=hwClockLineClkCfgChassisIndex, hwClockSrcCfgSsmTimeout=hwClockSrcCfgSsmTimeout, hwClockCesAcrCfgDomain=hwClockCesAcrCfgDomain, hwClockBitsCfgGroup=hwClockBitsCfgGroup, hwClockCfgSourceSsmSetMode=hwClockCfgSourceSsmSetMode, hwClockCfgBits1Priority=hwClockCfgBits1Priority, hwClockBitsCfgRecvSaBit=hwClockBitsCfgRecvSaBit, hwClockSourceStateChange=hwClockSourceStateChange, hwClockAttributeOutThreshold=hwClockAttributeOutThreshold, hwClockClusterTopoStatus=hwClockClusterTopoStatus, hwClockLineCfgSoureId=hwClockLineCfgSoureId, hwClockAttributeOutValue=hwClockAttributeOutValue, hwClockAttributeSysClkLockMode=hwClockAttributeSysClkLockMode, hwClockCesAcrOldMasterPwName=hwClockCesAcrOldMasterPwName, 
hwClockCesDcrLockState=hwClockCesDcrLockState, hwClockCfgSystemPriority=hwClockCfgSystemPriority, hwClockClusterTopoEntry=hwClockClusterTopoEntry, hwClockCesAcrCfgTable=hwClockCesAcrCfgTable, hwClockClusterTopoFailResume=hwClockClusterTopoFailResume, hwClockCfgFreqCheckResult=hwClockCfgFreqCheckResult, hwClockSrcSelType=hwClockSrcSelType, hwClockBitsCfgInputMode=hwClockBitsCfgInputMode, hwClockAttributeInternalClockId=hwClockAttributeInternalClockId, hwClockSrcCfgOutSsm=hwClockSrcCfgOutSsm, hwClockAttributeChassisIndex=hwClockAttributeChassisIndex, hwClockNotificationsGroup=hwClockNotificationsGroup, hwClockSrcCfgSsmInterval=hwClockSrcCfgSsmInterval, hwClockCesAcrIfIndex=hwClockCesAcrIfIndex, hwClockSourceForceCloseEnableStatus=hwClockSourceForceCloseEnableStatus, hwClockSourceFreqCheckResume=hwClockSourceFreqCheckResume, hwClockSourceGroups=hwClockSourceGroups, hwClockCfgBits0LockOut=hwClockCfgBits0LockOut, hwClockCesDcrDomain=hwClockCesDcrDomain, hwClockTimeUsedSource=hwClockTimeUsedSource, hwClockCfgWtrTime=hwClockCfgWtrTime, hwClockCfgExportEnableStatus=hwClockCfgExportEnableStatus, hwClockBitsCfgEntry=hwClockBitsCfgEntry, hwClockCesAcrDomainInfoEntry=hwClockCesAcrDomainInfoEntry, hwClockFrequencyOffsetMean=hwClockFrequencyOffsetMean, hwClockBitsCfgName=hwClockBitsCfgName, hwClockBitsCfgBitsType=hwClockBitsCfgBitsType, hwClockSrcCfgSourceIndex=hwClockSrcCfgSourceIndex, hwClockCesDcrLockFailResume=hwClockCesDcrLockFailResume, hwClockBitsCfgTable=hwClockBitsCfgTable, hwClockAttributeTodProtocol=hwClockAttributeTodProtocol, hwClockCesAcrSourceMode=hwClockCesAcrSourceMode, hwClockSourceRetrieveMode=hwClockSourceRetrieveMode, hwClockCesDcrNewMasterPwName=hwClockCesDcrNewMasterPwName, hwClockCesAcrCfgEntry=hwClockCesAcrCfgEntry, hwClockSourceSelTable=hwClockSourceSelTable, hwClockPortCfgGroup=hwClockPortCfgGroup, hwClockCesAcrPwDomain=hwClockCesAcrPwDomain) | # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-CLOCK-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:43:52 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 |
0025_auto__add_field_databasecreate_plan_name__chg_field_databasecreate_pla.py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'DatabaseCreate.plan_name'
db.add_column(u'maintenance_databasecreate', 'plan_name',
self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
keep_default=False)
# Changing field 'DatabaseCreate.plan'
db.alter_column(u'maintenance_databasecreate', 'plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
def backwards(self, orm):
# Deleting field 'DatabaseCreate.plan_name'
db.delete_column(u'maintenance_databasecreate', 'plan_name')
# User chose to not deal with backwards NULL issues for 'DatabaseCreate.plan'
#raise RuntimeError("Cannot reverse this migration. 'DatabaseCreate.plan' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'DatabaseCreate.plan'
db.alter_column(u'maintenance_databasecreate', 'plan_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['physical.Plan']))
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_source'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_target'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}), | u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | 'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}), |
create_size_log.py | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to build the required binaries, profile their size and generate log.
"""
import argparse
import datetime
import os
import pandas as pd
import subprocess
def _build_a_binary(root_dir, binary_name, makefile_options):
  """Build a single make target of the TFLM Makefile.

  Args:
    root_dir: repository root; the Makefile path is relative to it.
    binary_name: make target to build (e.g. 'keyword_benchmark').
    makefile_options: dict of KEY=VALUE make variables (TARGET, BUILD_TYPE, ...).

  Raises:
    RuntimeError: if make exits with a non-zero status; the message includes
      the full command line and make's stderr.
  """
  os.chdir(root_dir)
  params_list = [
      "make", "-f", "tensorflow/lite/micro/tools/make/Makefile", binary_name
  ] + ["%s=%s" % (key, value) for (key, value) in makefile_options.items()]
  # subprocess.run is the idiomatic replacement for Popen+communicate when we
  # only need the exit status and captured output.
  process = subprocess.run(params_list, capture_output=True)
  if process.returncode != 0:
    raise RuntimeError("Building %s failed with \n\n %s" %
                       (" ".join(params_list), process.stderr.decode()))


def _profile_a_binary(root_dir, binary_name, makefile_options, build_info):
  """Run `size` on a built binary and append one row to its CSV size log.

  Args:
    root_dir: repository root.
    binary_name: name of the built binary under the make gen/bin directory.
    makefile_options: dict with TARGET/TARGET_ARCH/BUILD_TYPE keys, used to
      locate the per-configuration output directory.
    build_info: dict with 'date' and 'sha' describing the current checkout.

  Raises:
    RuntimeError: if the `size` command fails.
  """
  target_dir = "%s_%s_%s" % (makefile_options["TARGET"],
                             makefile_options["TARGET_ARCH"],
                             makefile_options["BUILD_TYPE"])
  binary_path = os.path.join(root_dir, 'tensorflow/lite/micro/tools/make/gen/',
                             target_dir, 'bin', binary_name)
  csv_path = os.path.join(root_dir, 'data/continuous_builds/size_profiling',
                          target_dir, "%s.csv" % binary_name)

  # Run the binutils `size` command and capture its tabular output.
  process = subprocess.run(["size", binary_path], capture_output=True)
  if process.returncode != 0:
    raise RuntimeError("size %s failed with \n\n %s" %
                       (binary_name, process.stderr.decode()))
  output_str = process.stdout.decode()

  # First output line holds the column names (text/data/bss/dec/...), the
  # following line(s) the numbers; parse them into a DataFrame.
  df = pd.DataFrame([line.split() for line in output_str.split('\n')[1:]],
                    columns=list(output_str.split('\n')[0].split()))

  # Append the measurements (keyed by date and git sha) to the CSV log.
  report = _create_or_read_csv(csv_path)
  report.loc[len(report.index)] = [
      build_info["date"], build_info['sha'], df['text'][0], df['data'][0],
      df['bss'][0], df['dec'][0]
  ]
  report.to_csv(csv_path, index=False, header=False, mode='a')
def _create_or_read_csv(csv_file_name):
if os.path.exists(csv_file_name) is not True:
csv_df = pd.DataFrame(
columns=['date', 'sha', 'text', 'data', 'bss', 'total'])
csv_df.to_csv(csv_file_name, index=False, mode='w')
csv_head = pd.read_csv(csv_file_name, index_col=False, nrows=0)
return csv_head
def _get_build_info(root_dir):
  """Collect the timestamp and git commit sha identifying this build.

  Args:
    root_dir: repository root in which `git rev-parse HEAD` is executed.

  Returns:
    dict with 'date' (str of datetime.datetime.now()) and 'sha' (commit hash).

  Raises:
    RuntimeError: if git exits non-zero; the message includes git's stderr.
  """
  os.chdir(root_dir)
  current_time = str(datetime.datetime.now())
  # NOTE: the previous version only piped stdout, so on failure `err` was None
  # and err.decode() raised AttributeError, hiding the real git error. Capture
  # both streams so the RuntimeError carries git's actual message.
  git_process = subprocess.run(["git", "rev-parse", "HEAD"],
                               capture_output=True,
                               cwd=root_dir)
  if git_process.returncode != 0:
    raise RuntimeError("Git failed with %s" % git_process.stderr.decode())
  return {'date': current_time, 'sha': git_process.stdout.decode().strip('\n')}
def _build_and_profile(root_dir, makefile_options, binary_names):
  """Build every requested binary, then record its size in the CSV logs.

  The build metadata (date + git sha) is captured once up front so that all
  binaries profiled in this run share the same log key.
  """
  info = _get_build_info(root_dir)
  for name in binary_names:
    _build_a_binary(root_dir, name, makefile_options)
    _profile_a_binary(root_dir, name, makefile_options, info)
if __name__ == '__main__':
  # Defaults mirror the continuous-build configuration: x86_64 linux release
  # builds of the benchmark/footprint binaries.
  default_binary_list_string = ('keyword_benchmark,baseline_memory_footprint,'
                                'interpreter_memory_footprint')

  parser = argparse.ArgumentParser()
  parser.add_argument(
      '--binary_list',
      nargs='?',
      const=default_binary_list_string,
      default=default_binary_list_string,
      help=('binary list separated by comma (e.g. '
            'keyword_benchmark,baseline_memory_footprint)'))
  # The three make-variable flags share the same shape (optional, with the
  # fallback used both as default and as the bare-flag constant).
  for flag, fallback, description in (
      ('--build_type', 'release', 'build type (e.g. release)'),
      ('--target', 'linux', 'host target (e.g. linux)'),
      ('--target_arch', 'x86_64', 'target architecture (e.g x86_64)')):
    parser.add_argument(flag,
                        nargs='?',
                        const=fallback,
                        default=fallback,
                        help=description)
  args = parser.parse_args()

  makefile_options = {
      "BUILD_TYPE": args.build_type,
      "TARGET": args.target,
      "TARGET_ARCH": args.target_arch
  }
  binary_names = args.binary_list.split(',')

  # The script lives five levels below the repository root.
  script_path = os.path.dirname(os.path.realpath(__file__))
  root_dir = os.path.join(script_path, '../../../../..')
  _build_and_profile(root_dir, makefile_options, binary_names)
w3c_credentials_list_request.py | from typing import Any, Dict, List, Type, TypeVar, Union, cast
import attr
from ..models.w3c_credentials_list_request_tag_query import W3CCredentialsListRequestTagQuery
from ..types import UNSET, Unset
T = TypeVar("T", bound="W3CCredentialsListRequest")
@attr.s(auto_attribs=True)
class W3CCredentialsListRequest:
    """Query parameters for listing stored W3C credentials.

    Every field is optional: fields left as the UNSET sentinel are omitted
    from the serialized payload. Unknown keys round-trip through
    ``additional_properties``.
    """

    contexts: Union[Unset, List[str]] = UNSET
    given_id: Union[Unset, str] = UNSET
    issuer_id: Union[Unset, str] = UNSET
    max_results: Union[Unset, int] = UNSET
    proof_types: Union[Unset, List[str]] = UNSET
    schema_ids: Union[Unset, List[str]] = UNSET
    subject_ids: Union[Unset, List[str]] = UNSET
    tag_query: Union[Unset, W3CCredentialsListRequestTagQuery] = UNSET
    types: Union[Unset, List[str]] = UNSET
    additional_properties: Dict[str, Any] = attr.ib(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, skipping every UNSET field."""
        tag_query: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.tag_query, Unset):
            # Nested model: serialize recursively.
            tag_query = self.tag_query.to_dict()

        field_dict: Dict[str, Any] = {}
        field_dict.update(self.additional_properties)
        # Known fields are written after (and therefore shadow) any colliding
        # additional_properties keys, matching the generated-client contract.
        for key, value in (
            ("contexts", self.contexts),
            ("given_id", self.given_id),
            ("issuer_id", self.issuer_id),
            ("max_results", self.max_results),
            ("proof_types", self.proof_types),
            ("schema_ids", self.schema_ids),
            ("subject_ids", self.subject_ids),
            ("tag_query", tag_query),
            ("types", self.types),
        ):
            if value is not UNSET:
                field_dict[key] = value
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict.

        Known keys are popped off a copy of ``src_dict``; whatever remains is
        stored verbatim in ``additional_properties``.
        """
        d = src_dict.copy()

        _tag_query = d.pop("tag_query", UNSET)
        tag_query: Union[Unset, W3CCredentialsListRequestTagQuery]
        if isinstance(_tag_query, Unset):
            tag_query = UNSET
        else:
            tag_query = W3CCredentialsListRequestTagQuery.from_dict(_tag_query)

        instance = cls(
            contexts=cast(List[str], d.pop("contexts", UNSET)),
            given_id=d.pop("given_id", UNSET),
            issuer_id=d.pop("issuer_id", UNSET),
            max_results=d.pop("max_results", UNSET),
            proof_types=cast(List[str], d.pop("proof_types", UNSET)),
            schema_ids=cast(List[str], d.pop("schema_ids", UNSET)),
            subject_ids=cast(List[str], d.pop("subject_ids", UNSET)),
            tag_query=tag_query,
            types=cast(List[str], d.pop("types", UNSET)),
        )
        instance.additional_properties = d
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the pass-through (unknown) properties."""
        return list(self.additional_properties.keys())

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
| __setitem__ |
ref.test-d.ts | isRef,
unref,
reactive,
expectType,
proxyRefs,
toRef,
toRefs
} from './index'
/**
 * Compile-time assertions for `ref`/`unref`/`isRef` over plain (non-DOM)
 * values. Every `expectType` call is a type-level check only; the function
 * body performs no meaningful runtime work.
 */
function plainType(arg: number | Ref<number>) {
  // ref coercing
  const coerced = ref(arg)
  expectType<Ref<number>>(coerced)

  // isRef as type guard
  if (isRef(arg)) {
    expectType<Ref<number>>(arg)
  }

  // ref unwrapping
  expectType<number>(unref(arg))

  // ref inner type should be unwrapped
  const nestedRef = ref({
    foo: ref(1)
  })
  expectType<Ref<{ foo: number }>>(nestedRef)
  expectType<{ foo: number }>(nestedRef.value)

  // ref boolean
  const falseRef = ref(false)
  expectType<Ref<boolean>>(falseRef)
  expectType<boolean>(falseRef.value)

  // ref true — an explicit literal type argument must be preserved, not widened
  const trueRef = ref<true>(true)
  expectType<Ref<true>>(trueRef)
  expectType<true>(trueRef.value)

  // tuple
  expectType<[number, string]>(unref(ref([1, '1'])))

  interface IteratorFoo {
    [Symbol.iterator]: any
  }

  // with symbol
  expectType<Ref<IteratorFoo | null | undefined>>(
    ref<IteratorFoo | null | undefined>()
  )

  // should not unwrap ref inside arrays
  const arr = ref([1, new Map<string, any>(), ref('1')]).value
  const value = arr[0]
  if (isRef(value)) {
    expectType<Ref>(value)
  } else if (typeof value === 'number') {
    expectType<number>(value)
  } else {
    // should narrow down to Map type
    // and not contain any Ref type
    expectType<Map<string, any>>(value)
  }

  // should still unwrap in objects nested in arrays
  const arr2 = ref([{ a: ref(1) }]).value
  expectType<number>(arr2[0].a)
}

plainType(1)
/**
 * Compile-time assertions for ref() "bail" types (e.g. DOM elements):
 * such values must be kept as-is and never recursively unwrapped.
 */
function bailType(arg: HTMLElement | Ref<HTMLElement>) {
  // ref coercing
  const coerced = ref(arg)
  expectType<Ref<HTMLElement>>(coerced)

  // isRef as type guard
  if (isRef(arg)) {
    expectType<Ref<HTMLElement>>(arg)
  }

  // ref unwrapping
  expectType<HTMLElement>(unref(arg))

  // ref inner type should be unwrapped
  // eslint-disable-next-line no-restricted-globals
  const nestedRef = ref({ foo: ref(document.createElement('DIV')) })

  expectType<Ref<{ foo: HTMLElement }>>(nestedRef)
  expectType<{ foo: HTMLElement }>(nestedRef.value)
}
// eslint-disable-next-line no-restricted-globals
const el = document.createElement('DIV')
bailType(el)
/**
 * Well-known and user-defined symbol keys must survive ref() unwrapping with
 * their value types intact (symbols are not treated as unwrappable refs).
 */
function withSymbol() {
  const customSymbol = Symbol()
  const obj = {
    [Symbol.asyncIterator]: { a: 1 },
    [Symbol.unscopables]: { b: '1' },
    [customSymbol]: { c: [1, 2, 3] }
  }

  const objRef = ref(obj)

  expectType<{ a: number }>(objRef.value[Symbol.asyncIterator])
  expectType<{ b: string }>(objRef.value[Symbol.unscopables])
  expectType<{ c: Array<number> }>(objRef.value[customSymbol])
}

withSymbol()
// reactive() should deeply unwrap nested refs inside plain objects.
const state = reactive({
  foo: {
    value: 1,
    label: 'bar'
  }
})
expectType<string>(state.foo.label)

// shallowRef
type Status = 'initial' | 'ready' | 'invalidating'

const shallowStatus = shallowRef<Status>('initial')
if (shallowStatus.value === 'initial') {
  expectType<Ref<Status>>(shallowStatus)
  expectType<Status>(shallowStatus.value)

  shallowStatus.value = 'invalidating'
}

const refStatus = ref<Status>('initial')
if (refStatus.value === 'initial') {
  expectType<Ref<Status>>(shallowStatus)
  expectType<Status>(shallowStatus.value)

  refStatus.value = 'invalidating'
}

// proxyRefs: should return `reactive` directly
const r1 = reactive({
  k: 'v'
})
const p1 = proxyRefs(r1)
expectType<typeof r1>(p1)

// proxyRefs: `ShallowUnwrapRef` — only the first level of refs is unwrapped
const r2 = {
  a: ref(1),
  obj: {
    k: ref('foo')
  }
}
const p2 = proxyRefs(r2)
expectType<number>(p2.a)
expectType<Ref<string>>(p2.obj.k)

// toRef — works for both plain and ref-valued properties
const obj = {
  a: 1,
  b: ref(1)
}
expectType<Ref<number>>(toRef(obj, 'a'))
expectType<Ref<number>>(toRef(obj, 'b'))

// toRefs
const objRefs = toRefs(obj)
expectType<{
  a: Ref<number>
  b: Ref<number>
}>(objRefs)
Ref,
ref,
shallowRef, |
|
tl_messages_get_emoji_url_gen.go | // Code generated by gotdgen, DO NOT EDIT.
package tg
import (
"context"
"errors"
"fmt"
"sort"
"strings"
"go.uber.org/multierr"
"github.com/gotd/td/bin"
"github.com/gotd/td/tdjson"
"github.com/gotd/td/tdp"
"github.com/gotd/td/tgerr"
)
// No-op definition for keeping imports. These blank assignments reference
// every imported package so this generated file always compiles, even when a
// particular type in it happens not to use one of the imports.
var (
	_ = bin.Buffer{}
	_ = context.Background()
	_ = fmt.Stringer(nil)
	_ = strings.Builder{}
	_ = errors.Is
	_ = multierr.AppendInto
	_ = sort.Ints
	_ = tdp.Format
	_ = tgerr.Error{}
	_ = tdjson.Encoder{}
)
// MessagesGetEmojiURLRequest represents TL type `messages.getEmojiURL#d5b10c26`.
// Returns an HTTP URL which can be used to automatically log in into translation
// platform and suggest new emoji replacements. The URL will be valid for 30 seconds
// after generation
//
// See https://core.telegram.org/method/messages.getEmojiURL for reference.
type MessagesGetEmojiURLRequest struct {
	// Language code for which the emoji replacements will be suggested
	LangCode string
}

// MessagesGetEmojiURLRequestTypeID is TL type id of MessagesGetEmojiURLRequest.
// It is the constructor id written to (and checked on) the wire by Encode/Decode.
const MessagesGetEmojiURLRequestTypeID = 0xd5b10c26

// Ensuring interfaces in compile-time for MessagesGetEmojiURLRequest:
// a *MessagesGetEmojiURLRequest must satisfy all four codec interfaces.
var (
	_ bin.Encoder     = &MessagesGetEmojiURLRequest{}
	_ bin.Decoder     = &MessagesGetEmojiURLRequest{}
	_ bin.BareEncoder = &MessagesGetEmojiURLRequest{}
	_ bin.BareDecoder = &MessagesGetEmojiURLRequest{}
)
// Zero reports whether the request contains only zero-valued fields; a nil
// receiver is considered zero as well.
func (g *MessagesGetEmojiURLRequest) Zero() bool {
	if g == nil {
		return true
	}
	if !(g.LangCode == "") {
		return false
	}

	return true
}

// String implements fmt.Stringer.
func (g *MessagesGetEmojiURLRequest) String() string {
	if g == nil {
		return "MessagesGetEmojiURLRequest(nil)"
	}
	// Alias drops the String method so %+v below cannot recurse infinitely.
	type Alias MessagesGetEmojiURLRequest
	return fmt.Sprintf("MessagesGetEmojiURLRequest%+v", Alias(*g))
}

// FillFrom fills MessagesGetEmojiURLRequest from given interface.
func (g *MessagesGetEmojiURLRequest) FillFrom(from interface {
	GetLangCode() (value string)
}) {
	g.LangCode = from.GetLangCode()
}

// TypeID returns type id in TL schema.
//
// See https://core.telegram.org/mtproto/TL-tl#remarks.
func (*MessagesGetEmojiURLRequest) TypeID() uint32 {
	return MessagesGetEmojiURLRequestTypeID
}

// TypeName returns name of type in TL schema.
func (*MessagesGetEmojiURLRequest) TypeName() string {
	return "messages.getEmojiURL"
}
// TypeInfo returns info about TL type, suitable for pretty-printing and
// introspection via the tdp package.
func (g *MessagesGetEmojiURLRequest) TypeInfo() tdp.Type {
	typ := tdp.Type{
		Name: "messages.getEmojiURL",
		ID:   MessagesGetEmojiURLRequestTypeID,
	}
	if g == nil {
		// A nil receiver is reported as a null value rather than panicking.
		typ.Null = true
		return typ
	}
	typ.Fields = []tdp.Field{
		{
			Name:       "LangCode",
			SchemaName: "lang_code",
		},
	}
	return typ
}
// Encode implements bin.Encoder. It writes the TL constructor id followed by
// the bare representation of the request.
func (g *MessagesGetEmojiURLRequest) Encode(b *bin.Buffer) error {
	if g == nil {
		return fmt.Errorf("can't encode messages.getEmojiURL#d5b10c26 as nil")
	}
	b.PutID(MessagesGetEmojiURLRequestTypeID)
	return g.EncodeBare(b)
}

// EncodeBare implements bin.BareEncoder. It serializes the fields only,
// without the leading type id.
func (g *MessagesGetEmojiURLRequest) EncodeBare(b *bin.Buffer) error {
	if g == nil {
		return fmt.Errorf("can't encode messages.getEmojiURL#d5b10c26 as nil")
	}
	b.PutString(g.LangCode)
	return nil
}

// Decode implements bin.Decoder. It consumes and verifies the type id, then
// delegates the field decoding to DecodeBare.
func (g *MessagesGetEmojiURLRequest) Decode(b *bin.Buffer) error {
	if g == nil {
		return fmt.Errorf("can't decode messages.getEmojiURL#d5b10c26 to nil")
	}
	if err := b.ConsumeID(MessagesGetEmojiURLRequestTypeID); err != nil {
		return fmt.Errorf("unable to decode messages.getEmojiURL#d5b10c26: %w", err)
	}
	return g.DecodeBare(b)
}
// DecodeBare implements bin.BareDecoder.
func (g *MessagesGetEmojiURLRequest) DecodeBare(b *bin.Buffer) error {
if g == nil {
return fmt.Errorf("can't decode messages.getEmojiURL#d5b10c26 to nil")
} | }
g.LangCode = value
}
return nil
}
// GetLangCode returns value of LangCode field.
func (g *MessagesGetEmojiURLRequest) GetLangCode() (value string) {
return g.LangCode
}
// MessagesGetEmojiURL invokes method messages.getEmojiURL#d5b10c26 returning error if any.
// Returns an HTTP URL which can be used to automatically log in into translation
// platform and suggest new emoji replacements. The URL will be valid for 30 seconds
// after generation
//
// See https://core.telegram.org/method/messages.getEmojiURL for reference.
func (c *Client) MessagesGetEmojiURL(ctx context.Context, langcode string) (*EmojiURL, error) {
var result EmojiURL
request := &MessagesGetEmojiURLRequest{
LangCode: langcode,
}
if err := c.rpc.Invoke(ctx, request, &result); err != nil {
return nil, err
}
return &result, nil
} | {
value, err := b.String()
if err != nil {
return fmt.Errorf("unable to decode messages.getEmojiURL#d5b10c26: field lang_code: %w", err) |
toggle.py | #
# Copyright 2021 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from ..base_component import Selector
from .base_control import BaseControl
class Toggle(BaseControl):
"""
Entity_Component : Button
"""
def __init__(self, browser, container):
"""
:param browser: The selenium webdriver
:param container: The locator of the container where the control is located in.
"""
super().__init__(browser, container)
self.elements.update(
{
"toggle_btn": Selector(
select=container.select
+ ' [data-test="option"] [data-test="label"]'
),
"selected": Selector(
select=container.select
+ ' [data-test="option"][aria-checked="true"] [data-test="label"]'
),
}
)
self.browser = browser
self.container = container
def select(self, value):
"""
Selects the toggle specified
:param value: the value to select
:return: Bool if successful in selection, else raises an error
"""
for each in self.get_elements("toggle_btn"):
if each.text.lower() == value.lower():
self._wait_to_be_clickable(each)
return True
else:
raise ValueError("{} not found".format(value))
def _wait_to_be_clickable(self, element):
|
def get_value(self):
"""
Returns the value of the toggle element
:return: Str the text for the toggle element
"""
return self.selected.text.strip()
| def try_click(self):
try:
element.click()
return True
except:
return False
WebDriverWait(self.browser, 10).until(try_click) |
attestation.go | // Copyright (c) 2021 Fraunhofer AISEC
// Fraunhofer-Gesellschaft zur Foerderung der angewandten Forschung e.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package attestedtls
import (
"context"
"crypto/rand"
"crypto/sha256"
"crypto/tls"
"encoding/json"
"errors"
log "github.com/sirupsen/logrus"
"google.golang.org/protobuf/proto"
// local modules
ar "github.com/Fraunhofer-AISEC/cmc/attestationreport"
ci "github.com/Fraunhofer-AISEC/cmc/cmcinterface"
)
var id = "0000"
var noncelen = 32
// Checks Attestation report by calling the CMC to Verify and checking its status response
func obtainAR(req *ci.AttestationRequest, cc cmcConfig) (resp *ci.AttestationResponse, err error) {
// Get backend connection
cmcClient, cmcconn, cancel := getCMCServiceConn(cc)
if cmcClient == nil {
return nil, errors.New("[attestedTLS] Connection failed. No result obtained")
}
defer cmcconn.Close()
defer cancel()
log.Trace("[attestedTLS] Contacting backend to obtain AR.")
// Extend Attest request with id
req.Id = id
// Call Attest request
resp, err = cmcClient.Attest(context.Background(), req)
if err != nil {
log.Error(err)
return nil, errors.New("[attestedTLS] Could not obtain attestation report")
}
// Return response
return resp, nil
}
//Checks Attestation report by calling the CMC to Verify and checking its status response
func | (nonce, report []byte, cc cmcConfig) error {
// Get backend connection
cmcClient, conn, cancel := getCMCServiceConn(cc)
if cmcClient == nil {
return errors.New("[attestedTLS] Connection failed. No result obtained")
}
defer conn.Close()
defer cancel()
log.Trace("[attestedTLS] Contacting backend for AR verification")
// Create Verification request
req := ci.VerificationRequest{
Nonce: nonce,
AttestationReport: report,
Ca: cc.ca,
Policies: cc.policies,
}
// Perform Verify request
resp, err := cmcClient.Verify(context.Background(), &req)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Could not obtain verification result")
}
// Check Verify response
if resp.GetStatus() != ci.Status_OK {
return errors.New("[attestedTLS] Obtaining verification result failed")
}
// parse VerificationResult
var result ar.VerificationResult
err = json.Unmarshal(resp.GetVerificationResult(), &result)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Could not parse verification result")
}
// check results
if !result.Success {
log.Tracef("Attestation result: %v", string(resp.GetVerificationResult()))
return errors.New("[attestedTLS] Verification failed")
}
return nil
}
// Attests itself by receiving a nonce, creating an AR and returning it
func attest(conn *tls.Conn, cert []byte, cc cmcConfig) error {
log.Info("[attestedTLS] Attesting to peer in connection...")
// Obtain request msg
data, err := Read(conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Did not receive attestation request")
}
// Parse request msg
req := &ci.AttestationRequest{}
err = proto.Unmarshal(data, req)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to parse attestation request")
}
// Check nonce length
if len(req.Nonce) != noncelen {
return errors.New("[attestedTLS] Nonce does not have expected size")
}
// include components of tls.Conn to link both protocols: use serverCert
combinedNonce := sha256.Sum256(append(cert[:], req.Nonce[:]...))
req.Nonce = combinedNonce[:]
// Obtain response (AR)
resp, err := obtainAR(req, cc)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Could not obtain response")
}
data, err = proto.Marshal(resp)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to marshal response")
}
// Send response
err = Write(data, conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to send AR")
}
log.Info("[attestedTLS] Sent AR")
// Receive Ack to know if verification was successful
err = receiveAck(conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Other side failed to verify AR")
}
log.Info("[attestedTLS] Attestation to peer succesful")
return nil
}
// Verifies remote party by sending nonce, receiving an AR and verfying it
func verify(conn *tls.Conn, cert []byte, cc cmcConfig) error {
log.Info("[attestedTLS] Verifying peer in connection...")
// Create nonce
nonce := make([]byte, noncelen)
_, err := rand.Read(nonce)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Failed to generate nonce")
}
// Create AR request with nonce
req := &ci.AttestationRequest{
Nonce: nonce,
}
data, err := proto.Marshal(req)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Failed to generate request")
}
// Send Request msg
err = Write(data, conn)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Failed to write request")
}
// Receive response
log.Trace("[attestedTLS] Waiting for AR...")
data, err = Read(conn)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Failed to read response")
}
// Parse response msg
resp := &ci.AttestationResponse{}
err = proto.Unmarshal(data, resp)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Failed to parse response")
}
// Check response status
if resp.Status != ci.Status_OK || len(resp.AttestationReport) == 0 {
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Did not receive attestation report")
}
// include components of tls.Conn to link both protocols
combinedNonce := sha256.Sum256(append(cert[:], nonce[:]...))
// Verify AR
log.Trace("[attestedTLS] Verifying attestation report...")
err = verifyAR(combinedNonce[:], resp.AttestationReport, cc)
if err != nil {
log.Error(err)
_ = sendAck(false, conn)
return errors.New("[attestedTLS] Attestation report verification failed")
}
err = sendAck(true, conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to send ACK")
}
log.Trace("[attestedTLS] Verified peer")
return nil
}
// Sends success ACK or failure to peer
func sendAck(success bool, conn *tls.Conn) error {
log.Trace("[attestedTLS] Sending ACK...")
data := make([]byte, 1)
if success {
data[0] = 0
} else {
data[0] = 1
}
err := Write(data, conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to write ACK")
}
return nil
}
// Waits for and reads success ACK or failure
// In case of failure, an error is returned
func receiveAck(conn *tls.Conn) error {
log.Trace("[attestedTLS] Waiting for ACK...")
data, err := Read(conn)
if err != nil {
log.Error(err)
return errors.New("[attestedTLS] Failed to read ACK")
}
if len(data) != 1 || data[0] != 0 {
return errors.New("[attestedTLS] Did not receive success ACK")
}
return nil
}
| verifyAR |
test_traces_rbac.py | # Copyright 2020 The StackStorm Authors.
# Copyright (C) 2020 Extreme Networks, Inc - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from st2common.rbac.types import PermissionType
from st2common.rbac.types import ResourceType
from st2common.persistence.auth import User
from st2common.persistence.rbac import Role
from st2common.persistence.rbac import UserRoleAssignment
from st2common.persistence.rbac import PermissionGrant
from st2common.models.db.auth import UserDB
from st2common.models.db.rbac import RoleDB
from st2common.models.db.rbac import UserRoleAssignmentDB
from st2common.models.db.rbac import PermissionGrantDB
from st2tests.fixturesloader import FixturesLoader
from st2api.controllers.v1.traces import TracesController
from tests.base import APIControllerWithRBACTestCase
from st2tests.api import APIControllerWithIncludeAndExcludeFilterTestCase
http_client = six.moves.http_client
__all__ = [
'TraceControllerRBACTestCase'
]
FIXTURES_PACK = 'generic'
TEST_FIXTURES = {
'traces': ['trace_for_test_enforce.yaml', 'trace_for_test_enforce_2.yaml',
'trace_for_test_enforce_3.yaml'],
}
class TraceControllerRBACTestCase(APIControllerWithRBACTestCase,
APIControllerWithIncludeAndExcludeFilterTestCase):
# Attributes used by APIControllerWithIncludeAndExcludeFilterTestCase
get_all_path = '/v1/traces'
controller_cls = TracesController
include_attribute_field_name = 'trace_tag'
exclude_attribute_field_name = 'start_timestamp'
rbac_enabled = True
fixtures_loader = FixturesLoader()
def setUp(self):
super(TraceControllerRBACTestCase, self).setUp()
self.models = self.fixtures_loader.save_fixtures_to_db(fixtures_pack=FIXTURES_PACK,
fixtures_dict=TEST_FIXTURES)
file_name = 'trace_for_test_enforce.yaml'
TraceControllerRBACTestCase.TRACE_1 = self.fixtures_loader.load_fixtures(
fixtures_pack=FIXTURES_PACK,
fixtures_dict={'traces': [file_name]})['traces'][file_name]
file_name = 'trace_for_test_enforce_2.yaml'
TraceControllerRBACTestCase.TRACE_1 = self.fixtures_loader.load_fixtures(
fixtures_pack=FIXTURES_PACK,
fixtures_dict={'traces': [file_name]})['traces'][file_name]
file_name = 'trace_for_test_enforce_3.yaml'
TraceControllerRBACTestCase.TRACE_1 = self.fixtures_loader.load_fixtures(
fixtures_pack=FIXTURES_PACK,
fixtures_dict={'traces': [file_name]})['traces'][file_name]
# Insert mock users, roles and assignments
# Users
user_1_db = UserDB(name='trace_list')
user_1_db = User.add_or_update(user_1_db)
self.users['trace_list'] = user_1_db
user_2_db = UserDB(name='trace_view')
user_2_db = User.add_or_update(user_2_db)
self.users['trace_view'] = user_2_db
# Roles
# trace_list
grant_db = PermissionGrantDB(resource_uid=None,
resource_type=ResourceType.TRACE,
permission_types=[PermissionType.TRACE_LIST])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_1_db = RoleDB(name='trace_list', permission_grants=permission_grants)
role_1_db = Role.add_or_update(role_1_db)
self.roles['trace_list'] = role_1_db
# trace_view on trace 1
trace_uid = self.models['traces']['trace_for_test_enforce.yaml'].get_uid()
grant_db = PermissionGrantDB(resource_uid=trace_uid,
resource_type=ResourceType.TRACE,
permission_types=[PermissionType.TRACE_VIEW])
grant_db = PermissionGrant.add_or_update(grant_db)
permission_grants = [str(grant_db.id)]
role_1_db = RoleDB(name='trace_view', permission_grants=permission_grants)
role_1_db = Role.add_or_update(role_1_db)
self.roles['trace_view'] = role_1_db
# Role assignments
role_assignment_db = UserRoleAssignmentDB(
user=self.users['trace_list'].name,
role=self.roles['trace_list'].name,
source='assignments/%s.yaml' % self.users['trace_list'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
role_assignment_db = UserRoleAssignmentDB(
user=self.users['trace_view'].name,
role=self.roles['trace_view'].name,
source='assignments/%s.yaml' % self.users['trace_view'].name)
UserRoleAssignment.add_or_update(role_assignment_db)
def test_get_all_no_permissions(self):
user_db = self.users['no_permissions']
self.use_user(user_db)
resp = self.app.get('/v1/traces', expect_errors=True)
expected_msg = ('User "no_permissions" doesn\'t have required permission "trace_list"')
self.assertEqual(resp.status_code, http_client.FORBIDDEN)
self.assertEqual(resp.json['faultstring'], expected_msg)
def test_get_one_no_permissions(self):
user_db = self.users['no_permissions']
self.use_user(user_db)
trace_id = self.models['traces']['trace_for_test_enforce.yaml'].id
trace_uid = self.models['traces']['trace_for_test_enforce.yaml'].get_uid()
resp = self.app.get('/v1/traces/%s' % (trace_id), expect_errors=True)
expected_msg = ('User "no_permissions" doesn\'t have required permission "trace_view"'
' on resource "%s"' % (trace_uid))
self.assertEqual(resp.status_code, http_client.FORBIDDEN)
self.assertEqual(resp.json['faultstring'], expected_msg)
def test_get_all_permission_success_get_one_no_permission_failure(self):
user_db = self.users['trace_list']
self.use_user(user_db)
# trace_list permission, but no trace_view permission
resp = self.app.get('/v1/traces')
self.assertEqual(resp.status_code, http_client.OK)
self.assertEqual(len(resp.json), 3)
trace_id = self.models['traces']['trace_for_test_enforce.yaml'].id
trace_uid = self.models['traces']['trace_for_test_enforce.yaml'].get_uid()
resp = self.app.get('/v1/traces/%s' % (trace_id), expect_errors=True)
expected_msg = ('User "trace_list" doesn\'t have required permission "trace_view"'
' on resource "%s"' % (trace_uid))
self.assertEqual(resp.status_code, http_client.FORBIDDEN)
self.assertEqual(resp.json['faultstring'], expected_msg)
def test_get_one_permission_success_get_all_no_permission_failure(self):
|
def _insert_mock_models(self):
trace_ids = [trace['id'] for trace in self.models['traces'].values()]
return trace_ids
| user_db = self.users['trace_view']
self.use_user(user_db)
# trace_view permission, but no trace_list permission
trace_id = self.models['traces']['trace_for_test_enforce.yaml'].id
trace_uid = self.models['traces']['trace_for_test_enforce.yaml'].get_uid()
resp = self.app.get('/v1/traces/%s' % (trace_id))
self.assertEqual(resp.status_code, http_client.OK)
self.assertEqual(resp.json['uid'], trace_uid)
resp = self.app.get('/v1/traces', expect_errors=True)
expected_msg = ('User "trace_view" doesn\'t have required permission "trace_list"')
self.assertEqual(resp.status_code, http_client.FORBIDDEN)
self.assertEqual(resp.json['faultstring'], expected_msg) |
client.rs | use crate::error::Error;
use crate::result::Result;
use reqwest::{StatusCode, Url};
use tuple_space::query_tuple::QueryTuple;
use tuple_space::tuple::Tuple;
pub struct Client {
size_url: Url,
write_url: Url,
read_url: Url,
take_url: Url,
http_client: reqwest::Client,
}
#[derive(Default)]
pub struct Builder {}
impl Client {
pub fn builder() -> Builder {
Builder::default()
}
pub async fn size(&self) -> Result<usize> {
let response = self.http_client.get(self.size_url.clone()).send().await?;
match response.status() {
StatusCode::OK => Ok(response.json::<usize>().await?),
_ => Err(Error::ServerError),
}
}
pub async fn write(&self, tuple: &Tuple) -> Result<()> {
let response = self
.http_client
.post(self.write_url.clone())
.body(serde_json::to_string(tuple)?)
.send()
.await?;
match response.status() {
StatusCode::CREATED => Ok(()),
_ => Err(Error::ServerError),
}
}
pub async fn read(&self, tuple: &QueryTuple) -> Result<Option<Tuple>> {
let response = self
.http_client
.post(self.read_url.clone())
.body(serde_json::to_string(tuple)?)
.send()
.await?;
match response.status() {
StatusCode::OK => Ok(Some(response.json::<Tuple>().await?)),
StatusCode::NOT_FOUND => Ok(None),
_ => Err(Error::ServerError),
}
}
pub async fn take(&self, tuple: &QueryTuple) -> Result<Option<Tuple>> {
let response = self
.http_client
.post(self.take_url.clone())
.body(serde_json::to_string(tuple)?)
.send()
.await?;
| match response.status() {
StatusCode::OK => Ok(Some(response.json::<Tuple>().await?)),
StatusCode::NOT_FOUND => Ok(None),
_ => Err(Error::ServerError),
}
}
}
impl Builder {
pub fn build(&self, server: &str) -> Result<Client> {
let base_server = Url::parse(server)?;
let size_url = base_server.join("size")?;
let read_url = base_server.join("read")?;
let take_url = base_server.join("take")?;
let write_url = base_server.join("write")?;
Ok(Client {
http_client: reqwest::Client::new(),
size_url,
read_url,
take_url,
write_url,
})
}
} | |
no-stories.tsx | import * as React from "react";
import config from "./get-config";
import { Code, Link } from "./ui";
const NoStories: React.FC<{ wrongUrl?: boolean; activeStory?: string }> = ({
wrongUrl,
activeStory,
}) => (
<div className="ladle-error-content">
{wrongUrl ? (
<>
<h1>The story not found</h1>
<p>
The story id <Code>{activeStory}</Code> you are trying to open does
not exist. Typo?
</p>
</>
) : (
<>
<h1>No stories found</h1>
<p>
The configured glob pattern for stories is:{" "}
<Code>{config.stories}</Code>.{" "}
</p>
<p>
It can be changed through the{" "}
<Link href="https://www.ladle.dev/docs/config#story-filenames">
configuration file
</Link>{" "}
or CLI flag <Code>--stories=your-glob</Code>.
</p>
</>
)}
<p>
<Link href="https://github.com/tajo/ladle">Github</Link> | <p>
<Link href="https://www.ladle.dev">Docs</Link>
</p>
</div>
);
export default NoStories; | </p> |
event.rs | use anyhow::{bail, Result};
use std::process::Command;
#[derive(Debug)]
pub enum FieldFormat {
Simple {
signed: bool,
size: usize,
},
Array {
signed: bool,
size: usize,
len: usize,
},
}
#[derive(Debug, Default)]
pub struct EventFormat {
fields: Vec<(String, FieldFormat)>,
}
impl EventFormat {
pub fn fields(&self) -> impl Iterator<Item = &(String, FieldFormat)> {
self.fields.iter()
}
fn add_field(&mut self, name: String, format: FieldFormat) |
}
pub fn event_format(category: &str, name: &str) -> Result<EventFormat> {
let events_dir = "/sys/kernel/debug/tracing/events";
let output = Command::new("sudo")
.arg("cat")
.arg(format!("{}/{}/{}/format", events_dir, category, name))
.output()?;
if !output.status.success() {
bail!("{}", std::str::from_utf8(&output.stderr)?);
}
let lines = std::str::from_utf8(&output.stdout)?.lines().skip(3);
let mut event = EventFormat::default();
for line in lines {
if line.is_empty() {
continue;
}
if !line.starts_with('\t') {
break;
}
let mut cols = line.split('\t').skip(1);
let (name, len) = parse_decl(cols.next())?;
cols.next();
let size: usize = parse_size(cols.next())?;
let signed: bool = parse_signed(cols.next())?;
let format = if let Some(len) = len {
FieldFormat::Array {
size: size / len,
signed,
len,
}
} else {
FieldFormat::Simple { size, signed }
};
event.add_field(name.to_owned(), format);
}
Ok(event)
}
fn parse_decl(input: Option<&str>) -> Result<(&str, Option<usize>)> {
if let Some(array) = parse_column(input)?.rsplit(' ').next() {
let mut iter = array.split(|c| c == '[' || c == ']');
let name = iter.next();
let len = iter.next();
match (name, len) {
(Some(name), Some(len)) => return Ok((name, Some(len.parse()?))),
(Some(name), None) => return Ok((name, None)),
_ => {}
}
}
bail!("parse error: {:?}", input);
}
fn parse_size(input: Option<&str>) -> Result<usize> {
Ok(parse_column(input)?.parse()?)
}
fn parse_signed(input: Option<&str>) -> Result<bool> {
match parse_column(input)? {
"0" => Ok(false),
"1" => Ok(true),
_ => bail!("parse error: {:?}", input),
}
}
fn parse_column(input: Option<&str>) -> Result<&str> {
if let Some(input) = input {
if let Some(size) = input.split(|c| c == ':' || c == ';').nth(1) {
return Ok(size);
}
}
bail!("parse error: {:?}", input);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_raw_syscalls_sys_enter() {
event_format("raw_syscalls", "sys_enter").unwrap();
}
#[test]
fn test_raw_syscalls_sys_exit() {
event_format("raw_syscalls", "sys_exit").unwrap();
}
}
| {
self.fields.push((name, format));
} |
profile.py | import datetime
import itertools
import math
import sys
import threading
import time
from collections import defaultdict
from functools import wraps
from typing import Optional, Union, Callable
from jina.enums import ProgressBarStatus
from .logger import JinaLogger
from .. import __windows__
from ..helper import colored, get_readable_size, get_readable_time
def used_memory(unit: int = 1024 * 1024 * 1024) -> float:
"""
Get the memory usage of the current process.
:param unit: Unit of the memory, default in Gigabytes.
:return: Memory usage of the current process.
"""
if __windows__:
# TODO: windows doesn't include `resource` module
return 0
import resource
return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / unit
def used_memory_readable() -> str:
"""
Get the memory usage of the current process in a human-readable format.
:return: Memory usage of the current process.
"""
return get_readable_size(used_memory(1))
def profiling(func):
"""
Create the Decorator to mark a function for profiling. The time and memory usage will be recorded and printed.
Example:
.. highlight:: python
.. code-block:: python
@profiling
def foo():
print(1)
:param func: function to be profiled
:return: arguments wrapper
"""
from .predefined import default_logger
@wraps(func)
def arg_wrapper(*args, **kwargs):
start_t = time.perf_counter()
start_mem = used_memory(unit=1)
r = func(*args, **kwargs)
elapsed = time.perf_counter() - start_t
end_mem = used_memory(unit=1)
# level_prefix = ''.join('-' for v in inspect.stack() if v and v.index is not None and v.index >= 0)
level_prefix = ''
mem_status = f'memory Δ {get_readable_size(end_mem - start_mem)} {get_readable_size(start_mem)} -> {get_readable_size(end_mem)}'
default_logger.info(
f'{level_prefix} {func.__qualname__} time: {elapsed}s {mem_status}'
)
return r
return arg_wrapper
class TimeDict:
"""Records of time information."""
def __init__(self):
self.accum_time = defaultdict(float)
self.first_start_time = defaultdict(float)
self.start_time = defaultdict(float)
self.end_time = defaultdict(float)
self._key_stack = []
self._pending_reset = False
def __enter__(self):
_key = self._key_stack[-1]
# store only the first enter time
if _key not in self.first_start_time:
self.first_start_time[_key] = time.perf_counter()
self.start_time[_key] = time.perf_counter()
return self
def __exit__(self, typ, value, traceback):
_key = self._key_stack.pop()
self.end_time[_key] = time.perf_counter()
self.accum_time[_key] += self.end_time[_key] - self.start_time[_key]
if self._pending_reset:
self.reset()
def __call__(self, key: str, *args, **kwargs):
"""
Add time counter.
:param key: key name of the counter
:param args: extra arguments
:param kwargs: keyword arguments
:return: self object
"""
self._key_stack.append(key)
return self
def reset(self):
"""Clear the time information."""
if self._key_stack:
self._pending_reset = True
else:
self.accum_time.clear()
self.start_time.clear()
self.first_start_time.clear()
self.end_time.clear()
self._key_stack.clear()
self._pending_reset = False
def __str__(self):
return ' '.join(f'{k}: {v:3.1f}s' for k, v in self.accum_time.items())
class TimeContext:
"""Timing a code snippet with a context manager."""
time_attrs = ['years', 'months', 'days', 'hours', 'minutes', 'seconds']
def __init__(self, task_name: str, logger: 'JinaLogger' = None):
"""
Create the context manager to timing a code snippet.
:param task_name: The context/message.
:param logger: Use existing logger or use naive :func:`print`.
Example:
.. highlight:: python
.. code-block:: python
with TimeContext('loop'):
do_busy()
"""
self.task_name = task_name
self._logger = logger
self.duration = 0
def __enter__(self):
s |
def _enter_msg(self):
if self._logger:
self._logger.info(self.task_name + '...')
else:
print(self.task_name, end=' ...\t', flush=True)
def __exit__(self, typ, value, traceback):
self.duration = self.now()
self.readable_duration = get_readable_time(seconds=self.duration)
self._exit_msg()
def now(self) -> float:
"""
Get the passed time from start to now.
:return: passed time
"""
return time.perf_counter() - self.start
def _exit_msg(self):
if self._logger:
self._logger.info(
f'{self.task_name} takes {self.readable_duration} ({self.duration:.2f}s)'
)
else:
print(
colored(
f'{self.task_name} takes {self.readable_duration} ({self.duration:.2f}s)'
),
flush=True,
)
class ProgressBar(TimeContext):
"""
A simple progress bar.
Example:
.. highlight:: python
.. code-block:: python
with ProgressBar('loop'):
do_busy()
"""
col_width = 100
clear_line = '\r{}\r'.format(' ' * col_width)
spinner = itertools.cycle(['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'])
def __init__(
self,
description: str = 'Working...',
message_on_done: Union[str, Callable[..., str], None] = None,
final_line_feed: bool = True,
total_length: Optional[int] = None,
):
"""
Create the ProgressBar.
:param description: The name of the task, will be displayed in front of the bar.
:param message_on_done: The final message to print when the progress is complete
:param final_line_feed: if False, the line will not get a Line Feed and thus is easily overwritable.
:param total_length: if set, then every :py:meth:`.update` increases the bar by `1/total_length * _bars_on_row`
"""
super().__init__(description, None)
self._bars_on_row = 40
self._completed_progress = 0
self._last_rendered_progress = 0
self._num_update_called = 0
self._on_done = message_on_done
self._final_line_feed = final_line_feed
self._total_length = total_length
self._stop_event = threading.Event()
def update(
self,
progress: float = 1.0,
description: Optional[str] = None,
message: Optional[str] = None,
status: ProgressBarStatus = ProgressBarStatus.WORKING,
first_enter: bool = False,
) -> None:
"""
Increment the progress bar by one unit.
:param progress: The number of unit to increment.
:param description: Change the description text before the progress bar on update.
:param message: Change the message text followed after the progress bar on update.
:param status: If set to a value, it will mark the task as complete, can be either "Done" or "Canceled"
:param first_enter: if this method is called by `__enter__`
"""
if self._total_length:
progress = progress / self._total_length * self._bars_on_row
self._completed_progress += progress
self._last_rendered_progress = self._completed_progress
elapsed = time.perf_counter() - self.start
num_bars = self._completed_progress % self._bars_on_row
num_bars = (
self._bars_on_row
if not num_bars and self._completed_progress
else max(num_bars, 1)
)
num_fullbars = math.floor(num_bars)
num_halfbars = 1 if (num_bars - num_fullbars <= 0.5) else 0
if status in {ProgressBarStatus.DONE, ProgressBarStatus.CANCELED}:
bar_color, unfinished_bar_color = 'yellow', 'yellow'
elif status == ProgressBarStatus.ERROR:
bar_color, unfinished_bar_color = 'red', 'red'
else:
bar_color, unfinished_bar_color = 'green', 'green'
if first_enter:
speed_str = 'estimating...'
elif self._total_length:
_prog = self._num_update_called / self._total_length
speed_str = f'{(_prog * 100):.0f}% ETA: {get_readable_time(seconds=self.now() / (_prog + 1e-6) * (1 - _prog + 1e-6))}'
else:
speed_str = f'{self._num_update_called / elapsed:4.1f} step/s'
self._num_update_called += 0 if first_enter else 1
description_str = description or self.task_name or ''
if status != ProgressBarStatus.WORKING:
description_str = str(status)
msg_str = message or ''
self._bar_info = dict(
bar_color=bar_color,
description_str=description_str,
msg_str=msg_str,
num_fullbars=num_fullbars,
num_halfbars=num_halfbars,
speed_str=speed_str,
unfinished_bar_color=unfinished_bar_color,
)
def _print_bar(self, bar_info):
time_str = str(
datetime.timedelta(seconds=time.perf_counter() - self.start)
).split('.')[0]
sys.stdout.write(self.clear_line)
sys.stdout.write(
'{} {:>10} {:<}{:<} {} {} {}'.format(
colored(next(self.spinner), 'green'),
bar_info['description_str'],
colored('━' * bar_info['num_fullbars'], bar_info['bar_color'])
+ (
colored(
'╸',
bar_info['bar_color']
if bar_info['num_halfbars']
else bar_info['unfinished_bar_color'],
)
),
colored(
'━' * (self._bars_on_row - bar_info['num_fullbars']),
bar_info['unfinished_bar_color'],
attrs=['dark'],
),
colored(time_str, 'cyan'),
bar_info['speed_str'],
bar_info['msg_str'],
)
)
sys.stdout.flush()
def _update_thread(self):
sys.stdout.flush()
while not self._stop_event.is_set():
self._print_bar(self._bar_info)
time.sleep(0.1)
def _enter_msg(self):
self.update(first_enter=True)
self._progress_thread = threading.Thread(
target=self._update_thread, daemon=True
)
self._progress_thread.start()
def __exit__(self, exc_type, value, traceback):
self.duration = self.now()
self.readable_duration = get_readable_time(seconds=self.duration)
if exc_type in {KeyboardInterrupt, SystemExit}:
self._stop_event.set()
self.update(0, status=ProgressBarStatus.CANCELED)
self._print_bar(self._bar_info)
elif exc_type and issubclass(exc_type, Exception):
self._stop_event.set()
self.update(0, status=ProgressBarStatus.ERROR)
self._print_bar(self._bar_info)
else:
# normal ending, i.e. task is complete
self._stop_event.set()
self._progress_thread.join()
self.update(0, status=ProgressBarStatus.DONE)
self._print_bar(self._bar_info)
self._print_final_msg()
def _print_final_msg(self):
if self._last_rendered_progress > 1:
final_msg = f'\033[K{self._completed_progress:.0f} steps done in {self.readable_duration}'
if self._on_done:
if isinstance(self._on_done, str):
final_msg = self._on_done
elif callable(self._on_done):
final_msg = self._on_done()
sys.stdout.write(final_msg)
if self._final_line_feed:
sys.stdout.write('\n')
else:
# no actual render happens
sys.stdout.write(self.clear_line)
sys.stdout.flush()
| elf.start = time.perf_counter()
self._enter_msg()
return self
|
crate_details.rs | use super::{match_version, redirect_base, render_markdown, MatchSemver, MetaData};
use crate::utils::{get_correct_docsrs_style_file, report_error};
use crate::{db::Pool, impl_webpage, repositories::RepositoryStatsUpdater, web::page::WebPage};
use anyhow::anyhow;
use chrono::{DateTime, Utc};
use iron::prelude::*;
use iron::Url;
use postgres::GenericClient;
use router::Router;
use serde::{ser::Serializer, Serialize};
use serde_json::Value;
// TODO: Add target name and versions

/// Everything the crate-details page needs to know about one release of a
/// crate, loaded by [`CrateDetails::new`] from the `crates`, `releases`,
/// `repositories` and `doc_coverage` tables.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct CrateDetails {
    name: String,
    version: String,
    description: Option<String>,
    /// `(login, avatar URL)` pairs for every owner of the crate.
    owners: Vec<(String, String)>,
    dependencies: Option<Value>,
    #[serde(serialize_with = "optional_markdown")]
    readme: Option<String>,
    #[serde(serialize_with = "optional_markdown")]
    rustdoc: Option<String>, // this is description_long in database
    release_time: DateTime<Utc>,
    build_status: bool,
    /// Version of the newest successful, non-yanked build; only populated
    /// when this release itself failed to build.
    last_successful_build: Option<String>,
    rustdoc_status: bool,
    pub archive_storage: bool,
    repository_url: Option<String>,
    homepage_url: Option<String>,
    keywords: Option<Value>,
    have_examples: bool, // need to check this manually
    pub target_name: String,
    /// All releases of this crate, sorted descending by semver
    /// (see `releases_for_crate`).
    releases: Vec<Release>,
    repository_metadata: Option<RepositoryMetadata>,
    pub(crate) metadata: MetaData,
    is_library: bool,
    license: Option<String>,
    documentation_url: Option<String>,
    total_items: Option<f32>,
    documented_items: Option<f32>,
    total_items_needing_examples: Option<f32>,
    items_with_examples: Option<f32>,
    /// Database id for this crate
    pub(crate) crate_id: i32,
    /// Database id for this release
    pub(crate) release_id: i32,
}
/// Stats for the crate's source repository, present when the release has
/// been matched to a row in the `repositories` table.
#[derive(Debug, Clone, PartialEq, Serialize)]
struct RepositoryMetadata {
    stars: i32,
    forks: i32,
    issues: i32,
    name: Option<String>,
    /// Icon name for the repository host, resolved via
    /// `RepositoryStatsUpdater::get_icon_name` (falls back to "code-branch").
    icon: &'static str,
}
/// Serde helper: serialize an optional Markdown string as rendered HTML,
/// or as `null` when absent.
fn optional_markdown<S>(markdown: &Option<String>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    let rendered = match markdown {
        Some(source) => Some(render_markdown(source)),
        None => None,
    };
    rendered.serialize(serializer)
}
/// One release of a crate, as returned by `releases_for_crate`.
#[derive(Debug, Clone, Eq, PartialEq, Serialize)]
pub struct Release {
    /// Database id of the `releases` row.
    pub id: i32,
    pub version: semver::Version,
    pub build_status: bool,
    pub yanked: bool,
    pub is_library: bool,
    pub rustdoc_status: bool,
}
impl CrateDetails {
    /// Load the full details for `name`@`version` from the database.
    ///
    /// Returns `Ok(None)` when no such release exists. `version_or_latest`
    /// is the string shown in URLs ("latest" or the concrete version);
    /// `up` supplies repository-host icon names (a default is used when
    /// it is `None`, e.g. in tests).
    pub fn new(
        conn: &mut impl GenericClient,
        name: &str,
        version: &str,
        version_or_latest: &str,
        up: Option<&RepositoryStatsUpdater>,
    ) -> Result<Option<CrateDetails>, anyhow::Error> {
        // get all stuff, I love you rustfmt
        let query = "
            SELECT
                crates.id AS crate_id,
                releases.id AS release_id,
                crates.name,
                releases.version,
                releases.description,
                releases.dependencies,
                releases.readme,
                releases.description_long,
                releases.release_time,
                releases.build_status,
                releases.rustdoc_status,
                releases.archive_storage,
                releases.repository_url,
                releases.homepage_url,
                releases.keywords,
                releases.have_examples,
                releases.target_name,
                repositories.host as repo_host,
                repositories.stars as repo_stars,
                repositories.forks as repo_forks,
                repositories.issues as repo_issues,
                repositories.name as repo_name,
                releases.is_library,
                releases.yanked,
                releases.doc_targets,
                releases.license,
                releases.documentation_url,
                releases.default_target,
                releases.doc_rustc_version,
                doc_coverage.total_items,
                doc_coverage.documented_items,
                doc_coverage.total_items_needing_examples,
                doc_coverage.items_with_examples
            FROM releases
            INNER JOIN crates ON releases.crate_id = crates.id
            LEFT JOIN doc_coverage ON doc_coverage.release_id = releases.id
            LEFT JOIN repositories ON releases.repository_id = repositories.id
            WHERE crates.name = $1 AND releases.version = $2;";

        let rows = conn.query(query, &[&name, &version])?;

        // No row means the release doesn't exist at all.
        let krate = if rows.is_empty() {
            return Ok(None);
        } else {
            &rows[0]
        };

        let crate_id: i32 = krate.get("crate_id");
        let release_id: i32 = krate.get("release_id");

        // get releases, sorted by semver
        let releases = releases_for_crate(conn, crate_id)?;

        // Repository stats come from a LEFT JOIN, so `repo_host` (and
        // therefore the whole metadata struct) may be absent.
        let repository_metadata =
            krate
                .get::<_, Option<String>>("repo_host")
                .map(|host| RepositoryMetadata {
                    issues: krate.get("repo_issues"),
                    stars: krate.get("repo_stars"),
                    forks: krate.get("repo_forks"),
                    name: krate.get("repo_name"),
                    icon: up.map_or("code-branch", |u| u.get_icon_name(&host)),
                });

        let metadata = MetaData {
            name: krate.get("name"),
            version: krate.get("version"),
            version_or_latest: version_or_latest.to_string(),
            description: krate.get("description"),
            rustdoc_status: krate.get("rustdoc_status"),
            target_name: krate.get("target_name"),
            default_target: krate.get("default_target"),
            doc_targets: MetaData::parse_doc_targets(krate.get("doc_targets")),
            yanked: krate.get("yanked"),
            rustdoc_css_file: get_correct_docsrs_style_file(krate.get("doc_rustc_version"))?,
        };

        // Coverage counters are stored as integers but exposed as f32
        // (they are used for percentage computations in templates).
        let documented_items: Option<i32> = krate.get("documented_items");
        let total_items: Option<i32> = krate.get("total_items");
        let total_items_needing_examples: Option<i32> = krate.get("total_items_needing_examples");
        let items_with_examples: Option<i32> = krate.get("items_with_examples");

        let mut crate_details = CrateDetails {
            name: krate.get("name"),
            version: krate.get("version"),
            description: krate.get("description"),
            owners: Vec::new(),
            dependencies: krate.get("dependencies"),
            readme: krate.get("readme"),
            rustdoc: krate.get("description_long"),
            release_time: krate.get("release_time"),
            build_status: krate.get("build_status"),
            last_successful_build: None,
            rustdoc_status: krate.get("rustdoc_status"),
            archive_storage: krate.get("archive_storage"),
            repository_url: krate.get("repository_url"),
            homepage_url: krate.get("homepage_url"),
            keywords: krate.get("keywords"),
            have_examples: krate.get("have_examples"),
            target_name: krate.get("target_name"),
            releases,
            repository_metadata,
            metadata,
            is_library: krate.get("is_library"),
            license: krate.get("license"),
            documentation_url: krate.get("documentation_url"),
            documented_items: documented_items.map(|v| v as f32),
            total_items: total_items.map(|v| v as f32),
            total_items_needing_examples: total_items_needing_examples.map(|v| v as f32),
            items_with_examples: items_with_examples.map(|v| v as f32),
            crate_id,
            release_id,
        };

        // get owners
        let owners = conn.query(
            "SELECT login, avatar
             FROM owners
             INNER JOIN owner_rels ON owner_rels.oid = owners.id
             WHERE cid = $1",
            &[&crate_id],
        )?;

        crate_details.owners = owners
            .into_iter()
            .map(|row| (row.get("login"), row.get("avatar")))
            .collect();

        // For a failed build, point users at the newest successful,
        // non-yanked release (releases are already sorted descending).
        if !crate_details.build_status {
            crate_details.last_successful_build = crate_details
                .releases
                .iter()
                .filter(|release| release.build_status && !release.yanked)
                .map(|release| release.version.to_string())
                .next();
        }

        Ok(Some(crate_details))
    }

    /// Returns the latest non-yanked, non-prerelease release of this crate (or latest
    /// yanked/prereleased if that is all that exist).
    pub fn latest_release(&self) -> &Release {
        self.releases
            .iter()
            .find(|release| release.version.pre.is_empty() && !release.yanked)
            .unwrap_or(&self.releases[0])
    }
}
/// Return all releases for a crate, sorted in descending order by semver.
///
/// Rows whose stored version string is not valid semver are reported via
/// `report_error` and skipped.
pub(crate) fn releases_for_crate(
    conn: &mut impl GenericClient,
    crate_id: i32,
) -> Result<Vec<Release>, anyhow::Error> {
    let rows = conn.query(
        "SELECT
             id,
             version,
             build_status,
             yanked,
             is_library,
             rustdoc_status
         FROM releases
         WHERE
             releases.crate_id = $1",
        &[&crate_id],
    )?;

    let mut releases = Vec::with_capacity(rows.len());
    for row in rows {
        let version: String = row.get("version");
        match semver::Version::parse(&version) {
            Ok(parsed) => releases.push(Release {
                id: row.get("id"),
                version: parsed,
                build_status: row.get("build_status"),
                yanked: row.get("yanked"),
                is_library: row.get("is_library"),
                rustdoc_status: row.get("rustdoc_status"),
            }),
            Err(err) => {
                // Don't fail the whole listing over one bad row; surface it
                // to error reporting instead.
                report_error(&anyhow!(err).context(format!(
                    "invalid semver in database for crate {}: {}",
                    crate_id, version
                )));
            }
        }
    }

    releases.sort_by(|left, right| right.version.cmp(&left.version));
    Ok(releases)
}
/// Template context for the crate details page.
#[derive(Debug, Clone, PartialEq, Serialize)]
struct CrateDetailsPage {
    details: CrateDetails,
}

// Render `CrateDetailsPage` with the `crate/details.html` template.
impl_webpage! {
    CrateDetailsPage = "crate/details.html",
}
/// Iron handler for the crate details page.
///
/// Redirects `/crate/:name` to `/crate/:name/latest`, resolves a semver
/// requirement to its concrete version (redirecting to the canonical URL),
/// then loads [`CrateDetails`] and renders the page template.
pub fn crate_details_handler(req: &mut Request) -> IronResult<Response> {
    let router = extension!(req, Router);
    // this handler must always be called with a crate name
    let name = cexpect!(req, router.find("name"));
    let req_version = router.find("version");

    // No version in the URL: canonicalize to `/crate/:name/latest`.
    // (`is_none()` instead of the non-idiomatic `== None`.)
    if req_version.is_none() {
        let url = ctry!(
            req,
            Url::parse(&format!("{}/crate/{}/latest", redirect_base(req), name,)),
        );
        return Ok(super::redirect(url));
    }

    let mut conn = extension!(req, Pool).get()?;

    let found_version =
        match_version(&mut conn, name, req_version).and_then(|m| m.assume_exact())?;
    let (version, version_or_latest) = match found_version {
        MatchSemver::Exact((version, _)) => (version.clone(), version),
        MatchSemver::Latest((version, _)) => (version, "latest".to_string()),
        // A semver requirement matched some release: redirect to the exact
        // version so the page has one canonical URL.
        MatchSemver::Semver((version, _)) => {
            let url = ctry!(
                req,
                Url::parse(&format!(
                    "{}/crate/{}/{}",
                    redirect_base(req),
                    name,
                    version
                )),
            );
            return Ok(super::redirect(url));
        }
    };

    let updater = extension!(req, RepositoryStatsUpdater);
    let details = cexpect!(
        req,
        ctry!(
            req,
            CrateDetails::new(
                &mut *conn,
                name,
                &version,
                &version_or_latest,
                Some(updater)
            )
        )
    );

    CrateDetailsPage { details }.into_response(req)
}
#[cfg(test)]
mod tests {
use super::*;
use crate::index::api::CrateOwner;
use crate::test::{assert_redirect, wrapper, TestDatabase};
use anyhow::{Context, Error};
use kuchiki::traits::TendrilSink;
use std::collections::HashMap;
    /// Assert that `CrateDetails::new` computes the expected
    /// `last_successful_build` value for `package`@`version`.
    fn assert_last_successful_build_equals(
        db: &TestDatabase,
        package: &str,
        version: &str,
        expected_last_successful_build: Option<&str>,
    ) -> Result<(), Error> {
        let details = CrateDetails::new(&mut *db.conn(), package, version, version, None)
            .with_context(|| anyhow::anyhow!("could not fetch crate details"))?
            .unwrap();

        assert_eq!(
            details.last_successful_build,
            expected_last_successful_build.map(|s| s.to_string()),
        );
        Ok(())
    }
#[test]
fn test_last_successful_build_when_last_releases_failed_or_yanked() {
wrapper(|env| {
let db = env.db();
env.fake_release().name("foo").version("0.0.1").create()?;
env.fake_release().name("foo").version("0.0.2").create()?;
env.fake_release()
.name("foo")
.version("0.0.3")
.build_result_failed()
.create()?;
env.fake_release()
.name("foo")
.version("0.0.4")
.yanked(true)
.create()?;
env.fake_release()
.name("foo")
.version("0.0.5")
.build_result_failed()
.yanked(true)
.create()?;
assert_last_successful_build_equals(db, "foo", "0.0.1", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.2", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.3", Some("0.0.2"))?;
assert_last_successful_build_equals(db, "foo", "0.0.4", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.5", Some("0.0.2"))?;
Ok(())
});
}
#[test]
fn test_last_successful_build_when_all_releases_failed_or_yanked() {
wrapper(|env| {
let db = env.db();
env.fake_release()
.name("foo")
.version("0.0.1")
.build_result_failed()
.create()?;
env.fake_release()
.name("foo")
.version("0.0.2")
.build_result_failed()
.create()?;
env.fake_release()
.name("foo")
.version("0.0.3")
.yanked(true)
.create()?;
assert_last_successful_build_equals(db, "foo", "0.0.1", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.2", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.3", None)?;
Ok(())
});
}
#[test]
fn test_last_successful_build_with_intermittent_releases_failed_or_yanked() {
wrapper(|env| {
let db = env.db();
env.fake_release().name("foo").version("0.0.1").create()?;
env.fake_release()
.name("foo")
.version("0.0.2")
.build_result_failed()
.create()?;
env.fake_release()
.name("foo")
.version("0.0.3")
.yanked(true)
.create()?;
env.fake_release().name("foo").version("0.0.4").create()?;
assert_last_successful_build_equals(db, "foo", "0.0.1", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.2", Some("0.0.4"))?;
assert_last_successful_build_equals(db, "foo", "0.0.3", None)?;
assert_last_successful_build_equals(db, "foo", "0.0.4", None)?;
Ok(())
});
}
#[test]
fn test_releases_should_be_sorted() {
wrapper(|env| {
let db = env.db();
// Add new releases of 'foo' out-of-order since CrateDetails should sort them descending
env.fake_release().name("foo").version("0.1.0").create()?;
env.fake_release().name("foo").version("0.1.1").create()?;
env.fake_release()
.name("foo")
.version("0.3.0")
.build_result_failed()
.create()?;
env.fake_release().name("foo").version("1.0.0").create()?;
env.fake_release().name("foo").version("0.12.0").create()?;
env.fake_release()
.name("foo")
.version("0.2.0")
.yanked(true)
.create()?;
env.fake_release()
.name("foo")
.version("0.2.0-alpha")
.create()?;
env.fake_release()
.name("foo")
.version("0.0.1")
.build_result_failed()
.binary(true)
.create()?;
let details = CrateDetails::new(&mut *db.conn(), "foo", "0.2.0", "0.2.0", None)
.unwrap()
.unwrap();
assert_eq!(
details.releases,
vec![
Release {
version: semver::Version::parse("1.0.0")?,
build_status: true,
yanked: false,
is_library: true,
rustdoc_status: true,
id: details.releases[0].id,
},
Release {
version: semver::Version::parse("0.12.0")?,
build_status: true,
yanked: false,
is_library: true,
rustdoc_status: true,
id: details.releases[1].id,
},
Release {
version: semver::Version::parse("0.3.0")?,
build_status: false,
yanked: false,
is_library: true,
rustdoc_status: false,
id: details.releases[2].id,
},
Release {
version: semver::Version::parse("0.2.0")?,
build_status: true,
yanked: true,
is_library: true,
rustdoc_status: true,
id: details.releases[3].id,
},
Release {
version: semver::Version::parse("0.2.0-alpha")?,
build_status: true,
yanked: false,
is_library: true,
rustdoc_status: true,
id: details.releases[4].id,
},
Release {
version: semver::Version::parse("0.1.1")?,
build_status: true,
yanked: false,
is_library: true,
rustdoc_status: true,
id: details.releases[5].id,
},
Release {
version: semver::Version::parse("0.1.0")?,
build_status: true,
yanked: false,
is_library: true,
rustdoc_status: true,
id: details.releases[6].id,
},
Release {
version: semver::Version::parse("0.0.1")?,
build_status: false,
yanked: false,
is_library: false,
rustdoc_status: false,
id: details.releases[7].id,
},
]
);
Ok(())
});
}
#[test]
fn test_latest_version() {
wrapper(|env| {
let db = env.db();
env.fake_release().name("foo").version("0.0.1").create()?;
env.fake_release().name("foo").version("0.0.3").create()?;
env.fake_release().name("foo").version("0.0.2").create()?;
for version in &["0.0.1", "0.0.2", "0.0.3"] {
let details = CrateDetails::new(&mut *db.conn(), "foo", version, version, None)
.unwrap()
.unwrap();
assert_eq!(
details.latest_release().version,
semver::Version::parse("0.0.3")?
);
}
Ok(())
})
}
#[test]
fn test_latest_version_ignores_prerelease() {
wrapper(|env| {
let db = env.db();
env.fake_release().name("foo").version("0.0.1").create()?;
env.fake_release()
.name("foo")
.version("0.0.3-pre.1")
.create()?;
env.fake_release().name("foo").version("0.0.2").create()?;
for version in &["0.0.1", "0.0.2", "0.0.3-pre.1"] {
let details = CrateDetails::new(&mut *db.conn(), "foo", version, version, None)
.unwrap()
.unwrap();
assert_eq!(
details.latest_release().version,
semver::Version::parse("0.0.2")?
);
}
Ok(())
})
}
#[test]
fn test_latest_version_ignores_yanked() {
wrapper(|env| {
let db = env.db();
env.fake_release().name("foo").version("0.0.1").create()?;
env.fake_release()
.name("foo")
.version("0.0.3")
.yanked(true)
.create()?;
env.fake_release().name("foo").version("0.0.2").create()?;
for version in &["0.0.1", "0.0.2", "0.0.3"] {
let details = CrateDetails::new(&mut *db.conn(), "foo", version, version, None)
.unwrap()
.unwrap();
assert_eq!(
details.latest_release().version,
semver::Version::parse("0.0.2")?
);
}
Ok(())
})
}
#[test]
fn test_latest_version_only_yanked() {
wrapper(|env| {
let db = env.db();
env.fake_release()
.name("foo")
.version("0.0.1")
.yanked(true)
.create()?;
env.fake_release()
.name("foo")
.version("0.0.3")
.yanked(true)
.create()?;
env.fake_release()
.name("foo")
.version("0.0.2")
.yanked(true)
.create()?;
for version in &["0.0.1", "0.0.2", "0.0.3"] {
let details = CrateDetails::new(&mut *db.conn(), "foo", version, version, None)
.unwrap()
.unwrap();
assert_eq!(
details.latest_release().version,
semver::Version::parse("0.0.3")?
);
}
Ok(())
})
}
#[test]
fn | () {
wrapper(|env| {
env.fake_release()
.name("binary")
.version("0.1.0")
.binary(true)
.create()?;
let page = kuchiki::parse_html()
.one(env.frontend().get("/crate/binary/0.1.0").send()?.text()?);
let warning = page.select_first("a.pure-menu-link.warn").unwrap();
assert_eq!(
warning
.as_node()
.as_element()
.unwrap()
.attributes
.borrow()
.get("title")
.unwrap(),
"binary-0.1.0 is not a library"
);
Ok(())
});
}
#[test]
fn test_updating_owners() {
wrapper(|env| {
let db = env.db();
env.fake_release()
.name("foo")
.version("0.0.1")
.add_owner(CrateOwner {
login: "foobar".into(),
avatar: "https://example.org/foobar".into(),
name: "Foo Bar".into(),
email: "[email protected]".into(),
})
.create()?;
let details = CrateDetails::new(&mut *db.conn(), "foo", "0.0.1", "0.0.1", None)
.unwrap()
.unwrap();
assert_eq!(
details.owners,
vec![("foobar".into(), "https://example.org/foobar".into())]
);
// Adding a new owner, and changing details on an existing owner
env.fake_release()
.name("foo")
.version("0.0.2")
.add_owner(CrateOwner {
login: "foobar".into(),
avatar: "https://example.org/foobarv2".into(),
name: "Foo Bar".into(),
email: "[email protected]".into(),
})
.add_owner(CrateOwner {
login: "barfoo".into(),
avatar: "https://example.org/barfoo".into(),
name: "Bar Foo".into(),
email: "[email protected]".into(),
})
.create()?;
let details = CrateDetails::new(&mut *db.conn(), "foo", "0.0.1", "0.0.1", None)
.unwrap()
.unwrap();
let mut owners = details.owners;
owners.sort();
assert_eq!(
owners,
vec![
("barfoo".into(), "https://example.org/barfoo".into()),
("foobar".into(), "https://example.org/foobarv2".into())
]
);
// Removing an existing owner
env.fake_release()
.name("foo")
.version("0.0.3")
.add_owner(CrateOwner {
login: "barfoo".into(),
avatar: "https://example.org/barfoo".into(),
name: "Bar Foo".into(),
email: "[email protected]".into(),
})
.create()?;
let details = CrateDetails::new(&mut *db.conn(), "foo", "0.0.1", "0.0.1", None)
.unwrap()
.unwrap();
assert_eq!(
details.owners,
vec![("barfoo".into(), "https://example.org/barfoo".into())]
);
// Changing owner details on another of their crates applies the change to both
env.fake_release()
.name("bar")
.version("0.0.1")
.add_owner(CrateOwner {
login: "barfoo".into(),
avatar: "https://example.org/barfoov2".into(),
name: "Bar Foo".into(),
email: "[email protected]".into(),
})
.create()?;
let details = CrateDetails::new(&mut *db.conn(), "foo", "0.0.1", "0.0.1", None)
.unwrap()
.unwrap();
assert_eq!(
details.owners,
vec![("barfoo".into(), "https://example.org/barfoov2".into())]
);
Ok(())
});
}
#[test]
fn feature_flags_report_empty() {
wrapper(|env| {
env.fake_release()
.name("library")
.version("0.1.0")
.features(HashMap::new())
.create()?;
let page = kuchiki::parse_html().one(
env.frontend()
.get("/crate/library/0.1.0/features")
.send()?
.text()?,
);
assert!(page.select_first(r#"p[data-id="empty-features"]"#).is_ok());
Ok(())
});
}
#[test]
fn feature_private_feature_flags_are_hidden() {
wrapper(|env| {
let features = [("_private".into(), Vec::new())]
.iter()
.cloned()
.collect::<HashMap<String, Vec<String>>>();
env.fake_release()
.name("library")
.version("0.1.0")
.features(features)
.create()?;
let page = kuchiki::parse_html().one(
env.frontend()
.get("/crate/library/0.1.0/features")
.send()?
.text()?,
);
assert!(page.select_first(r#"p[data-id="empty-features"]"#).is_ok());
Ok(())
});
}
#[test]
fn feature_flags_without_default() {
wrapper(|env| {
let features = [("feature1".into(), Vec::new())]
.iter()
.cloned()
.collect::<HashMap<String, Vec<String>>>();
env.fake_release()
.name("library")
.version("0.1.0")
.features(features)
.create()?;
let page = kuchiki::parse_html().one(
env.frontend()
.get("/crate/library/0.1.0/features")
.send()?
.text()?,
);
assert!(page.select_first(r#"p[data-id="empty-features"]"#).is_err());
let def_len = page
.select_first(r#"b[data-id="default-feature-len"]"#)
.unwrap();
assert_eq!(def_len.text_contents(), "0");
Ok(())
});
}
#[test]
fn feature_flags_with_nested_default() {
wrapper(|env| {
let features = [
("default".into(), vec!["feature1".into()]),
("feature1".into(), vec!["feature2".into()]),
("feature2".into(), Vec::new()),
]
.iter()
.cloned()
.collect::<HashMap<String, Vec<String>>>();
env.fake_release()
.name("library")
.version("0.1.0")
.features(features)
.create()?;
let page = kuchiki::parse_html().one(
env.frontend()
.get("/crate/library/0.1.0/features")
.send()?
.text()?,
);
assert!(page.select_first(r#"p[data-id="empty-features"]"#).is_err());
let def_len = page
.select_first(r#"b[data-id="default-feature-len"]"#)
.unwrap();
assert_eq!(def_len.text_contents(), "3");
Ok(())
});
}
#[test]
fn feature_flags_report_null() {
wrapper(|env| {
let id = env
.fake_release()
.name("library")
.version("0.1.0")
.create()?;
env.db()
.conn()
.query("UPDATE releases SET features = NULL WHERE id = $1", &[&id])?;
let page = kuchiki::parse_html().one(
env.frontend()
.get("/crate/library/0.1.0/features")
.send()?
.text()?,
);
assert!(page.select_first(r#"p[data-id="null-features"]"#).is_ok());
Ok(())
});
}
#[test]
fn platform_links_are_direct_and_without_nofollow() {
wrapper(|env| {
env.fake_release()
.name("dummy")
.version("0.4.0")
.rustdoc_file("dummy/index.html")
.rustdoc_file("x86_64-pc-windows-msvc/dummy/index.html")
.default_target("x86_64-unknown-linux-gnu")
.add_target("x86_64-pc-windows-msvc")
.create()?;
let response = env.frontend().get("/crate/dummy/0.4.0").send()?;
assert!(response.status().is_success());
let platform_links: Vec<(String, String)> = kuchiki::parse_html()
.one(response.text()?)
.select(r#"a[aria-label="Platform"] + ul li a"#)
.expect("invalid selector")
.map(|el| {
let attributes = el.attributes.borrow();
let url = attributes.get("href").expect("href").to_string();
let rel = attributes.get("rel").unwrap_or("").to_string();
(url, rel)
})
.collect();
assert_eq!(platform_links.len(), 2);
for (url, rel) in platform_links {
assert!(!url.contains("/target-redirect/"));
assert_eq!(rel, "");
}
Ok(())
});
}
#[test]
fn latest_url() {
wrapper(|env| {
env.fake_release()
.name("dummy")
.version("0.4.0")
.rustdoc_file("dummy/index.html")
.rustdoc_file("x86_64-pc-windows-msvc/dummy/index.html")
.default_target("x86_64-unknown-linux-gnu")
.add_target("x86_64-pc-windows-msvc")
.create()?;
let web = env.frontend();
let resp = env.frontend().get("/crate/dummy/latest").send()?;
assert!(resp.status().is_success());
assert!(resp.url().as_str().ends_with("/crate/dummy/latest"));
let body = String::from_utf8(resp.bytes().unwrap().to_vec()).unwrap();
assert!(body.contains("<a href=\"/crate/dummy/latest/features\""));
assert!(body.contains("<a href=\"/crate/dummy/latest/builds\""));
assert!(body.contains("<a href=\"/crate/dummy/latest/source/\""));
assert!(body.contains("<a href=\"/crate/dummy/latest\""));
assert_redirect("/crate/dummy/latest/", "/crate/dummy/latest", web)?;
assert_redirect("/crate/dummy", "/crate/dummy/latest", web)?;
let resp_json = env
.frontend()
.get("/crate/aquarelle/latest/builds.json")
.send()?;
assert!(resp_json
.url()
.as_str()
.ends_with("/crate/aquarelle/latest/builds.json"));
Ok(())
});
}
}
| releases_dropdowns_is_correct |
utils.go | /*
URL redirects
URL redirect operations
API version: v3
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package url_redirects
import (
"encoding/json"
"time"
)
// PtrBool is a helper routine that returns a pointer to given boolean value.
func PtrBool(v bool) *bool { return &v }

// PtrInt is a helper routine that returns a pointer to given integer value.
func PtrInt(v int) *int { return &v }

// PtrInt32 is a helper routine that returns a pointer to given integer value.
func PtrInt32(v int32) *int32 { return &v }

// PtrInt64 is a helper routine that returns a pointer to given integer value.
func PtrInt64(v int64) *int64 { return &v }

// PtrFloat32 is a helper routine that returns a pointer to given float value.
func PtrFloat32(v float32) *float32 { return &v }

// PtrFloat64 is a helper routine that returns a pointer to given float value.
// (Body was missing in the source; restored to match the sibling helpers.)
func PtrFloat64(v float64) *float64 { return &v }

// PtrTime is helper routine that returns a pointer to given Time value.
func PtrTime(v time.Time) *time.Time { return &v }

// PtrString is a helper routine that returns a pointer to given string value.
func PtrString(v string) *string { return &v }
// NullableBool distinguishes an explicit JSON null (set, value nil) from a
// field that was never set at all (unset). The other Nullable* types below
// follow exactly the same shape; this file is generated, so the pattern is
// repeated verbatim per primitive type.
type NullableBool struct {
	value *bool
	isSet bool
}

func (v NullableBool) Get() *bool {
	return v.value
}

func (v *NullableBool) Set(val *bool) {
	v.value = val
	v.isSet = true
}

func (v NullableBool) IsSet() bool {
	return v.isSet
}

func (v *NullableBool) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableBool(val *bool) *NullableBool {
	return &NullableBool{value: val, isSet: true}
}

func (v NullableBool) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableBool) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableInt struct {
	value *int
	isSet bool
}

func (v NullableInt) Get() *int {
	return v.value
}

func (v *NullableInt) Set(val *int) {
	v.value = val
	v.isSet = true
}

func (v NullableInt) IsSet() bool {
	return v.isSet
}

func (v *NullableInt) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableInt(val *int) *NullableInt {
	return &NullableInt{value: val, isSet: true}
}

func (v NullableInt) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableInt) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableInt32 struct {
	value *int32
	isSet bool
}

func (v NullableInt32) Get() *int32 {
	return v.value
}

func (v *NullableInt32) Set(val *int32) {
	v.value = val
	v.isSet = true
}

func (v NullableInt32) IsSet() bool {
	return v.isSet
}

func (v *NullableInt32) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableInt32(val *int32) *NullableInt32 {
	return &NullableInt32{value: val, isSet: true}
}

func (v NullableInt32) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableInt32) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableInt64 struct {
	value *int64
	isSet bool
}

func (v NullableInt64) Get() *int64 {
	return v.value
}

func (v *NullableInt64) Set(val *int64) {
	v.value = val
	v.isSet = true
}

func (v NullableInt64) IsSet() bool {
	return v.isSet
}

func (v *NullableInt64) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableInt64(val *int64) *NullableInt64 {
	return &NullableInt64{value: val, isSet: true}
}

func (v NullableInt64) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableInt64) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableFloat32 struct {
	value *float32
	isSet bool
}

func (v NullableFloat32) Get() *float32 {
	return v.value
}

func (v *NullableFloat32) Set(val *float32) {
	v.value = val
	v.isSet = true
}

func (v NullableFloat32) IsSet() bool {
	return v.isSet
}

func (v *NullableFloat32) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableFloat32(val *float32) *NullableFloat32 {
	return &NullableFloat32{value: val, isSet: true}
}

func (v NullableFloat32) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableFloat32) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableFloat64 struct {
	value *float64
	isSet bool
}

func (v NullableFloat64) Get() *float64 {
	return v.value
}

func (v *NullableFloat64) Set(val *float64) {
	v.value = val
	v.isSet = true
}

func (v NullableFloat64) IsSet() bool {
	return v.isSet
}

func (v *NullableFloat64) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableFloat64(val *float64) *NullableFloat64 {
	return &NullableFloat64{value: val, isSet: true}
}

func (v NullableFloat64) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableFloat64) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableString struct {
	value *string
	isSet bool
}

func (v NullableString) Get() *string {
	return v.value
}

func (v *NullableString) Set(val *string) {
	v.value = val
	v.isSet = true
}

func (v NullableString) IsSet() bool {
	return v.isSet
}

func (v *NullableString) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableString(val *string) *NullableString {
	return &NullableString{value: val, isSet: true}
}

func (v NullableString) MarshalJSON() ([]byte, error) {
	return json.Marshal(v.value)
}

func (v *NullableString) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}

type NullableTime struct {
	value *time.Time
	isSet bool
}

func (v NullableTime) Get() *time.Time {
	return v.value
}

func (v *NullableTime) Set(val *time.Time) {
	v.value = val
	v.isSet = true
}

func (v NullableTime) IsSet() bool {
	return v.isSet
}

func (v *NullableTime) Unset() {
	v.value = nil
	v.isSet = false
}

func NewNullableTime(val *time.Time) *NullableTime {
	return &NullableTime{value: val, isSet: true}
}

// NOTE(review): unlike the other Nullable types, this delegates to
// time.Time's own MarshalJSON rather than json.Marshal on the pointer, so a
// nil value is not serialized as JSON null here — confirm against the
// generator's intent before relying on it.
func (v NullableTime) MarshalJSON() ([]byte, error) {
	return v.value.MarshalJSON()
}

func (v *NullableTime) UnmarshalJSON(src []byte) error {
	v.isSet = true
	return json.Unmarshal(src, &v.value)
}
| { return &v } |
index.js | // import app dependencies and css style
import React, { useRef } from 'react';
import axios from "axios";
import { makeStyles } from '@material-ui/core/styles';
import TextField from '@material-ui/core/TextField';
import { lightBlue } from '@material-ui/core/colors';
import "./index.css";
import { useStoreContext } from '../../store/store';
import { useHistory } from 'react-router-dom';
import { UNSET_USER } from '../../store/actions';
// export functional component voter form and props
export default function | (props) {
// declare state and dispatch for the store file and establish useHistory
console.log("VoterForm props:", props);
const [state, dispatch] = useStoreContext();
const history = useHistory();
// declare all useRef variables
const userNameRef = useRef();
const cityRef = useRef();
const countyRef = useRef();
const stateRef = useRef();
const countryRef = useRef();
// material ui component theming
const useStyles = makeStyles((theme) => ({
root: {
'& > *': {
margin: theme.spacing(1),
width: '25ch',
color: lightBlue,
},
},
}));
const classes = useStyles();
const updateInfo = (event) => {
// console.log("target id", event.target.getAttribute("id"));
const info = event.target.getAttribute("id");
const currentInfo = state.userData[info];
let currentRef;
switch (info) {
case "name":
currentRef = userNameRef;
break;
case "city":
currentRef = cityRef;
break;
case "county":
currentRef = countyRef;
break;
case "state":
currentRef = stateRef;
break;
case "country":
currentRef = countryRef;
break;
}
const newInfo = currentRef.current.childNodes[1].firstChild.value;
state.userData[info] = newInfo;
// console.log("User Data", state.userData);
}
// event handler for user infomation name, city, state, county, country
const handleUpdate = (event) => {
if (event) {
event.preventDefault();
}
const body = {
username: state.user,
userData: state.userData,
issuesData: state.issuesData,
candidateData: state.candidateData
}
axios.post("/api/users/update", body)
.then(response => { console.log(response) });
}
// ^ post update user profile
// event handler to add candidacy
const addCandidate = (event) => {
if (event) {
event.preventDefault();
}
state.candidateData.candidate = true;
console.log("User is now a Candidate", state.candidateData.candidate)
handleUpdate();
props.reRender();
}
// event handler to delete user and unset user data
const handleDeleteUser = (event) => {
if (event) {
event.preventDefault();
}
// dispatch({ type: LOADING });
const body = { username: state.user };
console.log("User to be deleted", body);
axios.post('/api/users/delete', { username: state.user }).then((response) => console.log(response));
dispatch({ type: UNSET_USER });
history.replace('/login');
}
// return form with profile information in texfields
return (
<form className={classes.root} noValidate autoComplete="off">
<h3>Update Your Voter Info</h3>
< div id="form-block">
<TextField className="outlined-basic" onChange={(event) => { updateInfo(event) }} ref={userNameRef} placeholder={props.data.name} id="name" label="Name" variant="outlined" />
<br />
<TextField className="outlined-basic" onChange={(event) => { updateInfo(event) }} ref={cityRef} placeholder={props.data.city} id="city" label="City" variant="outlined" />
<br />
<TextField className="outlined-basic" onChange={(event) => { updateInfo(event) }} ref={countyRef} placeholder={props.data.county} id="county" label="County" variant="outlined" />
<br />
<TextField className="outlined-basic" onChange={(event) => { updateInfo(event) }} ref={stateRef} placeholder={props.data.state} id="state" label="State" variant="outlined" />
<br />
<TextField className="outlined-basic" onChange={(event) => { updateInfo(event) }} ref={countryRef} placeholder={props.data.country} id="country" label="Country" variant="outlined" />
<br />
</div>
{/* buttons to update voter info, update candidate info, and button to delete user and unset data */}
<button className="update-info-button" onClick={(event) => handleUpdate(event)}>Update Voter Info</button>
<br />
{!state.candidateData.candidate ? (<button onClick={(event) => addCandidate(event)} className="update-info-button">Add Candidacy</button>) : (<></>)}
{!state.candidateData.candidate ? (<br />) : <></>}
<button onClick={(event) => handleDeleteUser(event)} className="update-info-button">Delete Account</button>
</form>
);
} | VoterForm |
bit_or_impl.rs | use core::ops::{BitOr, BitOrAssign};
use crate::{private::uint::UInt, XUInt};
impl<T, const N: usize> BitOr for XUInt<T, N>
where
T: UInt,
{
type Output = Self;
fn bitor(mut self, rhs: Self) -> Self::Output {
for index in 0..N {
self.parts[index] |= rhs.parts[index];
}
self
}
}
impl<T, const N: usize> BitOrAssign for XUInt<T, N>
where
T: UInt,
{
fn | (&mut self, rhs: Self) {
for index in 0..N {
self.parts[index] |= rhs.parts[index];
}
}
}
| bitor_assign |
servicegroups.go | package servicefabric
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
// | // Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// ServiceGroupsClient is the client for the ServiceGroups methods of the Servicefabric service.
type ServiceGroupsClient struct {
BaseClient
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// NewServiceGroupsClient creates an instance of the ServiceGroupsClient client.
func NewServiceGroupsClient(timeout *int32) ServiceGroupsClient {
return NewServiceGroupsClientWithBaseURI(DefaultBaseURI, timeout)
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// NewServiceGroupsClientWithBaseURI creates an instance of the ServiceGroupsClient client.
func NewServiceGroupsClientWithBaseURI(baseURI string, timeout *int32) ServiceGroupsClient {
return ServiceGroupsClient{NewWithBaseURI(baseURI, timeout)}
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// Create create service groups
// Parameters:
// applicationName - the name of the service group
// createServiceGroupDescription - the description of the service group
func (client ServiceGroupsClient) Create(ctx context.Context, applicationName string, createServiceGroupDescription BasicCreateServiceGroupDescription) (result String, err error) {
req, err := client.CreatePreparer(ctx, applicationName, createServiceGroupDescription)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Create", nil, "Failure preparing request")
return
}
resp, err := client.CreateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Create", resp, "Failure sending request")
return
}
result, err = client.CreateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Create", resp, "Failure responding to request")
}
return
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// CreatePreparer prepares the Create request.
func (client ServiceGroupsClient) CreatePreparer(ctx context.Context, applicationName string, createServiceGroupDescription BasicCreateServiceGroupDescription) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": applicationName,
}
const APIVersion = "1.0.0"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if client.Timeout != nil {
queryParameters["timeout"] = autorest.Encode("query", *client.Timeout)
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/Applications/{applicationName}/$/GetServices/$/CreateServiceGroup", pathParameters),
autorest.WithJSON(createServiceGroupDescription),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error.
func (client ServiceGroupsClient) CreateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// CreateResponder handles the response to the Create request. The method always
// closes the http.Response Body.
func (client ServiceGroupsClient) CreateResponder(resp *http.Response) (result String, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// Remove remove service groups
// Parameters:
// applicationName - the name of the application
// serviceName - the name of the service
func (client ServiceGroupsClient) Remove(ctx context.Context, applicationName string, serviceName string) (result String, err error) {
req, err := client.RemovePreparer(ctx, applicationName, serviceName)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Remove", nil, "Failure preparing request")
return
}
resp, err := client.RemoveSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Remove", resp, "Failure sending request")
return
}
result, err = client.RemoveResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Remove", resp, "Failure responding to request")
}
return
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// RemovePreparer prepares the Remove request.
func (client ServiceGroupsClient) RemovePreparer(ctx context.Context, applicationName string, serviceName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": applicationName,
"serviceName": serviceName,
}
const APIVersion = "1.0.0"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if client.Timeout != nil {
queryParameters["timeout"] = autorest.Encode("query", *client.Timeout)
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/Applications/{applicationName}/$/GetServiceGroups/{serviceName}/$/Delete", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// RemoveSender sends the Remove request. The method will close the
// http.Response Body if it receives an error.
func (client ServiceGroupsClient) RemoveSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// RemoveResponder handles the response to the Remove request. The method always
// closes the http.Response Body.
func (client ServiceGroupsClient) RemoveResponder(resp *http.Response) (result String, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// Update update service groups
// Parameters:
// applicationName - the name of the application
// serviceName - the name of the service
// updateServiceGroupDescription - the description of the service group update
func (client ServiceGroupsClient) Update(ctx context.Context, applicationName string, serviceName string, updateServiceGroupDescription BasicUpdateServiceGroupDescription) (result String, err error) {
req, err := client.UpdatePreparer(ctx, applicationName, serviceName, updateServiceGroupDescription)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Update", nil, "Failure preparing request")
return
}
resp, err := client.UpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Update", resp, "Failure sending request")
return
}
result, err = client.UpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "servicefabric.ServiceGroupsClient", "Update", resp, "Failure responding to request")
}
return
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// UpdatePreparer prepares the Update request.
func (client ServiceGroupsClient) UpdatePreparer(ctx context.Context, applicationName string, serviceName string, updateServiceGroupDescription BasicUpdateServiceGroupDescription) (*http.Request, error) {
pathParameters := map[string]interface{}{
"applicationName": applicationName,
"serviceName": serviceName,
}
const APIVersion = "1.0.0"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
if client.Timeout != nil {
queryParameters["timeout"] = autorest.Encode("query", *client.Timeout)
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/Applications/{applicationName}/$/GetServices/{serviceName}/$/UpdateServiceGroup", pathParameters),
autorest.WithJSON(updateServiceGroupDescription),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client ServiceGroupsClient) UpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// Deprecated: Please use package github.com/Azure/azure-sdk-for-go/services/servicefabric/6.2/servicefabric instead
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client ServiceGroupsClient) UpdateResponder(resp *http.Response) (result String, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result.Value),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
} | |
mouse_coverage.rs | use crate::{element::ElementId, prelude::UiMessage};
use bracket_lib::prelude::{BTerm, Rect};
use std::collections::VecDeque;
pub struct MouseCoverage {
tiles: Vec<Option<ElementId>>,
width: i32,
}
impl MouseCoverage {
pub fn new(w: u32, h: u32) -> Self {
let sz = (w * h) as usize;
let tiles = vec![None; sz];
Self {
tiles,
width: w as i32,
}
}
pub fn push(&mut self, id: ElementId, area: Rect) {
area.for_each(|p| {
let idx = (p.y * self.width) + p.x;
self.tiles[idx as usize] = Some(id);
});
}
pub(crate) fn | (&self, ctx: &BTerm, mailbox: &mut VecDeque<UiMessage>) {
let mouse_pos = ctx.mouse_pos();
let idx = ((mouse_pos.1 * self.width) + mouse_pos.0) as usize;
if let Some(id) = self.tiles[idx] {
if ctx.left_click {
mailbox.push_front(UiMessage::MouseClick(id));
}
}
}
}
| message_pump |
directives.py | # -*- coding: utf-8 -*-
"""
sphinxjp.themes.revealjs.directives
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:author: tell-k <[email protected]>
:copyright: tell-k. All Rights Reserved.
"""
from docutils import nodes
from docutils.parsers.rst import directives
from docutils.parsers.rst.roles import set_classes
from docutils.parsers.rst import Directive
from . import compat
__docformat__ = 'reStructuredText'
class revealjs(nodes.General, nodes.Element):
""" node for revealjs """
class rv_code(nodes.General, nodes.Element):
""" node for revealjs code section """
class rv_small(nodes.General, nodes.Element):
""" node for revealjs small text section """
class rv_note(nodes.General, nodes.Element):
""" node for revealjs presentation note """
def heading(argument):
""" directives choices for heading tag """
return directives.choice(argument, ('h1', 'h2', 'h3', 'h4', 'h5', 'h6'))
class RevealjsDirective(Directive):
""" Reveal.JS slide entry """
has_content = True
required_arguments = 0
optional_arguments = 100
final_argument_whitespace = False
option_spec = {
'id': directives.unchanged,
'class': directives.class_option,
'noheading': directives.flag,
'title-heading': heading,
'subtitle': directives.unchanged,
'subtitle-heading': directives.unchanged,
'data-autoslide': directives.unchanged,
'data-markdown': directives.unchanged,
'data-transition': directives.unchanged,
'data-transition-speed': directives.unchanged,
'data-background': directives.unchanged,
'data-background-repeat': directives.unchanged,
'data-background-size': directives.unchanged,
'data-background-transition': directives.unchanged,
'data-state': directives.unchanged,
'data-separator': directives.unchanged,
'data-separator-vertical': directives.unchanged,
'data-separator-notes': directives.unchanged,
'data-charset': directives.unchanged,
}
node_class = revealjs
def run(self):
""" build revealjs node """
set_classes(self.options)
text = '\n'.join(self.content)
node = self.node_class(text, **self.options)
self.add_name(node)
if "data-markdown" not in self.options:
self.state.nested_parse(self.content, self.content_offset, node)
if self.arguments:
node['title'] = " ".join(self.arguments)
node['noheading'] = ('noheading' in self.options)
options_list = (
'id',
'title-heading',
'subtitle-heading',
'data-autoslide',
'data-transition',
'data-transition-speed',
'data-background',
'data-background-repeat',
'data-background-size',
'data-background-transition',
'data-state',
'data-markdown',
'data-separator',
'data-separator-vertical',
'data-separator-notes',
'data-charset',
)
for option in options_list:
if option in self.options:
node[option] = self.options.get(option)
return [node]
class RvSmallDirective(Directive):
"""
Create small text tag.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'class': directives.class_option,
}
node_class = rv_small
def run(self):
""" build rv_small node """
set_classes(self.options)
self.assert_has_content()
text = '\n'.join(self.content)
node = self.node_class(text, **self.options)
self.add_name(node)
self.state.nested_parse(self.content, self.content_offset, node)
return [node]
class RvNoteDirective(Directive):
"""
Directive for a notes tag.
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'class': directives.class_option,
}
node_class = rv_note
def run(self):
""" build rv_note node """
set_classes(self.options)
self.assert_has_content()
text = '\n'.join(self.content)
node = self.node_class(text, **self.options)
self.add_name(node)
self.state.nested_parse(self.content, self.content_offset, node)
return [node]
class RvCodeDirective(Directive):
"""
Directive for a code block with highlight.js
"""
has_content = True
required_arguments = 0
optional_arguments = 0
final_argument_whitespace = False
option_spec = {
'id': directives.unchanged,
'class': directives.class_option,
}
node_class = rv_code
def run(self):
""" build rv_code node """
set_classes(self.options)
self.assert_has_content()
node = self.node_class('\n'.join(self.content), **self.options)
return [node]
def visit_revealjs(self, node):
""" build start tag for revealjs """
section_attr = {}
markdown_headings = {"h1": "#", "h2": "##", "h3": "###",
"h4": "####", "h5": "#####", "h6": "######"}
if node.get("id"):
section_attr.update({"ids": [node.get("id")]})
attr_list = (
'data-autoslide',
'data-transition',
'data-transition-speed',
'data-background',
'data-background-repeat',
'data-background-size',
'data-background-transition',
'data-state',
'data-markdown',
'data-separator',
'data-separator-vertical',
'data-separator-notes',
'data-charset',
)
for attr in attr_list:
if node.get(attr) is not None:
section_attr.update({attr: node.get(attr)})
title = None
if node.get("title") and not node.get('noheading'):
title = node.get("title")
title_heading = node.get('title-heading', 'h2')
subtitle = node.get("subtitle")
subtitle_heading = node.get('subtitle-heading', 'h3')
if node.get("data-markdown") is not None:
title_base = compat.text("%(heading)s %(title)s \n")
title_text = None
if title:
title_text = title_base % dict(
heading=markdown_headings.get(title_heading),
title=title
)
subtitle_text = None
if subtitle:
subtitle_text = title_base % dict(
heading=markdown_headings.get(subtitle_heading),
title=subtitle
)
else:
title_base = compat.text("<%(heading)s>%(title)s</%(heading)s>\n")
title_text = None
if title:
title_text = title_base % dict(
title=title,
heading=title_heading)
subtitle_text = None
if subtitle:
subtitle_text = title_base % dict(
title=subtitle,
heading=subtitle_heading)
if node.get("data-markdown") is not None:
self.body.append(self.starttag(node, 'section', **section_attr))
if node.get("data-markdown") == compat.text(""):
self.body.append("<script type='text/template'>\n")
if title_text:
self.body.append(title_text)
if subtitle_text:
self.body.append(subtitle_text)
self.body.append(node.rawsource)
self.body.append("</script>\n")
else:
self.body.append(self.starttag(node, 'section', **section_attr))
if title_text:
self.body.append(title_text) | self.body.append(subtitle_text)
self.set_first_last(node)
def depart_revealjs(self, node=None):
""" build end tag for revealjs """
self.body.append('</section>\n')
def visit_rv_code(self, node):
""" build start tag for rv_code """
self.body.append(self.starttag(node, 'pre'))
self.body.append("<code data-trim contenteditable>")
self.body.append(compat.escape_html(node.rawsource))
def depart_rv_code(self, node=None):
""" build end tag for rv_code """
self.body.append("</code>")
self.body.append("</pre>\n")
def visit_rv_small(self, node):
""" build start tag for rv_small """
self.body.append(self.starttag(node, 'small'))
self.set_first_last(node)
def depart_rv_small(self, node=None):
""" build end tag for rv_small"""
self.body.append("</small>\n")
def visit_rv_note(self, node):
""" build start tag for rv_note """
self.body.append(self.starttag(node, 'aside', **{'class': 'notes'}))
self.set_first_last(node)
def depart_rv_note(self, node=None):
""" build end tag for rv_note """
self.body.append("</aside>\n")
def setup(app):
"""Initialize """
app.add_node(revealjs, html=(visit_revealjs, depart_revealjs))
app.add_node(rv_code, html=(visit_rv_code, depart_rv_code))
app.add_node(rv_note, html=(visit_rv_note, depart_rv_note))
app.add_node(rv_small, html=(visit_rv_small, depart_rv_small))
app.add_directive('revealjs', RevealjsDirective)
app.add_directive('rv_code', RvCodeDirective)
app.add_directive('rv_note', RvNoteDirective)
app.add_directive('rv_small', RvSmallDirective)
return app | if subtitle_text: |
cookies-private.rs | #![cfg(feature = "secrets")]
#![deny(warnings)]
use rocket::http::{Cookie, CookieJar, SameSite};
use rocket::{get, post, routes};
#[post("/")]
fn cookie_add_private(jar: &CookieJar<'_>) {
let mut cookie_a = Cookie::new("a", "v1");
jar.add(cookie_a.clone());
let mut cookie_b = Cookie::new("b", "v2");
jar.add_private(cookie_b.clone());
jar.add(Cookie::new("c", "v3"));
// private: CookieJar::set_defaults(&mut cookie_a);
cookie_a.set_path("/");
cookie_a.set_same_site(SameSite::Strict);
assert_eq!(jar.get_pending(cookie_a.name()), Some(cookie_a));
// private: CookieJar::set_private_defaults(&mut cookie_b);
cookie_b.set_path("/");
cookie_b.set_same_site(SameSite::Strict);
cookie_b.set_http_only(true);
let expires = time::OffsetDateTime::now_utc() + time::Duration::weeks(1);
cookie_b.set_expires(expires);
let mut cookie_b_pending = jar
.get_pending(cookie_b.name())
.expect("cookie_b_pending None");
cookie_b_pending.set_expires(expires);
assert_eq!(cookie_b_pending, cookie_b);
}
#[get("/")]
fn cookie_get_private(jar: &CookieJar<'_>) -> String {
let (a, b, c) = (jar.get("a"), jar.get_private("b"), jar.get("c"));
assert_ne!(a, b.as_ref());
assert_ne!(a, c);
assert_ne!(b.as_ref(), c);
format!(
"{}{}{}",
a.unwrap().value(),
b.unwrap().value(),
c.unwrap().value()
)
}
/// For test if we got really a private cookie
#[get("/oh-no")]
fn cookie_get(jar: &CookieJar<'_>) -> String {
let (a, b, c) = (jar.get("a"), jar.get("b"), jar.get("c"));
format!(
"{}{}{}",
a.unwrap().value(),
b.unwrap().value(),
c.unwrap().value()
)
}
#[cfg(test)]
mod cookies_private_tests {
use super::*;
use rocket::local::blocking::Client;
use rocket::{Build, Rocket};
fn rocket() -> Rocket<Build> {
rocket::build().mount(
"/",
routes![cookie_add_private, cookie_get, cookie_get_private],
)
}
#[test]
fn test_cookie_add_private() {
let client = Client::debug(rocket()).unwrap();
let response = client.post("/").dispatch();
let cookies = response.cookies();
assert_eq!(cookies.iter().count(), 3);
assert_eq!(cookies.get("a").unwrap().value(), "v1");
assert_eq!(cookies.get_private("b").unwrap().value(), "v2");
assert_ne!(cookies.get("b").unwrap().value(), "v2");
assert_eq!(cookies.get("c").unwrap().value(), "v3");
}
#[test]
fn test_cookie_get_private() |
/// Test if we got really a private cookie
#[test]
fn test_cookie_get_ohno() {
let client = Client::debug(rocket()).unwrap();
let response = client
.get("/oh-no")
.cookie(Cookie::new("a", "Cookie"))
.private_cookie(Cookie::new("b", " tastes "))
.cookie(Cookie::new("c", "good!"))
.dispatch();
assert_ne!(response.into_string().unwrap(), "Cookie tastes good!");
}
}
| {
let client = Client::debug(rocket()).unwrap();
let response = client
.get("/")
.cookie(Cookie::new("a", "Cookie"))
.private_cookie(Cookie::new("b", " tastes "))
.cookie(Cookie::new("c", "good!"))
.dispatch();
assert_eq!(response.into_string().unwrap(), "Cookie tastes good!");
} |
vec2.rs | use super::{
err_event_too_large, Batch, BatchConfig, BatchError, BatchSettings, BatchSize, PushResult,
};
pub trait Length {
fn len(&self) -> usize;
fn is_empty(&self) -> bool {
self.len() == 0
}
}
#[derive(Clone)]
pub struct VecBuffer2<T> {
batch: Vec<T>,
bytes: usize,
settings: BatchSize<Self>,
}
impl<T> VecBuffer2<T> {
pub fn new(settings: BatchSize<Self>) -> Self {
Self::new_with_settings(settings)
}
| batch: Vec::with_capacity(settings.events),
bytes: 0,
settings,
}
}
}
impl<T: Length> Batch for VecBuffer2<T> {
type Input = T;
type Output = Vec<T>;
fn get_settings_defaults(
config: BatchConfig,
defaults: BatchSettings<Self>,
) -> Result<BatchSettings<Self>, BatchError> {
Ok(config
.use_size_as_events()?
.get_settings_or_default(defaults))
}
fn push(&mut self, item: Self::Input) -> PushResult<Self::Input> {
let new_bytes = self.bytes + item.len();
if self.is_empty() && item.len() > self.settings.bytes {
err_event_too_large(item.len())
} else if self.batch.len() >= self.settings.events || new_bytes > self.settings.bytes {
PushResult::Overflow(item)
} else {
self.batch.push(item);
self.bytes = new_bytes;
PushResult::Ok(
self.batch.len() >= self.settings.events || new_bytes >= self.settings.bytes,
)
}
}
fn is_empty(&self) -> bool {
self.batch.is_empty()
}
fn fresh(&self) -> Self {
Self::new_with_settings(self.settings)
}
fn finish(self) -> Self::Output {
self.batch
}
fn num_items(&self) -> usize {
self.batch.len()
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::sinks::util::BatchSettings;
impl Length for String {
fn len(&self) -> usize {
self.len() + 1
}
}
#[test]
fn obeys_max_events() {
let settings = BatchSettings::default().events(2).size;
let mut buffer = VecBuffer2::new(settings);
let data = "dummy".to_string();
assert_eq!(buffer.is_empty(), true);
assert_eq!(buffer.num_items(), 0);
assert_eq!(buffer.push(data.clone()), PushResult::Ok(false));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 1);
assert_eq!(buffer.push(data.clone()), PushResult::Ok(true));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 2);
assert_eq!(buffer.push(data.clone()), PushResult::Overflow(data));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 2);
assert_eq!(buffer.finish().len(), 2);
}
#[test]
fn obeys_max_bytes() {
let settings = BatchSettings::default().events(99).bytes(22).size;
let mut buffer = VecBuffer2::new(settings);
let data = "some bytes".to_string();
assert_eq!(buffer.is_empty(), true);
assert_eq!(buffer.num_items(), 0);
assert_eq!(
buffer.push("this record is just too long to be inserted".into()),
PushResult::Ok(false)
);
assert_eq!(buffer.is_empty(), true);
assert_eq!(buffer.num_items(), 0);
assert_eq!(buffer.push(data.clone()), PushResult::Ok(false));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 1);
assert_eq!(buffer.push(data.clone()), PushResult::Ok(true));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 2);
assert_eq!(buffer.push(data.clone()), PushResult::Overflow(data));
assert_eq!(buffer.is_empty(), false);
assert_eq!(buffer.num_items(), 2);
assert_eq!(buffer.finish().len(), 2);
}
} | fn new_with_settings(settings: BatchSize<Self>) -> Self {
Self { |
model_initial_update_webhook.go | /*
* The Plaid API
*
* The Plaid REST API. Please see https://plaid.com/docs/api for more details.
*
* API version: 2020-09-14_1.44.0
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package plaid
import (
"encoding/json"
)
// InitialUpdateWebhook Fired when an Item's initial transaction pull is completed. Once this webhook has been fired, transaction data for the most recent 30 days can be fetched for the Item. If [Account Select v2](https://plaid.com/docs/link/customization/#account-select) is enabled, this webhook will also be fired if account selections for the Item are updated, with `num_transactions` set to the number of net new transactions pulled after the account selection update.
type InitialUpdateWebhook struct {
// `TRANSACTIONS`
WebhookType string `json:"webhook_type"`
// `INITIAL_UPDATE`
WebhookCode string `json:"webhook_code"`
// The error code associated with the webhook.
Error NullableString `json:"error,omitempty"`
// The number of new, unfetched transactions available.
NewTransactions float32 `json:"new_transactions"`
// The `item_id` of the Item associated with this webhook, warning, or error
ItemId string `json:"item_id"`
AdditionalProperties map[string]interface{}
}
type _InitialUpdateWebhook InitialUpdateWebhook
// NewInitialUpdateWebhook instantiates a new InitialUpdateWebhook object
// This constructor will assign default values to properties that have it defined,
// and makes sure properties required by API are set, but the set of arguments
// will change when the set of required properties is changed
func NewInitialUpdateWebhook(webhookType string, webhookCode string, newTransactions float32, itemId string) *InitialUpdateWebhook {
this := InitialUpdateWebhook{}
this.WebhookType = webhookType
this.WebhookCode = webhookCode
this.NewTransactions = newTransactions
this.ItemId = itemId
return &this
}
// NewInitialUpdateWebhookWithDefaults instantiates a new InitialUpdateWebhook object
// This constructor will only assign default values to properties that have it defined,
// but it doesn't guarantee that properties required by API are set
func NewInitialUpdateWebhookWithDefaults() *InitialUpdateWebhook {
this := InitialUpdateWebhook{}
return &this
}
// GetWebhookType returns the WebhookType field value
func (o *InitialUpdateWebhook) GetWebhookType() string {
if o == nil {
var ret string
return ret
}
return o.WebhookType
}
// GetWebhookTypeOk returns a tuple with the WebhookType field value
// and a boolean to check if the value has been set.
func (o *InitialUpdateWebhook) GetWebhookTypeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.WebhookType, true
}
// SetWebhookType sets field value
func (o *InitialUpdateWebhook) SetWebhookType(v string) {
o.WebhookType = v
}
// GetWebhookCode returns the WebhookCode field value
func (o *InitialUpdateWebhook) GetWebhookCode() string {
if o == nil {
var ret string
return ret
}
return o.WebhookCode
}
// GetWebhookCodeOk returns a tuple with the WebhookCode field value
// and a boolean to check if the value has been set.
func (o *InitialUpdateWebhook) GetWebhookCodeOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.WebhookCode, true
}
// SetWebhookCode sets field value
func (o *InitialUpdateWebhook) SetWebhookCode(v string) {
o.WebhookCode = v
}
// GetError returns the Error field value if set, zero value otherwise (both if not set or set to explicit null).
func (o *InitialUpdateWebhook) GetError() string {
if o == nil || o.Error.Get() == nil {
var ret string
return ret
}
return *o.Error.Get()
}
// GetErrorOk returns a tuple with the Error field value if set, nil otherwise
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *InitialUpdateWebhook) GetErrorOk() (*string, bool) {
if o == nil {
return nil, false
}
return o.Error.Get(), o.Error.IsSet()
}
// HasError returns a boolean if a field has been set.
func (o *InitialUpdateWebhook) HasError() bool {
if o != nil && o.Error.IsSet() {
return true
}
return false
}
// SetError gets a reference to the given NullableString and assigns it to the Error field.
func (o *InitialUpdateWebhook) SetError(v string) {
o.Error.Set(&v)
}
// SetErrorNil sets the value for Error to be an explicit nil
func (o *InitialUpdateWebhook) SetErrorNil() {
o.Error.Set(nil)
}
// UnsetError ensures that no value is present for Error, not even an explicit nil
func (o *InitialUpdateWebhook) UnsetError() {
o.Error.Unset()
}
// GetNewTransactions returns the NewTransactions field value
func (o *InitialUpdateWebhook) GetNewTransactions() float32 {
if o == nil {
var ret float32
return ret
}
return o.NewTransactions
}
// GetNewTransactionsOk returns a tuple with the NewTransactions field value
// and a boolean to check if the value has been set.
func (o *InitialUpdateWebhook) GetNewTransactionsOk() (*float32, bool) {
if o == nil {
return nil, false
}
return &o.NewTransactions, true
}
// SetNewTransactions sets field value
func (o *InitialUpdateWebhook) SetNewTransactions(v float32) {
o.NewTransactions = v
}
// GetItemId returns the ItemId field value
func (o *InitialUpdateWebhook) GetItemId() string {
if o == nil {
var ret string
return ret
}
return o.ItemId
}
// GetItemIdOk returns a tuple with the ItemId field value
// and a boolean to check if the value has been set.
func (o *InitialUpdateWebhook) GetItemIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return &o.ItemId, true
}
// SetItemId sets field value
func (o *InitialUpdateWebhook) SetItemId(v string) {
o.ItemId = v
}
func (o InitialUpdateWebhook) MarshalJSON() ([]byte, error) {
toSerialize := map[string]interface{}{}
if true {
toSerialize["webhook_type"] = o.WebhookType
}
if true {
toSerialize["webhook_code"] = o.WebhookCode
}
if o.Error.IsSet() {
toSerialize["error"] = o.Error.Get()
}
if true {
toSerialize["new_transactions"] = o.NewTransactions
}
if true {
toSerialize["item_id"] = o.ItemId
}
for key, value := range o.AdditionalProperties {
toSerialize[key] = value
}
return json.Marshal(toSerialize)
}
func (o *InitialUpdateWebhook) UnmarshalJSON(bytes []byte) (err error) {
varInitialUpdateWebhook := _InitialUpdateWebhook{}
if err = json.Unmarshal(bytes, &varInitialUpdateWebhook); err == nil {
*o = InitialUpdateWebhook(varInitialUpdateWebhook)
}
additionalProperties := make(map[string]interface{})
if err = json.Unmarshal(bytes, &additionalProperties); err == nil {
delete(additionalProperties, "webhook_type")
delete(additionalProperties, "webhook_code")
delete(additionalProperties, "error")
delete(additionalProperties, "new_transactions")
delete(additionalProperties, "item_id")
o.AdditionalProperties = additionalProperties
}
return err
}
type NullableInitialUpdateWebhook struct {
value *InitialUpdateWebhook
isSet bool
}
func (v NullableInitialUpdateWebhook) Get() *InitialUpdateWebhook {
return v.value
}
func (v *NullableInitialUpdateWebhook) Set(val *InitialUpdateWebhook) {
v.value = val
v.isSet = true
}
func (v NullableInitialUpdateWebhook) IsSet() bool {
return v.isSet
}
func (v *NullableInitialUpdateWebhook) Unset() {
v.value = nil
v.isSet = false
}
func | (val *InitialUpdateWebhook) *NullableInitialUpdateWebhook {
return &NullableInitialUpdateWebhook{value: val, isSet: true}
}
func (v NullableInitialUpdateWebhook) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableInitialUpdateWebhook) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}
| NewNullableInitialUpdateWebhook |
custom-scripts.min.js | !function($){"use strict";$.fn.andSelf=function(){return this.addBack.apply(this,arguments)},$(window).on("load",(function(){$(".section-loader").fadeOut("slow"),$(".portfolio-nav li").click((function(){$(".portfolio-nav .current").removeClass("current"),$(this).addClass("current");var selector=$(this).attr("data-filter");return $container.isotope({filter:selector,animationOptions:{queue:!0}}),!1})),$(".current_year").text((new Date).getFullYear());var swiper=new Swiper(".swiper-container",{preloadImages:!1,lazy:!0,loop:!0,loopedSlides:3,slidesPerView:"auto",centeredSlides:!0,spaceBetween:30,pagination:{el:".swiper-pagination"},navigation:{nextEl:".swiper-button-next",prevEl:".swiper-button-prev"}})}));var trigger=$(".navbar-toggler"),overlay=$(".overlay"),navc=$(".navbar-collapse"),active=!1,wow;function submitForm(){var name,last_name,email,message,postData={first_name:$("#name").val(),last_name:$("#L_name").val(),emailAddress:$("#email").val(),message:$("#message").val()};$.ajax({type:"POST",url:"https://james.prod.with-datafire.io/contact",data:postData,success:function(text){"success"==text?formSuccess():(formError(),submitMSG(!1,text))}})}function formSuccess(){$("#contactForm")[0].reset(),submitMSG(!0,"Message Sent!")}function | (){$("#contactForm").removeClass().addClass("shake animated").one("webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend",(function(){$(this).removeClass()}))}function submitMSG(valid,msg){var msgClasses="";msgClasses=valid?"h3 text-center fadeInUp animated text-success":"h3 text-center shake animated text-danger",$("#msgSubmit").removeClass().addClass(msgClasses).text(msg)}$(".navbar-toggler, .navbar-nav li a, .overlay").on("click",(function(){$(".navbar-toggler").toggleClass("active"),overlay.toggleClass("active"),navc.toggleClass("active")})),$("#mh-header").onePageNav({currentClass:"active",changeHash:!1,scrollSpeed:750,scrollThreshold:.5}),$("[data-fancybox]").fancybox({}),new 
WOW({mobile:!1}).init(),$(window).on("scroll",(function(){var scroll;$(window).scrollTop()>=50?$(".nav-scroll").addClass("nav-strict"):$(".nav-scroll").removeClass("nav-strict")})),$(".determinate").each((function(){var width=$(this).text();$(this).css("width",width).empty().append('<i class="fa fa-circle"></i>')})),$("#portfolio-item").mixItUp(),$((function(){$("a[href*=\\#]:not([href=\\#])").click((function(){if(location.pathname.replace(/^\//,"")==this.pathname.replace(/^\//,"")&&location.hostname==this.hostname){var target=$(this.hash);if((target=target.length?target:$("[name="+this.hash.slice(1)+"]")).length)return $("html,body").animate({scrollTop:target.offset().top},600),!1}}))})),$("#contactForm").validator().on("submit",(function(event){event.isDefaultPrevented()?(formError(),submitMSG(!1,"Did you fill in the form properly?")):(event.preventDefault(),submitForm())}))}(jQuery); | formError |
option.py | class Option:
def __init__(self, option_info):
self.option_info = option_info
self.flag = option_info['flag']
def mkdir(self):
if self.flag == False:
return False
return self.option_info['mkdir']
def dir_name(self, problem):
|
def source_name(self, problem):
if self.flag == False:
return problem['problem_id']
return self.replace_info(self.option_info['source_name'], problem)
def replace_name(self, value, problem):
value = value.replace('[NO]', problem['problem_id'])
value = value.replace('[TITLE]', problem['problem_title'])
return value
def get_ext(self, language):
extensions = {
'C': '.c',
'C++': '.cpp',
'C++11': '.cpp',
'C++14': '.cpp',
'C++17': '.cpp',
'Java': '.java',
'Java (OpenJDK)': '.java',
'C11': '.c',
'Python 2': '.py',
'Python 3': '.py',
'PyPy2': '.py',
'PyPy3': '.py',
'Ruby2.5': '.rb',
'Kotlin': '.kt',
'Swift': '.swift',
'C# 6.0': '.cs',
'Text': '.txt',
'node.js': 'js',
'Go': '.go',
'F#': '.fs',
'PHP': '.php',
'Pascal': '.pas',
'Lua': '.lua',
'Perl': '.pl',
'Objective-C': '.m',
'Objective-C++': '.mm',
'C (Clang)': '.c',
'C++11 (Clang)': '.cpp',
'C++14 (Clang)': '.cpp',
'C++17 (Clang)': '.cpp',
'Golfscript': '.gs',
'Bash': '.sh',
'Fortran': '.f95',
'Scheme': '.scm',
'Ada': '.ada',
'awk': '.awk',
'OCaml': '.ml',
'Brainfuck': '.bf',
'Whitespace': '.ws',
'Tcl': '.tcl',
'Assembly (32bit)': '.asm',
'Assembly (32bit)': '.asm',
'D': '.d',
'Clojure': '.clj',
'Rhino': '.js',
'Cobol': '.cob',
'SpiderMonkey': '.js',
'Pike': '.pike',
'sed': '.sed',
'Rust': '.rs',
'Boo': '.boo',
'Intercal': '.i',
'bc': '.bc',
'Nemerle': '.n',
'Cobra': '.cobra',
'Algol 68': '.a68',
'Befunge': '.bf',
'Haxe': '.hx',
'LOLCODE': '.lol',
'VB.NET 4.0': '.vb',
'아희': '.aheui'
}
if not language in extensions:
return True, 'Unknown extension'
return False, extensions[language]
| if self.flag == False:
return ''
if not self.mkdir():
return ''
return self.replace_name(self.option_info['dir_name'], problem) + '/' |
renderer_color_test.go | package renderer
import (
"os"
"testing"
"github.com/stretchr/testify/assert"
)
func TestMain(m *testing.M) {
code := m.Run()
os.Exit(code)
}
func TestColorRenderTitle(t *testing.T) {
renderer := New(true)
expected := "\x1b[1;37mTitle\x1b[0m\n"
actual := renderer.RenderTitle("Title")
assert.Equal(t, expected, actual)
}
func TestColorRenderDescription(t *testing.T) {
renderer := New(true)
expected := "\x1b[37mDescription\x1b[0m\n"
actual := renderer.RenderDescription("Description")
assert.Equal(t, expected, actual)
}
func TestColorRenderPlatform(t *testing.T) {
renderer := New(true)
expected := "\x1b[90mPlatform\x1b[0m\n"
actual := renderer.RenderPlatform("Platform")
assert.Equal(t, expected, actual)
}
func TestColorRenderExample(t *testing.T) {
renderer := New(true)
expected := "\x1b[32mExample\x1b[0m\n"
actual := renderer.RenderExample("Example")
assert.Equal(t, expected, actual)
}
func TestColorRenderSyntax(t *testing.T) {
renderer := New(true)
expected := " \x1b[34mSyntax \x1b[0m\x1b[37mexample\x1b[0m\x1b[34m\x1b[0m\n"
actual := renderer.RenderSyntax("Syntax {{example}}")
assert.Equal(t, expected, actual)
}
func | (t *testing.T) {
tests := []struct {
input string
expected string
}{
{"Syntax {{example}}", " \x1b[34mSyntax \x1b[0m\x1b[37mexample\x1b[0m\x1b[34m\x1b[0m\n"},
{"Syntax {{example}}{{2}}", " \x1b[34mSyntax \x1b[0m\x1b[37mexample2\x1b[0m\x1b[34m\x1b[0m\n"},
{"Empty {{}}", " \x1b[34mEmpty \x1b[0m\n"},
}
r := New(true)
for _, test := range tests {
assert.Equal(t, test.expected, r.formatSyntaxLine(test.input))
}
}
| TestFormatSyntaxLine |
source_repositories.go | package pipelinescheduler
import (
"context"
"fmt"
"net/url"
"reflect"
"strings"
jenkinsio "github.com/jenkins-x/jx-api/v4/pkg/apis/jenkins.io"
v1 "github.com/jenkins-x/jx-api/v4/pkg/apis/jenkins.io/v1"
"github.com/jenkins-x/jx-api/v4/pkg/client/clientset/versioned"
"github.com/jenkins-x/jx-helpers/v3/pkg/kube/naming"
"github.com/jenkins-x/jx-helpers/v3/pkg/stringhelpers"
"github.com/pkg/errors"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
// GetRepositoryGitURL returns the git repository clone URL
func GetRepositoryGitURL(s *v1.SourceRepository) (string, error) {
spec := s.Spec
provider := spec.Provider
owner := spec.Org
repo := spec.Repo
if spec.HTTPCloneURL == "" {
if spec.ProviderKind == "bitbucketserver" {
provider = stringhelpers.UrlJoin(provider, "scm")
}
if provider == "" {
return spec.HTTPCloneURL, fmt.Errorf("missing provider in SourceRepository %s", s.Name)
}
if owner == "" {
return spec.HTTPCloneURL, fmt.Errorf("missing org in SourceRepository %s", s.Name)
}
if repo == "" {
return spec.HTTPCloneURL, fmt.Errorf("missing repo in SourceRepository %s", s.Name)
}
spec.HTTPCloneURL = stringhelpers.UrlJoin(provider, owner, repo) + ".git"
}
return spec.HTTPCloneURL, nil
}
// FindSourceRepositoryWithoutProvider returns a SourceRepository for the given namespace, owner and repo name.
// If no SourceRepository is found, return nil.
func FindSourceRepositoryWithoutProvider(jxClient versioned.Interface, ns, owner, name string) (*v1.SourceRepository, error) {
return FindSourceRepository(jxClient, ns, owner, name, "")
}
// findSourceRepositoryByLabels returns a SourceRepository matching the given label selector, if it exists.
func findSourceRepositoryByLabels(jxClient versioned.Interface, ns, labelSelector string) (*v1.SourceRepository, error) {
repos, err := jxClient.JenkinsV1().SourceRepositories(ns).List(context.TODO(), metav1.ListOptions{
LabelSelector: labelSelector,
})
if err != nil {
return nil, errors.Wrapf(err, "listing SourceRepositorys matching label selector %s in namespace %s", labelSelector, ns)
}
if repos != nil && len(repos.Items) == 1 {
return &repos.Items[0], nil
}
return nil, nil
}
// FindSourceRepository returns a SourceRepository for the given namespace, owner, repo name, and (optional) provider name.
// If no SourceRepository is found, return nil.
func | (jxClient versioned.Interface, ns, owner, name, providerName string) (*v1.SourceRepository, error) {
// Look up by resource name is retained for compatibility with SourceRepositorys created before they were always created with labels
resourceName := naming.ToValidName(owner + "-" + name)
repo, err := jxClient.JenkinsV1().SourceRepositories(ns).Get(context.TODO(), resourceName, metav1.GetOptions{})
if err != nil {
if apierrors.IsNotFound(err) {
labelSelector := fmt.Sprintf("%s=%s,%s=%s", v1.LabelOwner, owner, v1.LabelRepository, name)
if providerName != "" {
labelSelector += fmt.Sprintf(",%s=%s", v1.LabelProvider, providerName)
}
return findSourceRepositoryByLabels(jxClient, ns, labelSelector)
}
return nil, errors.Wrapf(err, "getting SourceRepository %s in namespace %s", resourceName, ns)
}
return repo, nil
}
// GetOrCreateSourceRepositoryCallback gets or creates the SourceRepository for the given repository name and
// organisation invoking the given callback to modify the resource before create/udpate
func GetOrCreateSourceRepositoryCallback(jxClient versioned.Interface, ns, name, organisation, providerURL string, callback func(*v1.SourceRepository)) (*v1.SourceRepository, error) {
resourceName := naming.ToValidName(organisation + "-" + name)
repositories := jxClient.JenkinsV1().SourceRepositories(ns)
providerName := ToProviderName(providerURL)
foundSr, err := FindSourceRepository(jxClient, ns, organisation, name, providerName)
if err != nil {
return nil, errors.Wrapf(err, "failed to find existing SourceRepository")
}
// If we did not find an existing SourceRepository for this org/repo, create one
if foundSr == nil {
return createSourceRepositoryCallback(jxClient, ns, name, organisation, providerURL, callback)
}
// If we did find a SourceRepository, use that as our basis and see if we need to update it.
description := fmt.Sprintf("Imported application for %s/%s", organisation, name)
srCopy := foundSr.DeepCopy()
srCopy.Name = foundSr.Name
srCopy.Spec.Description = description
srCopy.Spec.Org = organisation
srCopy.Spec.Provider = providerURL
srCopy.Spec.Repo = name
srCopy.Labels = map[string]string{}
for k, v := range foundSr.Labels {
srCopy.Labels[k] = v
}
srCopy.Labels[v1.LabelProvider] = providerName
srCopy.Labels[v1.LabelOwner] = organisation
srCopy.Labels[v1.LabelRepository] = name
if callback != nil {
callback(srCopy)
}
srCopy.Sanitize()
// If we don't need to update the found SourceRepository, return it.
if reflect.DeepEqual(srCopy.Spec, foundSr.Spec) && reflect.DeepEqual(srCopy.Labels, foundSr.Labels) {
return foundSr, nil
}
// Otherwise, update the SourceRepository and return it.
answer, err := repositories.Update(context.TODO(), srCopy, metav1.UpdateOptions{})
if err != nil {
return answer, errors.Wrapf(err, "failed to update SourceRepository %s", resourceName)
}
answer, err = repositories.Get(context.TODO(), foundSr.Name, metav1.GetOptions{})
if err != nil {
return answer, errors.Wrapf(err, "failed to get SourceRepository %s", resourceName)
}
return answer, nil
}
// GetOrCreateSourceRepository gets or creates the SourceRepository for the given repository name and organisation
func GetOrCreateSourceRepository(jxClient versioned.Interface, ns, name, organisation, providerURL string) (*v1.SourceRepository, error) {
return GetOrCreateSourceRepositoryCallback(jxClient, ns, name, organisation, providerURL, nil)
}
// ToProviderName takes the git URL and converts it to a provider name which can be used as a label selector
func ToProviderName(gitURL string) string {
if gitURL == "" {
return ""
}
u, err := url.Parse(gitURL)
if err == nil {
host := strings.TrimSuffix(u.Host, ".com")
return naming.ToValidName(host)
}
idx := strings.Index(gitURL, "://")
if idx > 0 {
gitURL = gitURL[idx+3:]
}
gitURL = strings.TrimSuffix(gitURL, "/")
gitURL = strings.TrimSuffix(gitURL, ".com")
return naming.ToValidName(gitURL)
}
// createSourceRepositoryCallback creates a repo, returning the created repo and an error if it couldn't be created
func createSourceRepositoryCallback(client versioned.Interface, namespace, name, organisation, providerURL string, callback func(*v1.SourceRepository)) (*v1.SourceRepository, error) {
resourceName := naming.ToValidName(organisation + "-" + name)
description := fmt.Sprintf("Imported application for %s/%s", organisation, name)
providerName := ToProviderName(providerURL)
labels := map[string]string{
v1.LabelProvider: providerName,
v1.LabelOwner: organisation,
v1.LabelRepository: name,
}
sr := &v1.SourceRepository{
TypeMeta: metav1.TypeMeta{
Kind: "SourceRepository",
APIVersion: jenkinsio.GroupName + "/" + jenkinsio.Version,
},
ObjectMeta: metav1.ObjectMeta{
Name: resourceName,
Labels: labels,
},
Spec: v1.SourceRepositorySpec{
Description: description,
Org: organisation,
Provider: providerURL,
ProviderName: providerName,
Repo: name,
},
}
if callback != nil {
callback(sr)
}
sr.Sanitize()
answer, err := client.JenkinsV1().SourceRepositories(namespace).Create(context.TODO(), sr, metav1.CreateOptions{})
if err != nil {
return nil, errors.Wrapf(err, "failed to create new SourceRepository for organisation %s and repository %s", organisation, name)
}
return answer, nil
}
// IsRemoteEnvironmentRepository returns true if the given repository is a remote environment
func IsRemoteEnvironmentRepository(environments map[string]*v1.Environment, repository *v1.SourceRepository) bool {
gitURL, err := GetRepositoryGitURL(repository)
if err != nil {
return false
}
u2 := gitURL + ".git"
for _, env := range environments {
if env.Spec.Kind != v1.EnvironmentKindTypePermanent {
continue
}
if env.Spec.Source.URL == gitURL || env.Spec.Source.URL == u2 {
if env.Spec.RemoteCluster {
return true
}
}
}
return false
}
// IsIncludedInTheGivenEnvs returns true if the given repository is an environment repository
func IsIncludedInTheGivenEnvs(environments map[string]*v1.Environment, repository *v1.SourceRepository) bool {
gitURL, err := GetRepositoryGitURL(repository)
if err != nil {
return false
}
u2 := gitURL + ".git"
for _, env := range environments {
if env.Spec.Source.URL == gitURL || env.Spec.Source.URL == u2 {
return true
}
}
return false
}
| FindSourceRepository |
warnings.py | """Python part of the warnings subsystem."""
import sys
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except OSError:
pass # the file (probably stderr) is invalid - this warning gets lost.
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
import linecache
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=False):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, str), "message must be a string"
assert isinstance(category, type), "category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, str), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
_filters_mutated()
def simplefilter(action, category=Warning, lineno=0, append=False):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
_filters_mutated()
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
_filters_mutated()
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
if not (isinstance(category, type) and issubclass(category, Warning)):
raise TypeError("category must be a Warning subclass, "
"not '{:s}'".format(type(category).__name__))
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if registry.get('version', 0) != _filters_version:
registry.clear()
registry['version'] = _filters_version
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
import linecache
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
if not callable(showwarning):
raise TypeError("warnings.showwarning() must be set to a "
"function or method")
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
"""Holds the result of a single showwarning() call."""
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
"""A context manager that copies and restores the warnings filter upon
exiting the context.
The 'record' argument specifies whether warnings should be captured by a
custom implementation of warnings.showwarning() and be appended to a list
returned by the context manager. Otherwise None is returned by the context
manager. The objects appended to the list are arguments whose attributes
mirror the arguments to showwarning().
The 'module' argument is to specify an alternative module to the module
named 'warnings' and imported under that name. This argument is only useful
when testing the warnings module itself.
"""
def __init__(self, *, record=False, module=None):
|
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._module._filters_mutated()
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module._filters_mutated()
self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either if the compiled regexs are None, match anything.
_warnings_defaults = False
try:
from _warnings import (filters, _defaultaction, _onceregistry,
warn, warn_explicit, _filters_mutated)
defaultaction = _defaultaction
onceregistry = _onceregistry
_warnings_defaults = True
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
_filters_version = 1
def _filters_mutated():
global _filters_version
_filters_version += 1
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
silence = [ImportWarning, PendingDeprecationWarning]
silence.append(DeprecationWarning)
for cls in silence:
simplefilter("ignore", category=cls)
bytes_warning = sys.flags.bytes_warning
if bytes_warning > 1:
bytes_action = "error"
elif bytes_warning:
bytes_action = "default"
else:
bytes_action = "ignore"
simplefilter(bytes_action, category=BytesWarning, append=1)
# resource usage warnings are enabled by default in pydebug mode
if hasattr(sys, 'gettotalrefcount'):
resource_action = "always"
else:
resource_action = "ignore"
simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
| """Specify whether to record warnings and if an alternative module
should be used other than sys.modules['warnings'].
For compatibility with Python 3.0, please consider all arguments to be
keyword-only.
"""
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False |
Header.js | import React, { Component } from "react";
import { connect } from "react-redux";
import { Link } from "react-router-dom";
import "./Header.css";
import { logoutUser } from "../actions/index";
class | extends Component {
renderContent = () => {
switch (this.props.auth.isAuthenticated) {
case false:
return;
default:
return (
<li>
<a href="/auth/logout">Logout</a>
</li>
);
}
};
render() {
return (
<div className="navbar-fixed ">
<nav>
<div className="nav-wrapper orange col">
<Link
to={this.props.auth.isAuthenticated ? "/workouts" : "/"}
className="left brand-logo"
>
Tri Workouts
</Link>
<ul id="nav-mobile" className="right">
{this.renderContent()}
</ul>
</div>
</nav>
</div>
);
}
}
const mapStateToProps = state => {
return {
auth: state.auth
};
};
export default connect(mapStateToProps, { logoutUser })(Header);
| Header |
interaction_diff.rs | use futures::sink::SinkExt;
use insta::assert_debug_snapshot;
use optic_diff_engine::{diff_interaction, streams, HttpInteraction, SpecEvent, SpecProjection};
use petgraph::dot::Dot;
use serde_json::json;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio_stream::wrappers::LinesStream;
use tokio_stream::StreamExt;
#[tokio::main]
#[test]
async fn can_yield_interactive_diff_result() {
let events = SpecEvent::from_file(
std::env::current_dir()
.unwrap()
.join("tests/fixtures/ergast-example-spec.json")
.to_str()
.unwrap(),
)
.expect("todos spec should deserialize");
let spec_projection = SpecProjection::from(events);
{
let endpoint_projection = spec_projection.endpoint();
for node_index in endpoint_projection.graph.node_indices() {
println!(
"{:?}: {:?}",
node_index,
endpoint_projection.graph.node_weight(node_index).unwrap()
)
}
assert_debug_snapshot!(Dot::with_config(&endpoint_projection.graph, &[]));
assert_debug_snapshot!(Dot::with_config(&spec_projection.shape().graph, &[]));
}
let interaction = HttpInteraction::from_json_str(
r#"{
"uuid": "5",
"request": {
"host": "localhost",
"method": "GET",
"path": "/api/f1/2019/drivers/screw",
"query": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": null,
"value": {}
}
},
"response": {
"statusCode": 200,
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": "application/jsonxxx",
"value": {}
}
},
"tags": []
}"#,
)
.expect("example http interaction should deserialize");
println!("{:?}", interaction);
let results = diff_interaction(&spec_projection, interaction);
println!("{:?}", results);
assert_eq!(results.len(), 1);
assert_debug_snapshot!("can_yield_interactive_diff_result__results", results);
let mut destination: Vec<u8> = vec![];
assert_eq!(destination.len(), 0);
{
let mut sink = streams::diff::into_json_lines(&mut destination);
for result in results {
if let Err(_) = sink.send(result).await {
panic!("interaction diff results should deserialise and write to json lines");
}
}
}
assert!(destination.len() > 0);
let desitination_reader = BufReader::new(std::io::Cursor::new(&destination));
let mut written_lines = LinesStream::new(desitination_reader.lines());
let first_line = written_lines
.next()
.await
.expect("should be able to read a line from the in-memory destination")
.unwrap();
serde_json::from_str::<serde_json::Value>(&first_line).expect("first line should be valid json");
}
#[test]
fn can_yield_unmatched_request_url() {
let events: Vec<SpecEvent> = serde_json::from_value(
json!([
{"PathComponentAdded":{"pathId":"path_1","parentPathId":"root","name":"xyz"}},
{"RequestAdded":{"requestId":"request_1","pathId":"path_1","httpMethod":"POST"}},
{"ResponseAddedByPathAndMethod":{"responseId":"response_1", "httpStatusCode":200,"pathId":"path_1","httpMethod":"GET"}},
])
).expect("should be able to deserialize path added events as spec events");
let interaction = HttpInteraction::from_json_str(
r#"{
"uuid": "5",
"request": {
"host": "localhost",
"method": "GET",
"path": "/abc/def",
"query": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": null,
"value": {}
}
},
"response": {
"statusCode": 200,
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": null, | }"#,
)
.expect("example http interaction should deserialize");
let spec_projection = SpecProjection::from(events);
let results = diff_interaction(&spec_projection, interaction);
let fingerprints = results
.iter()
.map(|result| result.fingerprint())
.collect::<Vec<_>>();
assert_debug_snapshot!("can_yield_unmatched_request_url__results", results);
assert_debug_snapshot!(
"can_yield_unmatched_request_url__fingerprints",
fingerprints
);
assert_eq!(results.len(), 1);
}
#[tokio::main]
#[test]
async fn can_yield_unmatched_shape() {
let events: Vec<SpecEvent> = serde_json::from_value(
json!([
{"PathComponentAdded":{"pathId":"path_1","parentPathId":"root","name":"xyz"}},
{"RequestAdded":{"requestId":"request_1","pathId":"path_1","httpMethod":"POST"}},
{"ResponseAddedByPathAndMethod":{"responseId":"response_1", "httpStatusCode":200,"pathId":"path_1","httpMethod":"POST"}},
{"ShapeAdded":{"shapeId":"shape_1","baseShapeId":"$string","parameters":{"DynamicParameterList":{"shapeParameterIds":[]}},"name":""}},
{"RequestBodySet": {"shapeId":"shape_1","requestId": "request_1", "bodyDescriptor":{"httpContentType":"application/json","shapeId":"shape_1","isRemoved":false}}}
]),
).expect("should be able to deserialize shape added events as spec events");
let spec_projection = SpecProjection::from(events);
let compliant_interaction = HttpInteraction::from_json_str(
r#"{
"uuid": "5",
"request": {
"host": "localhost",
"method": "POST",
"path": "/xyz",
"query": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": "application/json",
"value": {
"asJsonString": null,
"asText": "whatever",
"asShapeHashBytes": null
}
}
},
"response": {
"statusCode": 200,
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": null,
"value": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
}
}
},
"tags": []
}"#,
)
.expect("example http interaction should deserialize");
let mut results = diff_interaction(&spec_projection, compliant_interaction);
assert_debug_snapshot!(results);
assert_eq!(results.len(), 0);
let incompliant_interaction = HttpInteraction::from_json_str(
r#"{
"uuid": "5",
"request": {
"host": "localhost",
"method": "POST",
"path": "/xyz",
"query": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": "application/json",
"value": {
"asJsonString": "null",
"asText": null,
"asShapeHashBytes": null
}
}
},
"response": {
"statusCode": 200,
"headers": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
},
"body": {
"contentType": null,
"value": {
"asJsonString": null,
"asText": null,
"asShapeHashBytes": null
}
}
},
"tags": []
}"#,
)
.expect("example http interaction should deserialize");
results = diff_interaction(&spec_projection, incompliant_interaction);
let fingerprints = results
.iter()
.map(|result| result.fingerprint())
.collect::<Vec<_>>();
assert_debug_snapshot!(results);
assert_debug_snapshot!("can_yield_unmatched_shape__fingerprints", fingerprints);
} | "value": {}
}
},
"tags": [] |
modules2.rs | // modules2.rs
// Make me compile! Execute `rustlings hint modules2` for hints :)
// I AM DONE
mod delicious_snacks {
pub use self::fruits::PEAR as fruit;
pub use self::veggies::CUCUMBER as veggie;
mod fruits {
pub const PEAR: &'static str = "Pear";
pub const APPLE: &'static str = "Apple";
}
mod veggies {
pub const CUCUMBER: &'static str = "Cucumber";
pub const CARROT: &'static str = "Carrot";
}
}
fn main() {
println!(
"favorite snacks: {} and {}",
delicious_snacks::fruit,
delicious_snacks::veggie | );
} | |
schema.go | // Code generated by entc, DO NOT EDIT.
package migrate
import (
"entgo.io/ent/dialect/sql/schema"
"entgo.io/ent/schema/field"
)
var (
// CarsColumns holds the columns for the "cars" table.
CarsColumns = []*schema.Column{
{Name: "id", Type: field.TypeUUID, Unique: true},
{Name: "type", Type: field.TypeString},
{Name: "name", Type: field.TypeString},
{Name: "created_at", Type: field.TypeTime},
{Name: "user_cars", Type: field.TypeUUID, Nullable: true},
}
// CarsTable holds the schema information for the "cars" table.
CarsTable = &schema.Table{
Name: "cars",
Columns: CarsColumns,
PrimaryKey: []*schema.Column{CarsColumns[0]},
ForeignKeys: []*schema.ForeignKey{
{
Symbol: "cars_users_cars",
Columns: []*schema.Column{CarsColumns[4]},
RefColumns: []*schema.Column{UsersColumns[0]},
OnDelete: schema.SetNull,
},
},
}
// UsersColumns holds the columns for the "users" table.
UsersColumns = []*schema.Column{
{Name: "id", Type: field.TypeUUID, Unique: true},
{Name: "user_name", Type: field.TypeString},
{Name: "password", Type: field.TypeString},
{Name: "age", Type: field.TypeInt},
{Name: "created_at", Type: field.TypeTime},
{Name: "email", Type: field.TypeString},
{Name: "permission", Type: field.TypeInt},
}
// UsersTable holds the schema information for the "users" table.
UsersTable = &schema.Table{
Name: "users",
Columns: UsersColumns,
PrimaryKey: []*schema.Column{UsersColumns[0]},
ForeignKeys: []*schema.ForeignKey{},
}
// Tables holds all the tables in the schema.
Tables = []*schema.Table{
CarsTable,
UsersTable,
}
)
func init() | {
CarsTable.ForeignKeys[0].RefTable = UsersTable
} |
|
get-primary-options.pipe.ts | // This file is part of the research.fi API service
//
// Copyright 2019 Ministry of Education and Culture, Finland
//
// :author: CSC - IT Center for Science Ltd., Espoo Finland [email protected] | import { CheckFieldLocalePipe } from './check-field-locale.pipe';
@Pipe({
name: 'getPrimaryOptions',
})
export class GetPrimaryOptionsPipe implements PipeTransform {
transform(
items: unknown,
index: number,
locale: string,
fieldName: string
): unknown {
const checkFieldLocale = new CheckFieldLocalePipe();
const mappedItems = [];
const options = [];
items[index].groupItems.map((groupItem) =>
groupItem.items.forEach((item) => mappedItems.push(item))
);
mappedItems.map((item) =>
options.push({
id: item.itemMeta.id,
value: checkFieldLocale.transform(item, locale, fieldName),
})
);
return options;
}
} | // :license: MIT
import { Pipe, PipeTransform } from '@angular/core'; |
generic.py | import z3
from ..utils import logger, utils
from ..model import model_utils
from . import symbols, struct
def const(name, sort):
assert(isinstance(name, str))
return z3.Const(name, sort)
def array(name, ix_sort, cont_sort):
return z3.Array(name, ix_sort, cont_sort)
class SlSort:
"""Representation of a separation logic sort.
Depending on the backend, this could be either an uninterpreted sort or a
built-in sort (Int).
Indexing into the sort object with a string returns a constant of
the sort of the given name.
"""
def __init__(self, ref, set_class):
"""
Create new separation logic sort whose elements are of sort `ref` in Z3.
:param: ref: Reference type to be used for elements of this sort in z3 (of type :class:`z3.SortRef`)
:param: set_class: Subclass of Set used to create sets of this sort.
"""
assert(isinstance(ref, z3.SortRef))
self.ref = ref
self.set_class = set_class
def __eq__(self, other):
try:
return self.ref == other.ref
except:
return False
def __hash__(self):
# TODO: Proper hash for sorts? (Currently simply negating the ref to ensure the hash is different from the one for the wrapped z3 sort)
return ~hash(self.ref)
def to_declare(self):
"""Must this sort be declared in the SMT2 encoding?"""
raise NotImplementedError("Not specified whether sort must be declared")
def set_sort(self):
"""Return the set / footprint sort associated with this sort."""
return SetSort(self.set_class, self)
def __getitem__(self, elem):
"""Return a constant of this sort of the given string name."""
return const(elem, self.ref)
class Set:
"""Representation of a set of locations / footprint."""
def __init__(self, ref, elem_sort):
"""Create a new set"""
self.ref = ref
self.elem_sort = elem_sort
def __repr__(self):
return "{} : SET({})".format(self.ref, self.elem_sort)
@staticmethod
def get_empty(elem_sort):
"""Return encoding of an empty set"""
raise NotImplementedError("")
def is_empty(self):
"""Return constraint expressing that this set is empty"""
raise NotImplementedError("")
def non_empty(self):
"""Return constraint expressing that this set is nonempty"""
raise NotImplementedError("")
def insert(self, elem):
"""Return a new set that additionally contains `elem`"""
raise NotImplementedError("")
def remove(self, elem):
"""Return a new set with `elem` removed"""
raise NotImplementedError("")
def is_singleton(self, elem):
"""Return constraint expressing that `self` is the singleton set containing `elem`"""
raise NotImplementedError("")
def contains(self, elem):
"""Return constraint expressing that this set contains the given element"""
raise NotImplementedError("")
def subset_of(self, other):
"""Return constraint expressing that `self` is a subset of `other`"""
raise NotImplementedError("")
def disjoint_from(self, other):
"""Return constraint expressing that `self` is disjoint from `other`"""
raise NotImplementedError("")
def is_identical(self, other):
"""Return constraint expressing that `self` is identical to `other`"""
raise NotImplementedError("")
def union_of(self, part1, part2):
"""Return constraint expressing that `self` is the union of `part1` and `part2`"""
raise NotImplementedError("")
def union_of_all(self, *parts):
"""Return constraint expressing that `self` is the union of all `parts`"""
raise NotImplementedError("")
def union_without_elem(self, part1, part2, elem):
"""Return constraint expressing that after removing `elem` from `self`,
the result is the union of `part1` and `part2`"""
raise NotImplementedError("")
class SetSort:
"""A separation logic set / footprint sort associated with a
:class:`backend.generic.SlSort`
Indexing into the sort object with a string returns a constant of
the set sort of the given name.
"""
def __init__(self, set_class, elem_sort):
assert(isinstance(elem_sort, SlSort))
self.set_class = set_class
self.elem_sort = elem_sort
self.ref = z3.ArraySort(self.elem_sort.ref, z3.BoolSort())
self.consts = set()
def __getitem__(self, name):
"""Return a constant of this sort of the given string name."""
assert(isinstance(name, str))
set_ref = array(name, self.elem_sort.ref, z3.BoolSort())
return self.set_class(set_ref, self.elem_sort)
def __eq__(self, other):
try:
return self.elem_sort == other.elem_sort
except:
return False
def __hash__(self):
# TODO: Proper hash for sorts? (Currently simply negating the ref to ensure the hash is different from the one for the wrapped z3 sort)
return ~hash(self.elem_sort)
class LocInterpretation:
"""Interpretation of a location sort in a z3 model.
Represents the interpretation of the location sort itself as well
as all (plain and footprint) constants of a
:class:`sloth.backend.struct.Struct` in a :class:`z3.ModelRef`.
The set of constants is restricted to constants that are both
1. known to the :class:`backend.generic.ConstRegistry` passed to
the constructor
2. interpreted by the z3 model (not None in the z3 model)
This makes `LocInterpretation` a safe interface for iterating over
constants, since neither redundant/unused constants in the
encoding (which may not occur in the z3 model) nor internal
constants introduced by z3 (which are in the z3 model but not part
of our encoding) are contained in the `const` and `fp_consts`
attributes.
"""
def __init__(self, struct, const_registry, z3_model):
self.struct = struct
self.z3_model = z3_model
self._locs = [] # Note: Initialized properly in subclasses
self.labeling = {}
# Initialize constants based on the registry & the model
self.consts = list(const_registry.defined_locs(struct, z3_model))
#print("CONSTS IN LOC INTERPRETATION: {}".format(self.consts))
if self.consts:
self.null = model_utils.val_of(struct.null, z3_model)
else:
self.null = None
self.fp_consts = list(const_registry.defined_fps(struct, z3_model))
# TODO: Locs must currently be initialized in the subclass after calling super and before calling _init_node_labeling --> Make less error prone
def _is_used(self):
# TODO: The following isn't true any more, is it?
# The null constant is always there, because it is declared for the parser
# Thus we define that a sort is used if it contains at least one more const
#null_set = set([self.struct.null])
#return self.struct.sort.consts != null_set
return bool(self.consts)
def __bool__(self):
return bool(self._locs)
def __iter__(self):
return iter(sorted(self._locs, key = lambda v: int(str(v))))
def __len__(self):
return len(self._locs)
def __repr__(self):
def node_repr(k,v):
if v:
return "{}:{}".format(k,v)
else:
return str(k)
ordered = sorted(self.labeling.items(),
key = lambda i: int(str(i[0])))
return ", ".join(map(lambda i : node_repr(*i), ordered))
def empty(self):
"""Is this sort interpreted by an empty set of locations (or not at all)?"""
return not bool(self)
def _init_node_labeling(self):
if not self._is_used():
return
labeling = dict([(loc,[]) for loc in self._locs])
for c in self.consts:
try:
loc = model_utils.val_of(c, self.z3_model)
labeling[loc].append(c)
except KeyError as e:
if loc is None:
fmt = "Adding {} to labeling of {} failed --> {} not actually used in model"
logger.debug(fmt.format(c, loc, c))
else:
fmt = ("Inconsistent internal state: location {} interprets {}"
+ "in z3 model, but model adapter contains only locs {}")
raise utils.IllegalSolverState(fmt.format(loc,c,self._locs))
self.labeling = labeling
class ConstRegistry:
"""Cache for keeping track of constants introduced in an encoding.
Use case: Add all constants that appear in an encoding to a
:class:`ConstRegistry` and pass that registry to all
:class:`LocInterpretation` instances you create. This guarantees
that the set of constants accessible through the intepretation is
the intersection of the constants in the encoding and the
:class:`z3.ModelRef` model returned by z3.
"""
LOC = False
FP = True
DATA = "data"
def __init__(self, structs):
self.structs = structs
self._cache = {(struct, typ) : set()
for struct in structs
for typ in [self.LOC, self.FP]} | self._cache.update({(self.DATA,self.LOC) : set()})
def __repr__(self):
lines = []
for (s,t), cs in self._cache.items():
if cs:
typ = "locs" if t == self.LOC else "foots"
lines.append("{}-{} = {}".format(s, typ, cs))
return "consts(\n" + utils.indented("\n".join(lines)) + "\n)"
def add_const(self, struct, const):
"""Add the given const to the cache for the given struct."""
#print("REGISTRY: {}".format(const))
if const.sort() == struct.fp_sort.ref:
self._cache[(struct, self.FP)].add(const)
elif const.sort() == struct.sort.ref:
self._cache[(struct, self.LOC)].add(const)
else:
fmt = "Constant of wrong sort {} added to {} registry"
raise utils.IllegalSolverState(fmt.format(const.__class__, struct))
def add_data_const(self, const):
assert(const.sort() == symbols.data_sort)
self._cache[(self.DATA, self.LOC)].add(const)
# TODO: Memoize?
def _defined_consts(self, key, z3_model):
try:
for c in self._cache[key]:
if model_utils.val_of(c, z3_model) is not None:
yield c
except KeyError:
fmt = "Registry not defined for {} of {}"
typ = "locations" if key[1] == self.LOC else "footprints"
raise utils.IllegalSolverState(fmt.format(typ, key[0]))
def has_consts(self, struct):
"""Does this registry contain any (location) consts of the given struct?"""
return bool(self._cache[(struct, self.LOC)])
def defined_locs(self, struct, z3_model):
"""Generator for location consts of given struct in given model.
No order on the returned consts guaranteed."""
return self._defined_consts((struct, self.LOC), z3_model)
def defined_data(self, z3_model):
return self._defined_consts((self.DATA, self.LOC), z3_model)
def defined_fps(self, struct, z3_model):
"""Generator for footprint consts of given struct in given model.
No order on the returned consts guaranteed."""
return self._defined_consts((struct, self.FP), z3_model) | |
driverInterface.py | from abc import ABC, abstractclassmethod
class DriverInterface(ABC):
@abstractclassmethod
def | (self) -> object:
raise NotImplementedError()
@abstractclassmethod
def getContent(self) -> str:
raise NotImplementedError()
@abstractclassmethod
def goTo(self) -> None:
raise NotImplementedError()
@abstractclassmethod
def saveScreenshot(self, path: str) -> None:
raise NotImplementedError()
@abstractclassmethod
def getUrl(self) -> str:
raise NotImplementedError()
@abstractclassmethod
def close(self, displayMsg: bool = True) -> None:
raise NotImplementedError()
| start |
creating.ts | import { Environment, environment as envEntity } from "../../entities";
import { Transaction } from "../../db";
import { environmentStateMachine } from "../../environmentStateMachine";
import { log } from "../../../../common/logger";
export async function | (
trx: Transaction,
environment: Environment
) {
await envEntity.touch(trx, environment.id);
// If the environment has no source ID, something went wrong with spinning up
// the environment in the external provider. This is a big error, so move the
// environment to failed...
if (!environment.sourceId) {
await environmentStateMachine.setErrorCreating({
trx,
environment,
});
return;
}
try {
const canProvision = await environmentStateMachine.canSetErrorCreating({
trx,
environment,
});
if (!canProvision.operationSuccess) {
log.debug(
"Environment not yet ready to create: blocked by the state machine!",
{ canProvision, environment: environment.subdomain }
);
}
await environmentStateMachine.setProvisioning({ trx, environment });
} catch (error) {
log.error("Error creating an environment in the source!", {
environment: environment.subdomain,
message: error.message,
stack: error.stack,
});
}
}
| processCreatingEnvironment |
analytics-shell-test-cases.js | import React from 'react';
import Basic from '../examples/basic';
const testCases = {};
const noRenderCases = {};
| element: <Basic />
};
export { testCases, noRenderCases }; | testCases.basic = {
description: 'Basic', |
JavaScript3.js | function calculartemperature(){
let temperature1 = document.getElementById("firstselectbox").value
let temperature2 = document.getElementById("secondselectbox").value
let temepraturenumber = document.querySelector("input.temperatureconverter").value
if (temperature1 == 1 && temperature2 ==2){
let resultadotemperature3 = document.querySelector("p.insideelementsresult").innerHTML = (`A temperatura final é: ${(temepraturenumber *9/5) + 32}° Fahrenheit`)
}
else if (temperature1 == 2 && temperature2 ==1){
let resultadotemperature3 = document.querySelector("p.insideelementsresult").innerHTML = (`A temperatura final é: ${(temepraturenumber - 32) * 5/9}° Celsius`)
} | let resultadotemperature3 = document.querySelector("p.insideelementsresult").innerHTML = (`A temperatura final é: ${temepraturenumber}° Fahrenheit`)
}
} | else if (temperature1 == 1 && temperature2 ==1){
let resultadotemperature3 = document.querySelector("p.insideelementsresult").innerHTML = (`A temperatura final é: ${temepraturenumber}° Celsius`)
}
else if (temperature1 == 2 && temperature2 ==2){ |
next.decorator.ts | import { PARAMETER_TYPE } from "../../constants";
import { params } from "./params.helper";
/**
* @paramDecorator
* Binds a method parameter to the next() function.
*/
export function | (): ParameterDecorator {
return params(PARAMETER_TYPE.NEXT);
}
| Next |
overdraw_color_filter.rs | use crate::prelude::*;
use crate::{ColorFilter, PMColor};
use skia_bindings as sb;
use skia_bindings::SkColorFilter;
pub const NUM_COLORS: usize = 6;
impl RCHandle<SkColorFilter> {
pub fn overdraw(colors: &[PMColor; NUM_COLORS]) -> ColorFilter {
new(colors)
}
}
pub fn | (colors: &[PMColor; NUM_COLORS]) -> ColorFilter {
ColorFilter::from_ptr(unsafe { sb::C_SkOverdrawColorFilter_Make(colors.as_ptr()) }).unwrap()
}
| new |
test_auto_ROIStats.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..preprocess import ROIStats
def test_ROIStats_inputs(): | nohash=True,
usedefault=True,
),
format1D=dict(
argstr='-1Dformat',
xor=['format1DR'],
),
format1DR=dict(
argstr='-1DRformat',
xor=['format1D'],
),
in_file=dict(
argstr='%s',
extensions=None,
mandatory=True,
position=-2,
),
mask=dict(
argstr='-mask %s',
deprecated='1.1.4',
extensions=None,
new_name='mask_file',
position=3,
),
mask_f2short=dict(argstr='-mask_f2short', ),
mask_file=dict(
argstr='-mask %s',
extensions=None,
),
nobriklab=dict(argstr='-nobriklab', ),
nomeanout=dict(argstr='-nomeanout', ),
num_roi=dict(argstr='-numroi %s', ),
out_file=dict(
argstr='> %s',
extensions=None,
keep_extension=False,
name_source='in_file',
name_template='%s_roistat.1D',
position=-1,
),
quiet=dict(argstr='-quiet', ),
roisel=dict(
argstr='-roisel %s',
extensions=None,
),
stat=dict(argstr='%s...', ),
zerofill=dict(
argstr='-zerofill %s',
requires=['num_roi'],
),
)
inputs = ROIStats.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_ROIStats_outputs():
output_map = dict(out_file=dict(extensions=None, ), )
outputs = ROIStats.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value | input_map = dict(
args=dict(argstr='%s', ),
debug=dict(argstr='-debug', ),
environ=dict( |
foreign_definition.rs | #[derive(Clone, Debug, PartialEq)]
pub struct ForeignDefinition {
name: String,
foreign_name: String,
}
impl ForeignDefinition {
pub fn new(name: impl Into<String>, foreign_name: impl Into<String>) -> Self {
Self {
name: name.into(),
foreign_name: foreign_name.into(), | pub fn name(&self) -> &str {
&self.name
}
pub fn foreign_name(&self) -> &str {
&self.foreign_name
}
} | }
}
|
has-no-package-lock.ts | import { existsSync } from 'fs';
export function | () {
return !existsSync(`${process.cwd()}/package-lock.json`);
}
| hasNoPackageLock |
gh_lint_comment.py | import logging
import os
import argparse
from github import Github, GithubException
"""Creates new comment or updates existing comment on a PR using a PAT token"""
def create_update_comment(token, org, repo_name, pr_number, comment_body):
"""Creates or updates existing comment on a PR"""
# auth with GH token
gh = Github(token)
# ref to repo
github_repo = gh.get_repo(f'{org}/{repo_name}')
# get current user id of token owner
current_bot_user = gh.get_user().id
logging.info(f'Current bot user: {current_bot_user}')
# try to get the PR
try:
pr = github_repo.get_pull(pr_number)
except GithubException as e:
logging.info(f'Unable to get PR: {pr_number} in {org}/{repo_name}')
raise
# get all comments in the pr (doesnt capture review comments)
pr_comments = pr.get_issue_comments()
# check if bot has already commented
existing_comments = [
pr_comment for pr_comment in pr_comments
if pr_comment.user.id == current_bot_user
]
if not existing_comments:
# add a comment
comment = pr.create_issue_comment(comment_body)
logging.info(f'Added new comment: {comment}')
else:
# edit existing comment
existing_comments[0].edit(comment_body)
logging.info(f'Edited existing comment: {existing_comments[0]}')
def parse_args():
parser = argparse.ArgumentParser(description='Add/edit comments to PRs')
parser.add_argument('-o', '--org', default='terraform-google-modules', help='Github organization, defaults to cft modules repo', action='store')
parser.add_argument('-r', '--repo', help='Github repo name', action='store')
parser.add_argument('-p', '--pr', help='Github PR number to add/edit comment', type=int, action='store')
parser.add_argument('-c', '--comment', help='Comment body to create or update ', action='store')
return parser.parse_args()
if __name__ == "__main__":
# setup logging
logging.basicConfig(level=logging.INFO)
# check if GITHUB_PAT_TOKEN token is set in env
if os.environ.get('GITHUB_PAT_TOKEN') is None:
raise RuntimeError('Unable to find GITHUB_PAT_TOKEN token in env')
gh_token = os.environ.get('GITHUB_PAT_TOKEN')
# parse args | parser = parse_args()
create_update_comment(gh_token, parser.org, parser.repo, parser.pr, parser.comment) |
|
repo-age.tsx | import twas from 'twas';
import cache from 'webext-storage-cache';
import React from 'dom-chef';
import {RepoIcon} from '@primer/octicons-react';
import elementReady from 'element-ready';
import * as pageDetect from 'github-url-detection';
import features from '.';
import * as api from '../github-helpers/api';
import {getRepo} from '../github-helpers';
interface CommitTarget {
oid: string;
committedDate: string;
resourcePath: string;
history: {
totalCount: number;
};
}
const fresh = [
'Freshly baked',
'Freshly brewed',
'Newly minted',
'Hot off the presses',
'Straight out of the oven',
'Still hot',
'Smells fresh',
'Just a baby',
'It’s my birthday',
'Brand spanking new',
'It’s a new world ✨',
'Certified Fresh Repo™',
'So it begins, the great battle of our time',
];
const dateFormatter = new Intl.DateTimeFormat('en-US', {
year: 'numeric',
month: 'long',
day: 'numeric',
});
const getRepoAge = async (commitSha: string, commitsCount: number): Promise<[committedDate: string, resourcePath: string]> => {
const {repository} = await api.v4(`
repository() {
defaultBranchRef {
target {
... on Commit {
history(first: 5, after: "${commitSha} ${commitsCount - Math.min(6, commitsCount)}") {
nodes {
committedDate
resourcePath
}
}
}
}
}
}
`);
const {committedDate, resourcePath} = repository.defaultBranchRef.target.history.nodes
.reverse()
// Filter out any invalid commit dates #3185
.find((commit: CommitTarget) => new Date(commit.committedDate).getFullYear() > 1970);
return [committedDate, resourcePath];
};
const getFirstCommit = cache.function(async (): Promise<[committedDate: string, resourcePath: string]> => {
const {repository} = await api.v4(`
repository() {
defaultBranchRef {
target {
... on Commit { | history {
totalCount
}
}
}
}
}
`);
const {oid: commitSha, history, committedDate, resourcePath} = repository.defaultBranchRef.target as CommitTarget;
const commitsCount = history.totalCount;
if (commitsCount === 1) {
return [committedDate, resourcePath];
}
return getRepoAge(commitSha, commitsCount);
}, {
cacheKey: () => __filebasename + ':' + getRepo()!.nameWithOwner,
});
async function init(): Promise<void> {
const [firstCommitDate, firstCommitHref] = await getFirstCommit()!;
const birthday = new Date(firstCommitDate);
// `twas` could also return `an hour ago` or `just now`
const [value, unit] = twas(birthday.getTime())
.replace('just now', '1 second')
.replace(/^an?/, '1')
.split(' ');
// About a day old or less ?
const age = Date.now() - birthday.getTime() < 10e7
? fresh[Math.floor(Math.random() * fresh.length)]
: `${value} ${unit} old`;
const sidebarAboutSection = await elementReady('.repository-content .BorderGrid-cell');
sidebarAboutSection!.append(
<h3 className="sr-only">Repository age</h3>,
<div className="mt-3">
<a href={firstCommitHref} className="Link--muted" title={`First commit dated ${dateFormatter.format(birthday)}`}>
<RepoIcon className="mr-2"/>{age}
</a>
</div>,
);
}
void features.add(__filebasename, {
include: [
pageDetect.isRepoRoot,
],
exclude: [
pageDetect.isEmptyRepoRoot,
],
awaitDomReady: false,
deduplicate: 'has-rgh-inner',
init,
}); | oid
committedDate
resourcePath |
func.rs | use std::borrow::Cow;
use maplit::hashmap;
use once_cell::sync::Lazy;
use crate::{
Class,
CompiledInterpolation,
ConstnessOverride,
DefaultElement,
Element,
Field,
Func,
func::Kind,
FunctionTypeHint,
get_debug,
IteratorExt,
settings,
StrExt,
StringExt,
type_ref::{Dir, FishStyle, StrEnc, StrType},
};
use super::RustNativeGeneratedElement;
fn pre_post_arg_handle(mut arg: String, args: &mut Vec<String>) {
if !arg.is_empty() {
arg.push(';');
args.push(arg);
}
}
fn gen_rust_with_name(f: &Func, name: &str, opencv_version: &str) -> String {
static TPL: Lazy<CompiledInterpolation> = Lazy::new(
|| include_str!("tpl/func/rust.tpl.rs").compile_interpolation()
);
let args = Field::rust_disambiguate_names(f.arguments()).collect::<Vec<_>>();
let as_instance_method = f.as_instance_method();
let method_constness = f.constness();
let is_infallible = f.is_infallible();
let mut decl_args = Vec::with_capacity(args.len());
let mut call_args = Vec::with_capacity(args.len());
let mut forward_args = Vec::with_capacity(args.len());
let mut pre_call_args = Vec::with_capacity(args.len());
let mut post_call_args = Vec::with_capacity(args.len());
if let Some(cls) = &as_instance_method {
decl_args.push(cls.type_ref().rust_self_func_decl(method_constness));
call_args.push(cls.type_ref().rust_self_func_call(method_constness));
}
let mut callback_arg_name: Option<String> = None;
for (name, arg) in args {
let type_ref = arg.type_ref();
if arg.is_user_data() {
pre_post_arg_handle(
type_ref.rust_userdata_pre_call(&name, callback_arg_name.as_deref().expect("Can't get name of the callback arg")),
&mut pre_call_args,
);
} else {
if type_ref.as_function().is_some() {
callback_arg_name = Some(name.clone());
}
if !arg.as_slice_len().is_some() {
decl_args.push(type_ref.rust_arg_func_decl(&name));
}
pre_post_arg_handle(type_ref.rust_arg_pre_call(&name, is_infallible), &mut pre_call_args);
}
if let Some((slice_arg, len_div)) = arg.as_slice_len() {
let slice_call = if len_div > 1 {
format!("({slice_arg}.len() / {len_div}) as _", slice_arg=slice_arg, len_div=len_div)
} else {
format!("{slice_arg}.len() as _", slice_arg=slice_arg)
};
call_args.push(slice_call);
} else {
call_args.push(type_ref.rust_arg_func_call(&name, ConstnessOverride::No));
}
forward_args.push(type_ref.rust_arg_forward(&name));
pre_post_arg_handle(type_ref.rust_arg_post_call(&name, is_infallible), &mut post_call_args);
}
let doc_comment = f.rendered_doc_comment(opencv_version);
let debug = get_debug(f);
let visibility = if let Some(cls) = as_instance_method {
if cls.is_trait() {
""
} else {
"pub "
}
} else {
"pub "
};
let identifier = f.identifier();
let is_safe = !f.is_unsafe();
let is_static_func = matches!(f.kind(), Kind::StaticMethod(..) | Kind::Function);
let return_type = f.return_type();
let return_type_func_decl = if is_infallible {
return_type.rust_return_func_decl(FishStyle::No, is_static_func)
} else {
format!("Result<{}>", return_type.rust_return_func_decl(FishStyle::No, is_static_func)).into()
};
let return_type_func_decl = if return_type_func_decl == "()" {
Cow::Borrowed("")
} else {
format!(" -> {}", return_type_func_decl).into()
};
if is_infallible {
post_call_args.push("ret".to_string());
} else {
post_call_args.push("Ok(ret)".to_string());
}
let decl_args = decl_args.join(", ");
let pre_call_args = pre_call_args.join("\n");
let call_args = call_args.join(", ");
let forward_args = forward_args.join(", ");
let post_call_args = post_call_args.join("\n");
let ret_convert = if is_infallible {
""
} else {
".into_result()?"
};
let ret_map = return_type.rust_return_map(is_safe, is_static_func, is_infallible);
let mut attributes = String::new();
if let Some(attrs) = settings::FUNC_CFG_ATTR.get(identifier.as_ref()) {
attributes = format!("#[cfg({})]", attrs.0);
}
if f.is_no_discard() {
attributes.push_str("#[must_use]");
}
let tpl = if let Some(tpl) = settings::FUNC_MANUAL.get(identifier.as_ref()) {
tpl
} else {
&TPL
};
tpl.interpolate(&hashmap! {
"doc_comment" => doc_comment.as_str(),
"debug" => &debug,
"attributes" => &attributes,
"visibility" => visibility,
"unsafety_decl" => if is_safe { "" } else { "unsafe " },
"name" => name,
"generic_decl" => "",
"decl_args" => &decl_args,
"rv_rust_full" => return_type_func_decl.as_ref(),
"pre_call_args" => &pre_call_args,
"unsafety_call" => if is_safe { "unsafe " } else { "" },
"identifier" => identifier.as_ref(),
"call_args" => &call_args,
"forward_args" => &forward_args,
"ret_convert" => ret_convert,
"ret_map" => ret_map.as_ref(),
"post_call_args" => &post_call_args,
})
}
fn cpp_method_call_name(c: &Class, method_name: &str) -> String {
if c.is_by_ptr() {
format!("instance->{name}", name = method_name)
} else {
format!("instance.{name}", name = method_name)
}
}
fn cpp_call_invoke(f: &Func) -> String {
static VOID_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{name}}({{args}});".compile_interpolation()
);
static NORMAL_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{ret_type}} = {{doref}}{{name}}{{generic}}({{args}});".compile_interpolation()
);
static FIELD_READ_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{ret_type}} = {{doref}}{{name}};".compile_interpolation()
);
static FIELD_WRITE_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{name}} = {{args}};".compile_interpolation()
);
static CONSTRUCTOR_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{ret_type}} ret({{args}});".compile_interpolation()
);
static CONSTRUCTOR_NO_ARGS_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{ret_type}} ret;".compile_interpolation()
);
static BOXED_CONSTRUCTOR_TPL: Lazy<CompiledInterpolation> = Lazy::new(||
"{{ret_type}}* ret = new {{ret_type}}({{args}});".compile_interpolation() | let call_name = match f.kind() {
Kind::Function | Kind::GenericFunction | Kind::StaticMethod(..)
| Kind::FunctionOperator(..) => {
f.cpp_fullname()
}
Kind::Constructor(class) => {
class.cpp_fullname().into_owned().into()
}
Kind::FieldAccessor(class) if f.type_hint() == FunctionTypeHint::FieldSetter => {
cpp_method_call_name(&class, DefaultElement::cpp_localname(f).as_ref()).into()
}
Kind::InstanceMethod(class) | Kind::FieldAccessor(class) | Kind::GenericInstanceMethod(class)
| Kind::ConversionMethod(class) | Kind::InstanceOperator(class, ..) => {
cpp_method_call_name(&class, f.cpp_localname().as_ref()).into()
}
};
let mut generic = String::new();
if let Some(spec) = f.as_specialized() {
generic.reserve(64);
generic.push('<');
generic.extend_join(spec.values(), ", ");
generic.push('>');
}
let args = Field::cpp_disambiguate_names(f.arguments())
.map(|(name, arg)| arg.type_ref().cpp_arg_func_call(name).into_owned())
.join(", ");
let return_type = f.return_type();
let tpl = if let Some(cls) = f.as_constructor() {
if cls.is_by_ptr() {
&BOXED_CONSTRUCTOR_TPL
} else if args.is_empty() {
&CONSTRUCTOR_NO_ARGS_TPL
} else {
&CONSTRUCTOR_TPL
}
} else if let Kind::FieldAccessor(..) = f.kind() {
if f.type_hint() == FunctionTypeHint::FieldSetter {
&FIELD_WRITE_TPL
} else {
&FIELD_READ_TPL
}
} else if return_type.is_void() {
&VOID_TPL
} else {
&NORMAL_TPL
};
let ret_type = if f.as_constructor().is_some() {
return_type.cpp_full()
} else {
return_type.cpp_full_ext("ret", true)
};
let doref = if return_type.as_fixed_array().is_some() {
"&"
} else {
""
};
tpl.interpolate(&hashmap! {
"ret_type" => ret_type,
"doref" => doref.into(),
"name" => call_name,
"generic" => generic.into(),
"args" => args.into(),
})
}
fn cpp_method_return<'f>(f: &'f Func, is_infallible: bool) -> Cow<'f, str> {
let return_type = f.return_type();
let ret = if return_type.is_void() {
"".into()
} else if return_type.is_by_ptr() && !f.as_constructor().is_some() {
let out = return_type.source()
.as_class()
.and_then(|cls| if cls.is_abstract() {
Some(Cow::Borrowed("ret"))
} else {
None
});
out.unwrap_or_else(|| format!("new {typ}(ret)", typ=return_type.cpp_full()).into())
} else if let Some(Dir::In(string_type)) | Some(Dir::Out(string_type)) = return_type.as_string() {
match string_type {
StrType::StdString(StrEnc::Text) | StrType::CvString(StrEnc::Text) => {
"ocvrs_create_string(ret.c_str())".into()
},
StrType::CharPtr => {
"ocvrs_create_string(ret)".into()
},
StrType::StdString(StrEnc::Binary) => {
"ocvrs_create_byte_string(ret.data(), ret.size())".into()
}
StrType::CvString(StrEnc::Binary) => {
"ocvrs_create_byte_string(ret.begin(), ret.size())".into()
}
}
} else {
// fixme
return if is_infallible {
format!("({typ})ret", typ=return_type.cpp_extern_return()).into()
} else {
format!("Ok<{typ}>(ret)", typ=return_type.cpp_extern_return()).into()
};
};
if is_infallible {
ret
} else {
format!("Ok({})", ret).into()
}
}
impl RustNativeGeneratedElement for Func<'_, '_> {
fn element_safe_id(&self) -> String {
format!("{}-{}", self.rust_module(), self.rust_localname(FishStyle::No))
}
fn gen_rust(&self, opencv_version: &str) -> String {
let name = if self.is_clone() {
"try_clone".into()
} else if let Some(name_hint) = self.name_hint() {
name_hint.into()
} else {
self.rust_leafname(FishStyle::No)
};
gen_rust_with_name(self, name.as_ref(), opencv_version)
}
fn gen_rust_exports(&self) -> String {
static TPL: Lazy<CompiledInterpolation> = Lazy::new(
|| include_str!("tpl/func/rust_extern.tpl.rs").compile_interpolation()
);
let identifier = self.identifier();
let is_infallible = self.is_infallible();
if settings::FUNC_MANUAL.contains_key(identifier.as_ref()) {
return "".to_string();
}
let mut attributes = String::new();
if let Some(attrs) = settings::FUNC_CFG_ATTR.get(identifier.as_ref()) {
attributes = format!("#[cfg({})]", attrs.0);
}
let mut args = vec![];
if let Some(cls) = self.as_instance_method() {
args.push(cls.type_ref().rust_extern_self_func_decl(self.constness()));
}
for (name, arg) in Field::rust_disambiguate_names(self.arguments()) {
args.push(arg.type_ref().rust_extern_arg_func_decl(&name, ConstnessOverride::No))
}
let return_type = self.return_type();
let return_wrapper_type = if is_infallible {
return_type.rust_extern_return()
} else {
return_type.rust_extern_return_wrapper_full()
};
let return_wrapper_type = if return_wrapper_type == "()" {
Cow::Borrowed("")
} else {
format!(" -> {}", return_wrapper_type).into()
};
TPL.interpolate(&hashmap! {
"attributes" => attributes.into(),
"debug" => get_debug(self).into(),
"identifier" => identifier,
"args" => args.join(", ").into(),
"return_wrapper_type" => return_wrapper_type,
})
}
fn gen_cpp(&self) -> String {
static TPL: Lazy<CompiledInterpolation> = Lazy::new(
|| include_str!("tpl/func/cpp.tpl.cpp").compile_interpolation()
);
static TPL_INFALLIBLE: Lazy<CompiledInterpolation> = Lazy::new(
|| include_str!("tpl/func/cpp_infallible.tpl.cpp").compile_interpolation()
);
let identifier = self.identifier();
let is_infallible = self.is_infallible();
if settings::FUNC_MANUAL.contains_key(identifier.as_ref()) {
return "".to_string();
}
let mut attributes_begin = String::new();
let mut attributes_end = String::new();
if let Some(attrs) = settings::FUNC_CFG_ATTR.get(identifier.as_ref()) {
attributes_begin = format!("#if {}", attrs.1);
attributes_end = "#endif".to_string();
}
let args = Field::cpp_disambiguate_names(self.arguments()).collect::<Vec<_>>();
let mut decl_args = Vec::with_capacity(args.len());
let mut pre_call_args = Vec::with_capacity(args.len());
let mut post_call_args = Vec::with_capacity(args.len());
let mut cleanup_args = Vec::with_capacity(args.len());
if let Some(cls) = self.as_instance_method() {
decl_args.push(cls.type_ref().cpp_self_func_decl(self.constness()));
}
for (name, arg) in args {
let type_ref = arg.type_ref();
decl_args.push(type_ref.cpp_arg_func_decl(&name));
pre_post_arg_handle(type_ref.cpp_arg_pre_call(&name), &mut pre_call_args);
pre_post_arg_handle(type_ref.cpp_arg_post_call(&name), &mut post_call_args);
pre_post_arg_handle(type_ref.cpp_arg_cleanup(&name), &mut cleanup_args);
}
let return_type = self.return_type();
let return_wrapper_full = if is_infallible {
return_type.cpp_extern_return()
} else {
return_type.cpp_extern_return_wrapper_full()
};
let ret = cpp_method_return(self, is_infallible);
let ret = if cleanup_args.is_empty() {
if ret.is_empty() {
"".into()
} else {
format!("return {};", ret).into()
}
} else {
pre_post_arg_handle(format!("{typ} f_ret = {expr}", typ=return_wrapper_full, expr=ret), &mut post_call_args);
"return f_ret;".into()
};
let tpl = if is_infallible {
&TPL_INFALLIBLE
} else {
&TPL
};
tpl.interpolate(&hashmap! {
"attributes_begin" => attributes_begin.into(),
"debug" => get_debug(self).into(),
"return_wrapper_type" => return_wrapper_full,
"identifier" => identifier,
"decl_args" => decl_args.join(", ").into(),
"pre_call_args" => pre_call_args.join("\n").into(),
"call" => cpp_call_invoke(self).into(),
"post_call_args" => post_call_args.join("\n").into(),
"cleanup_args" => cleanup_args.join("\n").into(),
"return" => ret,
"attributes_end" => attributes_end.into(),
})
}
} | );
|
transform.py | # -*- coding: utf-8 -*-
'''
Auther: cytopia
License: MIT
Transformer for kubernetes-incubator/metrics-server from json
into Prometheus readable format.
'''
import os
import json
import re
import time
import requests
import subprocess
from flask import Flask
from flask import Response
'''
Globals that specify at which url metrics for nodes and pods can be found
'''
PROXY = 'http://127.0.0.1:8080'
URL_NODES = PROXY + '/apis/metrics.k8s.io/v1beta1/nodes'
URL_PODS = PROXY + '/apis/metrics.k8s.io/v1beta1/pods'
def shell_exec(command):
'''
Execute raw shell command and return exit code and output
Args:
command (str): Command to execute
Returns:
tuple (int, str, str): Returns exit code, stdout and stderr
''' | bash = os.popen('command -v bash').read().rstrip('\r\n')
# Execute
cpt = subprocess.Popen(
command,
executable=bash,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
)
# Get stdout, stderr and return code
stdout, stderr = cpt.communicate()
return_code = 0 #cpt.returncode
return return_code, stdout, stderr
def json2dict(data):
'''
Safely convert a potential JSON string into a dict
Args:
data (str): Valid or invalid JSON string.
Returns:
dict: Returns dict of string or empty dict in case of invalid JSON input.
'''
json_object = dict()
try:
json_object = json.loads(data)
except ValueError:
pass
return json_object
def val2base(string):
'''
Transforms an arbitrary string value into a prometheus valid base (int|float) type by best guess:
https://prometheus.io/docs/instrumenting/exposition_formats/#comments-help-text-and-type-information
https://golang.org/pkg/strconv/#ParseFloat
https://golang.org/pkg/strconv/#ParseInt
Currently able to handle values of:
15Ki
15Mi
15Gi
1m0s
5m
Args:
string (str): metrics-server metrics value
Returns:
int|float|string: transformed value or initial value if no transformation regex was found.
'''
# Transform KiloByte into Bytes
val = re.search('^([0-9]+)Ki$', string, re.IGNORECASE)
if val and val.group(1):
return int(val.group(1)) * 1024
# Transform Megabytes into Bytes
val = re.search('^([0-9]+)Mi$', string, re.IGNORECASE)
if val and val.group(1):
return int(val.group(1)) * (1024*1024)
# Transform Gigabytes into Bytes
val = re.search('^([0-9]+)Gi$', string, re.IGNORECASE)
if val and val.group(1):
return int(val.group(1)) * (1024*1024*1024)
# Transform Terrabytes into Bytes
val = re.search('^([0-9]+)Ti$', string, re.IGNORECASE)
if val and val.group(1):
return int(val.group(1)) * (1024*1024*1024*1024)
# Transform hours, minutes and seconds into seconds
val = re.search('^(([0-9]+)\s*h\s*)?(([0-9]+)\s*m\s*)?(([0-9]+)\s*s\s*)?$', string, re.IGNORECASE)
if val and (val.group(2) or val.group(4) or val.group(6)):
return (
(int(val.group(2) or 0) * 60 * 60) +
(int(val.group(4) or 0) * 60) +
(int(val.group(6) or 0))
)
# Otherwise return value as it came in
return string
def trans_node_metrics(string):
'''
Transforms metrics-server node metrics (in the form of a JSON string) into Prometheus
readable metrics format (text-based).
https://prometheus.io/docs/instrumenting/exposition_formats/
https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form
Args:
string (str): Valid or invalid JSON string.
Returns:
str: Returns newline separated node metrics ready for Prometheus to pull.
'''
data = json2dict(string)
cpu = []
mem = []
cpu.append('# HELP kube_metrics_server_node_cpu The CPU time of a node in seconds.')
cpu.append('# TYPE kube_metrics_server_node_cpu gauge')
mem.append('# HELP kube_metrics_server_node_mem The memory of a node in Bytes.')
mem.append('# TYPE kube_metrics_server_node_mem gauge')
tpl = 'kube_metrics_server_node_{}{{node="{}"}} {}'
for node in data.get('items', []):
lbl = {
'node': node.get('metadata', []).get('name', '')
}
val = {
'cpu': node.get('usage', []).get('cpu', ''),
'mem': node.get('usage', []).get('memory', '')
}
cpu.append(tpl.format('cpu', lbl['node'], val2base(val['cpu'])))
mem.append(tpl.format('mem', lbl['node'], val2base(val['mem'])))
return '\n'.join(cpu + mem)
def trans_pod_metrics(string):
'''
Transforms metrics-server pod metrics (in the form of a JSON string) into Prometheus
readable metrics format (text-based).
https://prometheus.io/docs/instrumenting/exposition_formats/
https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form
Args:
string (str): Valid or invalid JSON string.
Returns:
str: Returns newline separated node metrics ready for Prometheus to pull.
'''
data = json2dict(string)
more = get_pod_metrics_from_cli()
cpu = []
mem = []
cpu.append('# HELP kube_metrics_server_pod_cpu The CPU time of a pod in seconds.')
cpu.append('# TYPE kube_metrics_server_pod_cpu gauge')
mem.append('# HELP kube_metrics_server_pod_mem The memory of a pod in Bytes.')
mem.append('# TYPE kube_metrics_server_pod_mem gauge')
tpl = 'kube_metrics_server_pod_{}{{node="{}",pod="{}",ip="{}",container="{}",namespace="{}"}} {}'
for pod in data.get('items', []):
lbl = {
'pod': pod.get('metadata', []).get('name', ''),
'ns': pod.get('metadata', []).get('namespace', '')
}
# Loop over defined container in each pod
for container in pod.get('containers', []):
lbl['cont'] = container.get('name', '')
val = {
'cpu': container.get('usage', []).get('cpu', ''),
'mem': container.get('usage', []).get('memory', '')
}
cpu.append(tpl.format(
'cpu',
more[lbl['pod']]['node'],
lbl['pod'],
more[lbl['pod']]['ip'],
lbl['cont'],
lbl['ns'],
val2base(val['cpu'])
))
mem.append(tpl.format(
'mem',
more[lbl['pod']]['node'],
lbl['pod'],
more[lbl['pod']]['ip'],
lbl['cont'],
lbl['ns'],
val2base(val['mem'])
))
return '\n'.join(cpu + mem)
def get_pod_metrics_from_cli():
'''
Get pod metrics via CLI (allows to have node for enriching the data)
Returns
data: Dictionary of additional pod metrics
'''
data = dict()
command = 'kubectl get pods -o wide --no-headers --all-namespaces'
ret, out, err = shell_exec(command)
# 1:NS | 2:Name | 3:Ready | 4:Status | 5:Restarts | 6:Age | 7:IP | 8:Node | 9: NOMINATED NODE
reg = re.compile(r"^([^ ]+)\s+([^ ]+)\s+([^ ]+)\s+([^ ]+)\s+([^ ]+)\s+([^ ]+)\s+([^ ]+)\s+([^ \n]+)[^\n]*$")
for line in out.splitlines():
line = line.decode("utf-8")
line = reg.match(line)
data[line.group(2)] = {
'ns': line.group(1),
'name': line.group(2),
'ready': line.group(3),
'status': line.group(4),
'restarts': line.group(5),
'age': line.group(6),
'ip': line.group(7),
'node': line.group(8)
}
return data
application = Flask(__name__) # pylint: disable=invalid-name
@application.route("/metrics")
def metrics():
'''
This function is the /metrics http entrypoint and will itself do two callbacks
to the running kubectl proxy in order to gather node and pod metrics from specified
kubernetes api urls. Current output is JSON and we must therefore transform both results
into Prometheus readable format:
https://prometheus.io/docs/instrumenting/exposition_formats/
https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form
'''
# Get info from K8s API
req = {
'nodes': requests.get(URL_NODES),
'pods': requests.get(URL_PODS)
}
# Object to JSON text
json = {
'nodes': req['nodes'].text,
'pods': req['pods'].text
}
# Convert to Prometheus format
prom = {
'nodes': trans_node_metrics(json['nodes']),
'pods': trans_pod_metrics(json['pods'])
}
get_pod_metrics_from_cli()
# Return response
return Response(prom['nodes'] + '\n' + prom['pods'], status=200, mimetype='text/plain')
@application.route("/healthz")
def healthz():
'''
This function is the /healthz http entrypoint and will itself do two callbacks
in order to determine the health of node and pod metric endpoints.
Returns:
Response: Flask Response object that will handle returning http header and body.
If one of the pages (nodes or pods metrics by metrics-server) fails,
it will report an overall failure and respond with 503 (service unavailable).
If both a good, it will respond with 200.
'''
req = {
'nodes': requests.get(URL_NODES),
'pods': requests.get(URL_PODS)
}
health = 'ok'
status = 200
if req['nodes'].status_code != 200:
health = 'failed'
status = 503
if req['pods'].status_code != 200:
health = 'failed'
status = 503
return Response(health, status=status, mimetype='text/plain')
@application.route("/")
def index():
'''
This function is the / http entrypoint and will simply provide a link to
the metrics and health page. This is done, because all metrics endpoints I have encountered
so far also do it exactly this way.
Returns:
Response: Flask Response object that will handle returning http header and body.
Returns default Prometheus endpoint index page (http 200) with links
to /healthz and /metrics.
'''
return '''
<html>
<head><title>metrics-server-prom</title></head>
<body>
<h1>metrics-server-prom</h1>
<ul>
<li><a href='/metrics'>metrics</a></li>
<li><a href='/healthz'>healthz</a></li>
</ul>
</body>
</html>
'''
if __name__ == "__main__":
application.run(host='0.0.0.0') | # Get absolute path of bash |
rewrite.py | # Copyright 2019 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Rewrite script for TF->JAX."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
from absl import app
from absl import flags
flags.DEFINE_boolean('numpy_to_jax', False,
'Whether or not to rewrite numpy imports to jax.numpy')
flags.DEFINE_list('omit_deps', [], 'List of build deps being omitted.')
FLAGS = flags.FLAGS
TF_REPLACEMENTS = {
'import tensorflow ':
'from tensorflow_probability.python.internal.backend import numpy ',
'import tensorflow.compat.v1':
'from tensorflow_probability.python.internal.backend.numpy.compat '
'import v1',
'import tensorflow.compat.v2':
'from tensorflow_probability.python.internal.backend.numpy.compat '
'import v2',
'import tensorflow_probability as tfp':
'import tensorflow_probability as tfp; '
'tfp = tfp.substrates.numpy',
'from tensorflow.python.framework import tensor_shape':
('from tensorflow_probability.python.internal.backend.numpy.gen '
'import tensor_shape'),
'from tensorflow.python.framework import ops':
('from tensorflow_probability.python.internal.backend.numpy '
'import ops'),
'from tensorflow.python.framework import tensor_util':
('from tensorflow_probability.python.internal.backend.numpy '
'import ops'),
'from tensorflow.python.util import':
'from tensorflow_probability.python.internal.backend.numpy import',
'from tensorflow.python.util.all_util':
'from tensorflow_probability.python.internal.backend.numpy.private',
'from tensorflow.python.ops.linalg':
'from tensorflow_probability.python.internal.backend.numpy.gen',
'from tensorflow.python.ops import parallel_for':
'from tensorflow_probability.python.internal.backend.numpy '
'import functional_ops as parallel_for',
'from tensorflow.python.ops import control_flow_ops':
'from tensorflow_probability.python.internal.backend.numpy '
'import control_flow as control_flow_ops',
'from tensorflow.python.eager import context':
'from tensorflow_probability.python.internal.backend.numpy '
'import private',
('from tensorflow.python.client '
'import pywrap_tf_session as c_api'):
'pass',
('from tensorflow.python '
'import pywrap_tensorflow as c_api'):
'pass'
}
DISABLED_BY_PKG = {
'experimental':
('auto_batching', 'composite_tensor', 'linalg',
'marginalize', 'nn', 'sequential', 'substrates', 'vi'),
}
LIBS = ('bijectors', 'distributions', 'experimental', 'math', 'mcmc',
'optimizer', 'random', 'stats', 'util')
INTERNALS = ('assert_util', 'batched_rejection_sampler', 'broadcast_util',
'cache_util', 'callable_util',
'custom_gradient', 'distribution_util', 'dtype_util',
'hypothesis_testlib', 'implementation_selection', 'monte_carlo',
'name_util', 'nest_util', 'numerics_testing',
'parameter_properties', 'prefer_static', 'samplers',
'special_math', 'structural_tuple', 'tensor_util',
'tensorshape_util', 'test_combinations', 'test_util', 'unnest',
'variadic_reduce', 'vectorization_util')
OPTIMIZERS = ('linesearch',)
LINESEARCH = ('internal',)
SAMPLERS = ('categorical', 'normal', 'poisson', 'uniform', 'shuffle')
PRIVATE_TF_PKGS = ('array_ops', 'control_flow_util', 'gradient_checker_v2',
'numpy_text', 'random_ops')
def main(argv):
disabled_by_pkg = dict(DISABLED_BY_PKG)
for dep in FLAGS.omit_deps:
pkg = dep.split('/python/')[1].split(':')[0].replace('/', '.')
lib = dep.split(':')[1]
if pkg.endswith('.{}'.format(lib)):
pkg = pkg.replace('.{}'.format(lib), '')
disabled_by_pkg.setdefault(pkg, ())
disabled_by_pkg[pkg] += (lib,)
else:
disabled_by_pkg.setdefault(pkg, ())
disabled_by_pkg[pkg] += (lib,)
replacements = collections.OrderedDict(TF_REPLACEMENTS)
for pkg, disabled in disabled_by_pkg.items():
replacements.update({
'from tensorflow_probability.python.{}.{} '.format(pkg, item):
'# from tensorflow_probability.python.{}.{} '.format(pkg, item)
for item in disabled
})
replacements.update({
'from tensorflow_probability.python.{} import {}'.format(pkg, item):
'# from tensorflow_probability.python.{} import {}'.format(pkg, item)
for item in disabled
})
replacements.update({
'tensorflow_probability.python.{}'.format(lib):
'tensorflow_probability.substrates.numpy.{}'.format(lib)
for lib in LIBS
})
replacements.update({
'tensorflow_probability.python import {} as'.format(lib):
'tensorflow_probability.substrates.numpy import {} as'.format(lib)
for lib in LIBS
})
replacements.update({
'tensorflow_probability.python import {}'.format(lib):
'tensorflow_probability.substrates.numpy import {}'.format(lib)
for lib in LIBS
})
replacements.update({
# Permits distributions.internal, psd_kernels.internal.
# 'as psd_kernels as': 'as',
})
replacements.update({
'tensorflow_probability.python.internal.{}'.format(internal):
'tensorflow_probability.substrates.numpy.internal.{}'.format(internal)
for internal in INTERNALS
})
# pylint: disable=g-complex-comprehension
replacements.update({
'tensorflow_probability.python.internal import {}'.format(internal):
'tensorflow_probability.substrates.numpy.internal import {}'.format(
internal)
for internal in INTERNALS
})
replacements.update({
'tensorflow.python.ops import {}'.format(private):
'tensorflow_probability.python.internal.backend.numpy import private'
' as {}'.format(private)
for private in PRIVATE_TF_PKGS
})
replacements.update({
'tensorflow.python.framework.ops import {}'.format(
private):
'tensorflow_probability.python.internal.backend.numpy import private'
' as {}'.format(private)
for private in PRIVATE_TF_PKGS
})
# pylint: enable=g-complex-comprehension
# TODO(bjp): Delete this block after TFP uses stateless samplers.
replacements.update({
'tf.random.{}'.format(sampler): 'tf.random.stateless_{}'.format(sampler)
for sampler in SAMPLERS
})
replacements.update({
'self._maybe_assert_dtype': '# self._maybe_assert_dtype',
'SKIP_DTYPE_CHECKS = False': 'SKIP_DTYPE_CHECKS = True',
'@test_util.test_all_tf_execution_regimes':
'# @test_util.test_all_tf_execution_regimes',
'@test_util.test_graph_and_eager_modes':
'# @test_util.test_graph_and_eager_modes',
'@test_util.test_graph_mode_only':
'# @test_util.test_graph_mode_only',
'TestCombinationsTest(test_util.TestCase)':
'TestCombinationsDoNotTest(object)',
'@six.add_metaclass(TensorMetaClass)':
'# @six.add_metaclass(TensorMetaClass)',
})
filename = argv[1]
contents = open(filename, encoding='utf-8').read()
if '__init__.py' in filename:
# Comment out items from __all__.
for pkg, disabled in disabled_by_pkg.items():
for item in disabled:
def disable_all(name):
replacements.update({
'"{}"'.format(name): '# "{}"'.format(name),
'\'{}\''.format(name): '# \'{}\''.format(name),
})
if 'from tensorflow_probability.python.{} import {}'.format(
pkg, item) in contents:
disable_all(item)
for segment in contents.split(
'from tensorflow_probability.python.{}.{} import '.format(
pkg, item)):
disable_all(segment.split('\n')[0])
for find, replace in replacements.items():
contents = contents.replace(find, replace)
disabler = 'JAX_DISABLE' if FLAGS.numpy_to_jax else 'NUMPY_DISABLE'
lines = contents.split('\n')
for i, l in enumerate(lines):
if disabler in l:
lines[i] = '# {}'.format(l)
contents = '\n'.join(lines)
if not FLAGS.numpy_to_jax:
contents = contents.replace('NUMPY_MODE = False', 'NUMPY_MODE = True')
if FLAGS.numpy_to_jax:
contents = contents.replace('tfp.substrates.numpy', 'tfp.substrates.jax')
contents = contents.replace('substrates.numpy', 'substrates.jax')
contents = contents.replace('backend.numpy', 'backend.jax')
contents = contents.replace('def _call_jax', 'def __call__')
contents = contents.replace('JAX_MODE = False', 'JAX_MODE = True')
contents = contents.replace('SKIP_DTYPE_CHECKS = True',
'SKIP_DTYPE_CHECKS = False')
is_test = lambda x: x.endswith('_test.py') or x.endswith('_test_util.py')
if is_test(argv[1]): # Test-only rewrites.
contents = contents.replace(
'tf.test.main()',
'from jax.config import config; '
'config.update("jax_enable_x64", True); '
'config.enable_omnistaging(); '
'tf.test.main()')
print('# ' + '@' * 78)
print('# This file is auto-generated by substrates/meta/rewrite.py')
print('# It will be surfaced by the build system as a symlink at:')
substrate = 'jax' if FLAGS.numpy_to_jax else 'numpy'
print('# `tensorflow_probability/substrates/{substrate}/{path}`'.format(
substrate=substrate, path=filename.split('/python/')[1]))
print('# For more info, see substrate_runfiles_symlinks in build_defs.bzl')
print('# ' + '@' * 78)
print('\n# (This notice adds 10 to line numbering.)\n\n')
|
if __name__ == '__main__':
app.run(main) | print(contents, file=open(1, 'w', encoding='utf-8', closefd=False))
|
clippy1.rs | // clippy1.rs
// The Clippy tool is a collection of lints to analyze your code
// so you can catch common mistakes and improve your Rust code.
//
// For these exercises the code will fail to compile when there are clippy warnings
// check clippy's suggestions from the output to solve the exercise.
// Execute `rustlings hint clippy1` for hints :)
use std::{f32, f32::consts::PI};
fn | () {
let radius = 5.00f32;
let area = PI * f32::powi(radius, 2);
println!(
"The area of a circle with radius {:.2} is {:.5}!",
radius, area
)
}
| main |
capture_device_capabilities.py | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code generator DeviceCapabilities literal."""
import argparse
import ctypes
import glob
import evdev
import os
import sys
TEST_DATA_GROUP_SIZE = 64 # Aligns with sysfs on 64-bit devices.
def | (bits):
return (bits + TEST_DATA_GROUP_SIZE - 1) // TEST_DATA_GROUP_SIZE
# As in /sys/class/input/input*/capabilities/*
def serialize_bitfield(bitfield, max_bit):
result = ""
group_count = bits_to_groups(max_bit)
for group in range(group_count - 1, -1, -1):
group_val = 0
for group_bit in range(TEST_DATA_GROUP_SIZE):
code = group * TEST_DATA_GROUP_SIZE + group_bit
if code in bitfield:
group_val |= (1 << group_bit)
if group_val or result:
result += '%x' % group_val
if group:
result += ' '
if not result:
return '0'
return result
def dump_absinfo(out, capabilities, identifier):
out.write('const DeviceAbsoluteAxis %s[] = {\n' % identifier)
for code, absinfo in capabilities[evdev.ecodes.EV_ABS]:
# Set value := 0 to make it deterministic.
code_name = evdev.ecodes.bytype[evdev.ecodes.EV_ABS][code]
absinfo_struct = (0, absinfo.min, absinfo.max, absinfo.fuzz, absinfo.flat,
absinfo.resolution)
data = (code_name,) + absinfo_struct
out.write(' {%s, {%d, %d, %d, %d, %d, %d}},\n' % data)
out.write('};\n')
def dump_capabilities(out, dev, identifier=None):
capabilities = dev.capabilities()
has_abs = evdev.ecodes.EV_ABS in capabilities
basename = os.path.basename(dev.fn)
sysfs_path = '/sys/class/input/' + basename
if not identifier:
identifier = 'kInputDevice_' + basename
# python-evdev is missing some features
uniq = open(sysfs_path + '/device/uniq', 'r').read().strip()
prop = open(sysfs_path + '/device/properties', 'r').read().strip()
ff = open(sysfs_path + '/device/capabilities/ff', 'r').read().strip()
# python-evdev parses the id wrong.
bustype = open(sysfs_path + '/device/id/bustype', 'r').read().strip()
vendor = open(sysfs_path + '/device/id/vendor', 'r').read().strip()
product = open(sysfs_path + '/device/id/product', 'r').read().strip()
version = open(sysfs_path + '/device/id/version', 'r').read().strip()
# python-evdev drops EV_REP from the event set.
ev = open(sysfs_path + '/device/capabilities/ev', 'r').read().strip()
if ctypes.sizeof(ctypes.c_long()) != 8:
# /sys/class/input/*/properties format is word size dependent.
# Could be fixed by regrouping but for now, just raise an error.
raise ValueError("Must be run on 64-bit machine")
key_bits = capabilities.get(evdev.ecodes.EV_KEY, [])
rel_bits = capabilities.get(evdev.ecodes.EV_REL, [])
abs_bits = [abs[0] for abs in capabilities.get(evdev.ecodes.EV_ABS, [])]
msc_bits = capabilities.get(evdev.ecodes.EV_MSC, [])
sw_bits = capabilities.get(evdev.ecodes.EV_SW, [])
led_bits = capabilities.get(evdev.ecodes.EV_LED, [])
fields = [
('path', os.path.realpath(sysfs_path)),
('name', dev.name),
('phys', dev.phys),
('uniq', uniq),
('bustype', bustype),
('vendor', vendor),
('product', product),
('version', version),
('prop', prop),
('ev', ev),
('key', serialize_bitfield(key_bits, evdev.ecodes.KEY_CNT)),
('rel', serialize_bitfield(rel_bits, evdev.ecodes.REL_CNT)),
('abs', serialize_bitfield(abs_bits, evdev.ecodes.ABS_CNT)),
('msc', serialize_bitfield(msc_bits, evdev.ecodes.MSC_CNT)),
('sw', serialize_bitfield(sw_bits, evdev.ecodes.SW_CNT)),
('led', serialize_bitfield(led_bits, evdev.ecodes.LED_CNT)),
('ff', ff),
]
if has_abs:
absinfo_identifier = identifier + 'AbsAxes'
dump_absinfo(out, capabilities, absinfo_identifier)
out.write('const DeviceCapabilities %s = {\n' % identifier)
for name, val in fields:
out.write(' /* %s */ "%s",\n' % (name, val))
if has_abs:
out.write(' %s,\n' % absinfo_identifier)
out.write(' base::size(%s),\n' % absinfo_identifier)
out.write('};\n')
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument('device', nargs='?')
parser.add_argument('identifier', nargs='?')
args = parser.parse_args(argv)
if args.device:
devices = [args.device]
else:
devices = glob.glob('/dev/input/event*')
out = sys.stdout
for device in devices:
dev = evdev.InputDevice(device)
dump_capabilities(out, dev, args.identifier)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bits_to_groups |
caculate_FFalgorithm_for_path.py | class FFalgorithm:
def __init__(self):
| '''
link1's bandwidth
''' | def caculate_bandwidth(S7_PORT2,S5_PORT1,S1_PORT2,S9_PORT4,S11_PORT3,S5_PORT2,S2_PORT4,S7_PORT1,S6_PORT1,S3_PORT1,S10_PORT3,S6_PORT2,S4_PORT2): |
reactor.rs | use crate::{
heartbeat::Heartbeat,
socket_state::{SocketEvent, SocketStateHandle},
tcp::TcpStream,
thread::ThreadHandle,
Result,
};
use log::trace;
use mio::{Events, Interest, Poll, Token, Waker};
use std::{
collections::HashMap,
fmt,
sync::{
atomic::{AtomicBool, AtomicUsize, Ordering},
Arc,
},
thread::Builder as ThreadBuilder,
};
pub type Slot = usize;
pub trait ReactorBuilder: fmt::Debug + Send + Sync {
fn build(&self, heartbeat: Heartbeat) -> Result<Box<dyn Reactor + Send>>;
}
pub trait Reactor: fmt::Debug + Send {
fn register(&mut self, socket: &mut TcpStream, socket_state: SocketStateHandle)
-> Result<Slot>;
fn handle(&self) -> Box<dyn ReactorHandle + Send> {
Box::new(DummyHandle)
}
#[allow(clippy::boxed_local)]
fn start(self: Box<Self>) -> Result<ThreadHandle> |
}
pub trait ReactorHandle {
fn shutdown(&self) -> Result<()> {
Ok(())
}
fn start_heartbeat(&self) {}
fn poll_read(&self, _slot: Slot) {}
fn poll_write(&self, _slot: Slot) {}
}
pub(crate) struct DefaultReactorBuilder;
impl ReactorBuilder for DefaultReactorBuilder {
fn build(&self, heartbeat: Heartbeat) -> Result<Box<dyn Reactor + Send>> {
Ok(Box::new(DefaultReactor::new(heartbeat)?))
}
}
pub(crate) struct DefaultReactor {
slot: AtomicUsize,
poll: Poll,
heartbeat: Heartbeat,
slots: HashMap<Token, SocketStateHandle>,
handle: DefaultReactorHandle,
}
#[derive(Clone)]
pub(crate) struct DefaultReactorHandle {
run: Arc<AtomicBool>,
waker: Arc<Waker>,
}
impl Reactor for DefaultReactor {
fn handle(&self) -> Box<dyn ReactorHandle + Send> {
Box::new(self.handle.clone())
}
fn register(
&mut self,
socket: &mut TcpStream,
socket_state: SocketStateHandle,
) -> Result<usize> {
let token = Token(self.slot.fetch_add(1, Ordering::SeqCst));
self.poll
.registry()
.register(socket, token, Interest::READABLE | Interest::WRITABLE)?;
self.slots.insert(token, socket_state);
Ok(token.0)
}
fn start(mut self: Box<Self>) -> Result<ThreadHandle> {
Ok(ThreadHandle::new(
ThreadBuilder::new()
.name("lapin-reactor".into())
.spawn(move || {
let mut events = Events::with_capacity(16);
while self.should_run() {
self.run(&mut events)?;
}
Ok(())
})?,
))
}
}
impl DefaultReactor {
fn new(heartbeat: Heartbeat) -> Result<Self> {
let poll = Poll::new()?;
let handle = DefaultReactorHandle {
run: Arc::new(AtomicBool::new(true)),
waker: Arc::new(Waker::new(poll.registry(), Token(0))?),
};
Ok(Self {
slot: AtomicUsize::new(1),
poll,
heartbeat,
slots: HashMap::default(),
handle,
})
}
fn should_run(&self) -> bool {
self.handle.run.load(Ordering::SeqCst)
}
fn run(&mut self, events: &mut Events) -> Result<()> {
trace!("reactor poll");
self.poll.poll(events, self.heartbeat.poll_timeout()?)?;
trace!("reactor poll done");
for event in events.iter() {
if let Some(socket) = self.slots.get(&event.token()) {
if event.is_error() {
socket.send(SocketEvent::Error);
}
if event.is_read_closed() || event.is_write_closed() {
socket.send(SocketEvent::Closed);
}
if event.is_readable() {
socket.send(SocketEvent::Readable);
}
if event.is_writable() {
socket.send(SocketEvent::Writable);
}
}
}
Ok(())
}
}
impl ReactorHandle for DefaultReactorHandle {
fn shutdown(&self) -> Result<()> {
self.run.store(false, Ordering::SeqCst);
self.waker.wake()?;
Ok(())
}
}
impl fmt::Debug for DefaultReactorBuilder {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DefaultReactorBuilder").finish()
}
}
impl fmt::Debug for DefaultReactor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("DefaultReactor").finish()
}
}
#[derive(Clone)]
struct DummyHandle;
impl ReactorHandle for DummyHandle {}
| {
Ok(ThreadHandle::default())
} |
EWMA.py | from pyspark.mllib.common import _java2py, _py2java
from pyspark.mllib.linalg import Vectors
from _model import PyModel
"""
Fits an Exponentially Weight Moving Average model (EWMA) (aka. Simple Exponential Smoothing) to
a time series. The model is defined as S_t = (1 - a) * X_t + a * S_{t - 1}, where a is the
smoothing parameter, X is the original series, and S is the smoothed series. For more
information, please see https://en.wikipedia.org/wiki/Exponential_smoothing.
"""
def | (ts, sc=None):
"""
Fits an EWMA model to a time series. Uses the first point in the time series as a starting
value. Uses sum squared error as an objective function to optimize to find smoothing parameter
The model for EWMA is recursively defined as S_t = (1 - a) * X_t + a * S_{t-1}, where
a is the smoothing parameter, X is the original series, and S is the smoothed series
Note that the optimization is performed as unbounded optimization, although in its formal
definition the smoothing parameter is <= 1, which corresponds to an inequality bounded
optimization. Given this, the resulting smoothing parameter should always be sanity checked
https://en.wikipedia.org/wiki/Exponential_smoothing
Parameters
----------
ts:
the time series to which we want to fit an EWMA model as a Numpy array
Returns an EWMA model
"""
assert sc != None, "Missing SparkContext"
jvm = sc._jvm
jmodel = jvm.com.cloudera.sparkts.models.EWMA.fitModel(
_py2java(sc, Vectors.dense(ts))
)
return EWMAModel(jmodel=jmodel, sc=sc)
class EWMAModel(PyModel):
def __init__(self, smoothing=0.0, jmodel=None, sc=None):
assert sc != None, "Missing SparkContext"
self._ctx = sc
if jmodel == None:
self._jmodel = self._ctx._jvm.com.cloudera.sparkts.models.EWMAModel(
smoothing
)
else:
self._jmodel = jmodel
self.smoothing = self._jmodel.smoothing()
| fit_model |
reduce.go | // Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package querynode
/*
#cgo CFLAGS: -I${SRCDIR}/../core/output/include
#cgo LDFLAGS: -L${SRCDIR}/../core/output/lib -lmilvus_segcore -Wl,-rpath=${SRCDIR}/../core/output/lib
#include "segcore/plan_c.h"
#include "segcore/reduce_c.h"
*/
import "C"
import (
"errors"
"unsafe"
)
// SearchResult contains a pointer to the search result in C++ memory
type SearchResult struct {
cSearchResult C.CSearchResult
}
// MarshaledHits contains a pointer to the marshaled hits in C++ memory
type MarshaledHits struct {
cMarshaledHits C.CMarshaledHits
}
// RetrieveResult contains a pointer to the retrieve result in C++ memory
type RetrieveResult struct {
cRetrieveResult C.CRetrieveResult
}
func reduceSearchResultsAndFillData(plan *SearchPlan, searchResults []*SearchResult, numSegments int64) error {
if plan.cSearchPlan == nil {
return errors.New("nil search plan")
}
cSearchResults := make([]C.CSearchResult, 0) | for _, res := range searchResults {
cSearchResults = append(cSearchResults, res.cSearchResult)
}
cSearchResultPtr := (*C.CSearchResult)(&cSearchResults[0])
cNumSegments := C.int64_t(numSegments)
status := C.ReduceSearchResultsAndFillData(plan.cSearchPlan, cSearchResultPtr, cNumSegments)
if err := HandleCStatus(&status, "ReduceSearchResultsAndFillData failed"); err != nil {
return err
}
return nil
}
func reorganizeSearchResults(searchResults []*SearchResult, numSegments int64) (*MarshaledHits, error) {
cSearchResults := make([]C.CSearchResult, 0)
for _, res := range searchResults {
cSearchResults = append(cSearchResults, res.cSearchResult)
}
cSearchResultPtr := (*C.CSearchResult)(&cSearchResults[0])
var cNumSegments = C.int64_t(numSegments)
var cMarshaledHits C.CMarshaledHits
status := C.ReorganizeSearchResults(&cMarshaledHits, cSearchResultPtr, cNumSegments)
if err := HandleCStatus(&status, "ReorganizeSearchResults failed"); err != nil {
return nil, err
}
return &MarshaledHits{cMarshaledHits: cMarshaledHits}, nil
}
func (mh *MarshaledHits) getHitsBlobSize() int64 {
res := C.GetHitsBlobSize(mh.cMarshaledHits)
return int64(res)
}
func (mh *MarshaledHits) getHitsBlob() ([]byte, error) {
byteSize := mh.getHitsBlobSize()
result := make([]byte, byteSize)
cResultPtr := unsafe.Pointer(&result[0])
C.GetHitsBlob(mh.cMarshaledHits, cResultPtr)
return result, nil
}
func (mh *MarshaledHits) hitBlobSizeInGroup(groupOffset int64) ([]int64, error) {
cGroupOffset := (C.int64_t)(groupOffset)
numQueries := C.GetNumQueriesPerGroup(mh.cMarshaledHits, cGroupOffset)
result := make([]int64, int64(numQueries))
cResult := (*C.int64_t)(&result[0])
C.GetHitSizePerQueries(mh.cMarshaledHits, cGroupOffset, cResult)
return result, nil
}
func deleteMarshaledHits(hits *MarshaledHits) {
C.DeleteMarshaledHits(hits.cMarshaledHits)
}
func deleteSearchResults(results []*SearchResult) {
for _, result := range results {
C.DeleteSearchResult(result.cSearchResult)
}
} | |
test_submission.py | """Tests for submission functionality-- primarily if a submission
form is validated properly and passed to the backend.
"""
import pytest
import pathlib, json
from src.model import Activity, Submission
# Load in sample submissions and their expected status codes if they were submitted:
with open(pathlib.Path(__file__).parent.absolute()/'data'/'sample_submissions_base.json') as samples:
json_data = json.load(samples)
sample_submissions = [pytest.param(x['submission'], x['status_code'], x.get('broken_field'), id=x['id']) for x in json_data['sample_submissions']]
@pytest.mark.parametrize("submission, expected_code, problematic_field", sample_submissions)
def test_submissions(app, client, submission, expected_code, problematic_field):
|
@pytest.mark.parametrize("max_players", [0, None])
def test_blank_max_players(app, client, max_players):
"""Leaving the max number of players blank (or 0) should result
in a NULL value going to the database.
"""
submission = {
"name": "TestMaxPlayersBlank",
"url": "https://google.ca",
"description": "test desc",
"min_players": 4,
"max_players": max_players,
"paid": False,
"submitted_by": "Matt"
}
rv = client.post('/v1/games/suggest', json=submission)
assert rv.status_code == 200
with app.app_context():
# Physically go into the submissions table and ensure the created
# record has NULL max_players
game = Submission.query.filter_by(name="TestMaxPlayersBlank").first()
assert game.max_players is None
| """Tests some typical submissions to make sure the error code raised
is the one expected (422 in a validation error) and that any validation
errors raised correspond to the right field that failed validation.
"""
rv = client.post('/v1/games/suggest', json=submission)
response_json = rv.get_json()
assert rv.status_code == expected_code
# In the response, validation issues are given in the 'issues' object, like:
# .. 'issues': { 'url': ['Field may not be null'] }
# So, we check to see if the issues object contains the field we expected
# to trigger a validation error.
if 'issues' in response_json and problematic_field:
assert problematic_field in response_json['issues']
# If we expected a 200, was the submission actually added?
if expected_code == 200:
with app.app_context():
game = Submission.query.filter_by(name=submission['name']).first()
assert game is not None |
aso-google-play.user.js | // ==UserScript==
// @name ASO Google Play
// @namespace https://github.com/ayoubfletcher
// @version 2.2
// @description Your tool kit to speed up your aso abilities.
// @author Ayoub Fletcher
// @match https://play.google.com/store/apps/details?id=*
// @grant GM_xmlhttpRequest
// @require http://code.jquery.com/jquery-1.12.4.min.js
// @connect appbrain.com
// @downloadURL https://github.com/ayoubfletcher/ASO-Google-Play/raw/master/aso-google-play.user.js
// @updateURL https://github.com/ayoubfletcher/ASO-Google-Play/raw/master/aso-google-play.user.js
// ==/UserScript==
/*******************************************************************************************************************************
* █████╗ ██╗ ██╗ ██████╗ ██╗ ██╗██████╗ ███████╗██╗ ███████╗████████╗ ██████╗██╗ ██╗███████╗██████╗ *
* ██╔══██╗╚██╗ ██╔╝██╔═══██╗██║ ██║██╔══██╗ ██╔════╝██║ ██╔════╝╚══██╔══╝██╔════╝██║ ██║██╔════╝██╔══██╗ *
* ███████║ ╚████╔╝ ██║ ██║██║ ██║██████╔╝ █████╗ ██║ █████╗ ██║ ██║ ███████║█████╗ ██████╔╝ *
* ██╔══██║ ╚██╔╝ ██║ ██║██║ ██║██╔══██╗ ██╔══╝ ██║ ██╔══╝ ██║ ██║ ██╔══██║██╔══╝ ██╔══██╗ *
* ██║ ██║ ██║ ╚██████╔╝╚██████╔╝██████╔╝ ██║ ███████╗███████╗ ██║ ╚██████╗██║ ██║███████╗██║ ██║ *
* ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═════╝ ╚═╝ ╚══════╝╚══════╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝ *
* (~‾▿‾)~ DONATE ME PLEASE, I NEED MONEY TO LIVE (ง •̀_•́)ง (╥_╥) *
* WANT TO EDIT THE SCRIPT READ THE STUFF IN THE BOTTOM IF YOU BREAK IT DON'T CRY NO MONEY NO FIX *
* SEND MONEY OR I WILL MAKE YOUR ACCOUNT DISAPPEAR YOHAAA (∩`-´)⊃━☆゚.*・。゚ *
* (\____/) *
* ( ͡ ͡° ͜ ʖ ͡ ͡°) *
* \╭☞ \╭☞ HERE'S MY PAYAPL: https://www.paypal.me/ayoubfletcher *
* I KNOW I'M WASTING TIME WRITING THAT CRAP BECAUSE YOU GUYS ARE STINGY *
* IF YOU FOUND IT OFFSENSIVE DON'T BLAME SEND ME MONEY ¯\_(ツ)_/¯. *
* DAAH I WROTE THAT STUFF TO MAKE MYSELF LOOK PROFESSIONAL (⌐■_■) KAPPA AND TRUST ME THOSE EMOJIS SO FUN TO PLAY WITH. *
* OKAY WHATEVER HERE'S THE GITHUB REPO FOR THAT CRAPPY SCRIPT IF THERE WAS AN UPDATE I WOULD POST IT THERE: *
* (╯°□°)╯︵ -> https://github.com/ayoubfletcher/ASO-Google-Play *
* ( •_•)>⌐■-■ FINAL WORDS IF YOU CANNOT BUY ME A COFFEE BECAUSE ARE "STINGY" WISH ME ATLEAST A GOOD LUCK *
* JUST KIDDING GOOD LUCK BUDDY SORRY FOR WASTING YOUR TIME BUT I'M JUST HAVE FUN PLAYING WITH THOSE EMOJIS “ヽ(´▽`)ノ”. *
* IF YOU GOT ANY ISSUE WITH THE SCRIPT SEND ME A MESSAGE THROUGH PAYPAL (¬‿¬) OR THROUGH GITHUB REPO. *
*******************************************************************************************************************************/
/**
* @Note For faster responds consider disabling developers information or app information, or whatever you are free.
*/
/**
* Parameters to toggle the extraction of the fields
* @param short_description To turn on extracting the short description of the app: value [true / false].
* @param feature_image To turn on extracting the feature image of app: value [true / false].
* @param app turn on extracting the app information: value [true / false].
* @param dev To turn on extracting the developer app information and inject into the page: value [true / false].
*/
const controls = {
short_description: true,
feature_image: true,
app: true,
dev: true
}
// To get rid of the warning of scripts
var $ = window.$;
/**
* Static urls don't change.
*/
const app_brain_url = "https://www.appbrain.com/app/";
const dev_barin_url = "https://www.appbrain.com/dev/";
let app_not_found = false;
// The status of the generating
let processing = false;
// Interval checker to reload the page if an ajax call has been accrued no need to change
const INTERVAL_TIMER_CHECKER = 500;
/**
* HTML Template for styling and showing the elements
* If you want to style just do it
*/
const styles_html = "<style>#aso-data{padding: 10px 0; text-align: center;}#aso-data i{margin-top: 10px;}#aso-data-loading{padding: 10px; text-align: center; margin-bottom: 10px; font-weight: bold;}#honor{color: #2c1912;}#aso-data i a{color: #546e7a; font-weight: bold; -webkit-transition: all 200ms; /* Safari */ transition: all 200ms;}#aso-data i a:hover{color: #29434e;}#app-data, #dev-data{text-align: center; padding: 5px;}#aso-data li{display: inline-block; width: 110px; margin: 0 3px; margin-bottom: 10px; background: #F7F7F7}.icon{height: 50px; background-position: 50% 50%; background-repeat: no-repeat; background-size: contain; position: relative}.app-age{background-color: #04B5B0; background-image: url('https://www.appbrain.com/static/images/infotile-age.png')}.app-installs{background-color: #069bf7; background-image: url('https://www.appbrain.com/static/images/infotile-download.png')}.app-ranking{background-color: #F4460A; background-position: 0 50%; background-image: url('https://www.appbrain.com/static/images/infotile-ranking.png')}.app-size{background-color: #EA6E00; background-image: url('https://www.appbrain.com/static/images/infotile-size.png')}.app-update{background-color: #e6a509; background-image: url('https://www.appbrain.com/static/images/infotile-update.png')}.app-android{background-color: #EB008B; background-image: url('https://www.appbrain.com/static/images/infotile-android.png')}.dev-age{background-color: #1E88E5; background-image: url('https://www.appbrain.com/static/images/infotile-age.png')}.dev-total-installs{background-color: #E65100; background-image: url('https://www.appbrain.com/static/images/infotile-download.png')}.dev-total-apps{background-color: #EB008B; background-image: url('https://www.appbrain.com/static/images/infotile-appcount.png')}.dev-average-rating{background-color: #80af3f; background-image: url('https://www.appbrain.com/static/images/infotile-rating.png')}.dev-recent-rating{background-color: #80af3f; background-image: 
url('https://www.appbrain.com/static/images/infotile-recent-rating.png')}.dev-total-rating{background-color: #80af3f; background-image: url('https://www.appbrain.com/static/images/infotile-rating-count.png')}#aso-data .value{text-align: center; padding: 15px 5px; font-family: Arial; font-size: 12px; font-weight: bold; color: #666}#aso-data .title{text-align: center; padding: 5px; font-family: Arial; font-size: 13px; color: #999; border-top: 1px solid #E1E1E1}#aso-data .header{font-size: 18px; padding: 20px 10px; color: #666; text-transform: uppercase; font-family: Arial; font-weight: bold}#description{width: 100%; float: left; margin-bottom: 20px;}#description .content{color: #fff; padding: 10px; background: #666; font-family: Arial; font-size: 15px; text-align: center}#download_apk{font-size: 14px; height: 36px; line-height: 34px; padding: 0 20px; -webkit-border-radius: 4px; border-radius: 4px; -webkit-box-shadow: 0 1px 0 rgba(0, 0, 0, 0.05); box-shadow: 0 1px 0 rgba(0, 0, 0, 0.05); -webkit-box-sizing: border-box; box-sizing: border-box; -webkit-transition: all .2s; transition: all .2s; -webkit-user-select: none; background-color: #fff; border: 1px solid rgba(0, 0, 0, 0.17); color: #ffffff; cursor: pointer; display: inline-block; font-family: 'Roboto', sans-serif; font-size: 14px; font-style: normal; font-weight: 500; height: 36px; line-height: 34px; min-width: 40px; padding: 0 20px; position: relative; text-align: center; text-decoration: none; white-space: nowrap; vertical-align: middle; background-color: #546e7a;}#download_apk:hover{-webkit-box-shadow: 0 1px 0 0 rgba(0, 0, 0, 0.27); box-shadow: 0 1px 0 0 rgba(0, 0, 0, 0.27); cursor: pointer; background-color: #29434e;}#aso-feature-image{padding-bottom: 20px; text-align: center;}#aso-feature-image i{display: block; margin-bottom: 10px; font-weight: bold;}</style>";
const aso_html_template = "<div id='aso-data'>{%DATA_HTML%} <i>Made by <a href='https://github.com/ayoubfletcher/ASO-Google-Play'>Ayoub Fletcher</a> with LOVE ❤️.</i><p>Honorable Mention <b id='honor'>Re-skinning</b> 🕷️.</p></div>";
const desc_html_template = "<ul id='description'><div class='content'>{%SHORT_DESCRIPTION%}</div></ul>";
const aso_html_loading_template = "<div id='aso-data-loading'>LOADING DATA 👾 ...</div>"
const app_html_template = "<ul id='app-data'> <div class='header'> App Information</div><li> <div class='top'> <div class='icon app-installs'></div><div class='value'>{%APP_INSTALLS%}</div><div class='title'>App Installs</div></div></li><li> <div class='top'> <div class='icon app-age'></div><div class='value'>{%APP_AGE%}</div><div class='title'>App Age</div></div></li><li> <div class='top'> <div class='icon app-ranking'></div><div class='value'>{%RANKING%}</div><div class='title'>Ranking</div></div></li><li> <div class='top'> <div class='icon app-size'></div><div class='value'>{%APP_SIZE%}</div><div class='title'>App Size</div></div></li><li> <div class='top'> <div class='icon app-update'></div><div class='value'>{%LAST_UPDATE%}</div><div class='title'>Last Update</div></div></li><li> <div class='top'> <div class='icon app-android'></div><div class='value'>{%ANDROID_VERSION%}</div><div class='title'>Android Version</div></div></li></ul>";
const dev_html_template = "<ul id='dev-data'> <div class='header'> Developer Information</div><li> <div class='top'> <div class='icon dev-age'></div><div class='value'>{%DEVELOPER_AGE%}</div><div class='title'>Active since</div></div></li><li> <div class='top'> <div class='icon dev-total-apps'></div><div class='value'>{%TOTAL_APPS%}</div><div class='title'>Total apps</div></div></li><li> <div class='top'> <div class='icon dev-total-installs'></div><div class='value'>{%TOTAL_INSTALLS%}</div><div class='title'>Installs</div></div></li><li> <div class='top'> <div class='icon dev-average-rating'></div><div class='value'>{%AVERAGE_RATING%}</div><div class='title'>Average rating</div></div></li><li> <div class='top'> <div class='icon dev-recent-rating'></div><div class='value'>{%RECENT_RATING%}</div><div class='title'>Recent rating</div></div></li><li> <div class='top'> <div class='icon dev-total-rating'></div><div class='value'>{%TOTAL_RATING%}</div><div class='title'>Rating total</div></div></li></ul>";
const download_app_template = "<a id='download_apk' href='https://apkpure.com/{%PACKAGE_NAME%}/{%PACKAGE_NAME%}/download?from=details'>Download APK</a>";
const feature_image_template = "<div id='aso-feature-image'><i>Feature Image:</i><img src='{%FEATURE_IMAGE%}' /></div>";
/**
* Messages feedback change if you want to.
*/
const app_not_found_msg = "SORRY, APP PAGE NOT FOUND ON APPBRAIN 😓!";
const fetching_app_info_msg = 'GETTING APP INFORMATIONS FROM APPBRAIN 👺 ...';
const fetching_dev_info_msg = 'GETTING DEVELOPER INFORMATIONS FROM APPBRAIN 🕵️♀️ ...';
/**
* Promise method to get http request
* @param url Target url
*/
function getHttpRequest(url) {
return new Promise((resolve, reject) => {
GM_xmlhttpRequest({
method: "GET", url: url, onload: function (response) {
if (response.status == 200) {
resolve(response.responseText);
} else {
reject('There was an error requesting: ' + url);
}
}
});
})
}
/**
* Extract fields from selector of appbrain
* @param selector Selector of html
*/
function extractField(selector) {
if(selector.find(".infotile-text-solo").length) {
return selector.find(".infotile-text-solo").text().trim();
} else {
return selector.find(".infotile-text").text().trim() + " " + selector.find(".infotile-subtext").text().trim();
}
}
/**
* Generte the aso data
*/
function generateASO() {
// Inject loading
let titleElements = $('h1');
const elemTarget = $(titleElements[titleElements.length-2]).parent().parent().parent().parent().parent()[0];
const aso_html = aso_html_template.replace('{%DATA_HTML%}', aso_html_loading_template);
// Injecting styles
$(styles_html).insertAfter(elemTarget);
$(aso_html).insertAfter(elemTarget);
// Get the pacakge name and the developer name
const package_name = location.href.split("?id=")[1].split("&")[0];
const developer = $($('a[itemprop="genre"]').parent().parent().find('a')[0]).text();
// Initialize containers of the data
var app = null;
var dev = null;
// Get the data
new Promise((resolve, reject) => {
// Extract app information
if(controls.app) {
$('#aso-data-loading').text(fetching_app_info_msg)
getHttpRequest(app_brain_url+package_name)
.then(result => {
if($(result).find('h1').text() == 'Uh oh! Page not found!') {
reject();
} else {
let data = $(result).find('.infotiles');
// extract the data of app
app = {
installs: extractField(data.find(".infotile-top-installs").parent()),
age: extractField(data.find(".infotile-top-age").parent()),
ranking: extractField(data.find(".infotile-top-ranking").parent()),
size: extractField(data.find(".infotile-top-size").parent()),
update: extractField(data.find(".infotile-top-lastupdated").parent()),
android_version: extractField(data.find(".infotile-top-androidversion").parent()),
short_description: $(result).find('.app-short-description').text()
};
}
resolve();
}).catch(_ => {
reject();
})
} else {
resolve();
}
}).then(_ => {
// Extract developer information
if(controls.dev) {
// Get the developer ifnormation
return new Promise((resolve, reject) => {
if(!app_not_found) {
$('#aso-data-loading').text(fetching_dev_info_msg)
getHttpRequest(dev_barin_url+developer)
.then(result => {
let data = $(result).find(".infotiles");
// extract the data of developer
dev = {
age: extractField(data.find(".infotile-top-age").parent()),
total_apps: extractField(data.find(".infotile-top-appcount").parent()),
total_installs: extractField(data.find(".infotile-top-total-installs").parent()),
average_rating: extractField(data.find(".infotile-top-rating").parent()),
total_rating: extractField(data.find(".infotile-top-rating-count").parent()),
rating_recent: extractField(data.find(".infotile-top-recent-rating").parent())
};
resolve();
}).catch(_ => {
resolve();
})
}
})
} else {
return Promise.resolve();
}
})
// Inject data in to the dom
.then(_ => {
// Inject the data into page
injectData(app, dev, package_name);
})
.catch(_ => {
// Inject the data into page
app_not_found = true;
$('#aso-data-loading').text(app_not_found_msg);
injectData(app, dev, package_name);
})
}
/**
* Unescape text
* @param {str} text Text to unescape
*/
function unescapeText(text) {
const elementDiv = document.createElement('div');
elementDiv.innerHTML = text;
return elementDiv.innerText;
}
/**
* Retrieve short description of the app
*/
function shortDescription() {
const metas = document.getElementsByTagName('meta');
const scripts = document.getElementsByTagName('script');
// Check if description was protected
for(var i=0; i < scripts.length; i++) {
if(scripts[i].innerHTML.indexOf("key: 'ds:5', isError: false") > 0) {
var scriptBlock = scripts[i].innerHTML.split(",[null,");
return scriptBlock[1].substring(1, scriptBlock[1].indexOf('"]'));
}
}
// Get description if was on metas
for(let i = 0; i < metas.length; i++) {
if(metas[i].getAttribute('name') == 'description') {
return metas[i].getAttribute('content');
}
}
}
/**
* Retrieve feature image of the app
*/
function getFeatureImage() {
const metas = document.getElementsByTagName('meta');
// Get feature image if was on metas
for(let i = 0; i < metas.length; i++) {
if(metas[i].getAttribute('property') == 'og:image') {
return metas[i].getAttribute('content');
}
}
}
/**
* Inject the data into the DOM of the page
* @param app App object
* @param developer Developer obj | injectData(app, developer, packageName) {
// Add the app information
let app_html = ""
if(app != null) {
app_html = app_html_template.replace("{%APP_INSTALLS%}", app.installs)
.replace("{%APP_AGE%}", app.age)
.replace("{%RANKING%}", app.ranking)
.replace("{%APP_SIZE%}", app.size)
.replace("{%LAST_UPDATE%}", app.update)
.replace("{%ANDROID_VERSION%}", app.android_version);
}
// Adding the developer information
let dev_html = "";
if(developer != null) {
dev_html = dev_html_template
.replace("{%DEVELOPER_AGE%}", developer.age)
.replace("{%TOTAL_APPS%}", developer.total_apps)
.replace("{%TOTAL_INSTALLS%}", developer.total_installs)
.replace("{%AVERAGE_RATING%}", developer.average_rating)
.replace("{%RECENT_RATING%}", developer.rating_recent)
.replace("{%TOTAL_RATING%}", developer.total_rating);
}
// Addng the short description
let desc_html = "";
if(controls.short_description) {
desc_html = desc_html_template.replace("{%SHORT_DESCRIPTION%}", app.short_description);
}
// App not found
let app_not_found_html = "";
if(app_not_found) {
app_not_found_html = aso_html_loading_template;
}
// Conbining the data
let aso_html = aso_html_template.replace('{%DATA_HTML%}',
desc_html + // Adding the short description
app_html + // Adding the app information
dev_html + // Adding the developer information
app_not_found_html // Not found
)
// Inject result
$('#aso-data').html(aso_html);
// Add the not found message
if(app_not_found) {
$('#aso-data-loading').text(app_not_found_msg);
}
const download_apk_html = download_app_template.replace(/{%PACKAGE_NAME%}/g, packageName)
// BUG fix for weird installer element, trust me it's actually weird
const installbtn_selector = 'button[aria-label="Install"]';
let installBtnElements = null;
if(document.querySelector(installbtn_selector) !== null) {
installBtnElements = $(installbtn_selector);
} else {
installBtnElements = $('button[aria-label="Installer"]');
}
const installElem = $(installBtnElements[installBtnElements.length-1]).parent()[0];
$(download_apk_html).insertBefore(installElem);
// Inject feature image
if(controls.feature_image) {
const featureImage = getFeatureImage();
if(featureImage != null) {
// const topHeaderElem = $('#aso-data').parent().parent().parent();
const feature_image_html = feature_image_template.replace('{%FEATURE_IMAGE%}', featureImage);
$(feature_image_html).insertAfter('#aso-data');
}
}
processing = false;
}
/**
* Run method to reload the data after the ajax requests of page
*/
function run() {
// Save the current page url
let current_app_url = location.href;
// Check if the current id not the same
if(document.getElementById('aso-data') == null && !processing) {
processing = true;
generateASO()
}
setInterval(function() {
if(current_app_url != location.href) {
location.reload()
}
}, INTERVAL_TIMER_CHECKER);
}
/** Main method script */
(function() {
'use strict';
run();
})();
| ect
*/
function |
get_aks_service.py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetAKSServiceResult',
'AwaitableGetAKSServiceResult',
'get_aks_service',
]
@pulumi.output_type
class GetAKSServiceResult:
"""
Machine Learning service object wrapped into ARM resource envelope.
"""
def __init__(__self__, id=None, identity=None, location=None, name=None, properties=None, sku=None, tags=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if sku and not isinstance(sku, dict):
|
pulumi.set(__self__, "sku", sku)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
Specifies the resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.IdentityResponse']:
"""
The identity of the resource.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Specifies the location of the resource.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
Specifies the name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> Any:
"""
Service properties
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
"""
The sku of the workspace.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Contains resource tags defined as key/value pairs.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Specifies the type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetAKSServiceResult(GetAKSServiceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetAKSServiceResult(
id=self.id,
identity=self.identity,
location=self.location,
name=self.name,
properties=self.properties,
sku=self.sku,
tags=self.tags,
type=self.type)
def get_aks_service(expand: Optional[bool] = None,
resource_group_name: Optional[str] = None,
service_name: Optional[str] = None,
workspace_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAKSServiceResult:
"""
Machine Learning service object wrapped into ARM resource envelope.
:param bool expand: Set to True to include Model details.
:param str resource_group_name: Name of the resource group in which workspace is located.
:param str service_name: Name of the Azure Machine Learning service.
:param str workspace_name: Name of Azure Machine Learning workspace.
"""
__args__ = dict()
__args__['expand'] = expand
__args__['resourceGroupName'] = resource_group_name
__args__['serviceName'] = service_name
__args__['workspaceName'] = workspace_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:machinelearningservices/v20200515preview:getAKSService', __args__, opts=opts, typ=GetAKSServiceResult).value
return AwaitableGetAKSServiceResult(
id=__ret__.id,
identity=__ret__.identity,
location=__ret__.location,
name=__ret__.name,
properties=__ret__.properties,
sku=__ret__.sku,
tags=__ret__.tags,
type=__ret__.type)
| raise TypeError("Expected argument 'sku' to be a dict") |
queryToken.go | package cli
import (
"context"
"strconv"
"github.com/cosmos/cosmos-sdk/client"
"github.com/cosmos/cosmos-sdk/client/flags"
"github.com/oracleNetworkProtocol/token/x/token/types"
"github.com/spf13/cobra"
)
func CmdListToken() *cobra.Command {
cmd := &cobra.Command{
Use: "list-Token",
Short: "list all Token",
RunE: func(cmd *cobra.Command, args []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
pageReq, err := client.ReadPageRequest(cmd.Flags())
if err != nil {
return err
}
queryClient := types.NewQueryClient(clientCtx)
params := &types.QueryAllTokenRequest{
Pagination: pageReq,
}
res, err := queryClient.TokenAll(context.Background(), params)
if err != nil {
return err
}
return clientCtx.PrintProto(res)
},
}
flags.AddPaginationFlagsToCmd(cmd, cmd.Use)
flags.AddQueryFlagsToCmd(cmd)
return cmd
}
func CmdShowToken() *cobra.Command {
cmd := &cobra.Command{
Use: "show-Token [id]",
Short: "shows a Token",
Args: cobra.ExactArgs(1),
RunE: func(cmd *cobra.Command, args []string) error {
clientCtx := client.GetClientContextFromCmd(cmd)
queryClient := types.NewQueryClient(clientCtx)
id, err := strconv.ParseUint(args[0], 10, 64)
if err != nil {
return err
}
params := &types.QueryGetTokenRequest{
Id: id,
} | }
return clientCtx.PrintProto(res)
},
}
flags.AddQueryFlagsToCmd(cmd)
return cmd
} |
res, err := queryClient.Token(context.Background(), params)
if err != nil {
return err |
problem_039.py | from problems.problem import Problem
def generate_pythagorean_triples(ub: int) -> []:
# https://en.wikipedia.org/wiki/Pythagorean_triple
result = []
for a in range(1, ub):
aa = a * a
b = a + 1
c = b + 1
while c <= ub:
cc = aa + b * b
while c * c < cc:
c += 1
if c * c == cc and c <= ub:
result.append([a + b + c, a, b, c])
b += 1
return result
class | (Problem):
def calculate_answer(self) -> int:
# p = perimeter
# a < b < c = hypotenuse
answer = 0
max_perimeter = 1000
solution_counts = [0 for i in range(max_perimeter + 1)]
triangles = generate_pythagorean_triples(max_perimeter // 2 + 1)
max_solutions = 0
for triangle in triangles:
p = triangle[0]
if p <= max_perimeter:
solution_counts[p] += 1
solutions = solution_counts[p]
if (solutions > max_solutions):
max_solutions = solutions
answer = p
self.print_detail(f"p = {answer}; solutions = {solutions}")
return answer
| Problem039 |
StopRollupJobResponse.ts | /*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
export class | {
'200': {
body: { stopped: boolean }
}
}
| Response |
transfer-create.go | // Copyright 2018-2021 CERN
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// In applying this license, CERN does not waive the privileges and immunities
// granted to it by virtue of its status as an Intergovernmental Organization
// or submit itself to any jurisdiction.
package main
import (
"io"
"os"
"strconv"
"strings"
"time"
userpb "github.com/cs3org/go-cs3apis/cs3/identity/user/v1beta1"
invitepb "github.com/cs3org/go-cs3apis/cs3/ocm/invite/v1beta1"
ocmprovider "github.com/cs3org/go-cs3apis/cs3/ocm/provider/v1beta1"
rpc "github.com/cs3org/go-cs3apis/cs3/rpc/v1beta1"
ocm "github.com/cs3org/go-cs3apis/cs3/sharing/ocm/v1beta1"
provider "github.com/cs3org/go-cs3apis/cs3/storage/provider/v1beta1"
types "github.com/cs3org/go-cs3apis/cs3/types/v1beta1"
"github.com/jedib0t/go-pretty/table"
"github.com/pkg/errors"
)
func transferCreateCommand() *command | {
cmd := newCommand("transfer-create")
cmd.Description = func() string { return "create transfer between 2 sites" }
cmd.Usage = func() string { return "Usage: transfer-create [-flags] <path>" }
grantee := cmd.String("grantee", "", "the grantee, receiver of the transfer")
granteeType := cmd.String("granteeType", "user", "the grantee type, one of: user, group")
idp := cmd.String("idp", "", "the idp of the grantee, default to same idp as the user triggering the action")
cmd.Action = func(w ...io.Writer) error {
if cmd.NArg() < 1 {
return errors.New("Invalid arguments: " + cmd.Usage())
}
if *grantee == "" {
return errors.New("Grantee cannot be empty: use -grantee flag\n" + cmd.Usage())
}
if *idp == "" {
return errors.New("Idp cannot be empty: use -idp flag\n" + cmd.Usage())
}
// the resource to transfer; the path
fn := cmd.Args()[0]
ctx := getAuthContext()
client, err := getClient()
if err != nil {
return err
}
// check if invitation has been accepted
acceptedUserRes, err := client.GetAcceptedUser(ctx, &invitepb.GetAcceptedUserRequest{
RemoteUserId: &userpb.UserId{OpaqueId: *grantee, Idp: *idp},
})
if err != nil {
return err
}
if acceptedUserRes.Status.Code != rpc.Code_CODE_OK {
return formatError(acceptedUserRes.Status)
}
// verify resource stats
statReq := &provider.StatRequest{
Ref: &provider.Reference{Path: fn},
}
statRes, err := client.Stat(ctx, statReq)
if err != nil {
return err
}
if statRes.Status.Code != rpc.Code_CODE_OK {
return formatError(statRes.Status)
}
providerInfoResp, err := client.GetInfoByDomain(ctx, &ocmprovider.GetInfoByDomainRequest{
Domain: *idp,
})
if err != nil {
return err
}
resourcePermissions, pint, err := getOCMSharePerm(editorPermission)
if err != nil {
return err
}
gt := provider.GranteeType_GRANTEE_TYPE_USER
if strings.ToLower(*granteeType) == "group" {
gt = provider.GranteeType_GRANTEE_TYPE_GROUP
}
createShareReq := &ocm.CreateOCMShareRequest{
Opaque: &types.Opaque{
Map: map[string]*types.OpaqueEntry{
"permissions": &types.OpaqueEntry{
Decoder: "plain",
Value: []byte(strconv.Itoa(pint)),
},
"name": &types.OpaqueEntry{
Decoder: "plain",
Value: []byte(statRes.Info.Path),
},
"protocol": &types.OpaqueEntry{
Decoder: "plain",
Value: []byte("datatx"),
},
},
},
ResourceId: statRes.Info.Id,
Grant: &ocm.ShareGrant{
Grantee: &provider.Grantee{
Type: gt,
Id: &provider.Grantee_UserId{
UserId: &userpb.UserId{
Idp: *idp,
OpaqueId: *grantee,
},
},
},
Permissions: resourcePermissions,
},
RecipientMeshProvider: providerInfoResp.ProviderInfo,
}
createShareResponse, err := client.CreateOCMShare(ctx, createShareReq)
if err != nil {
return err
}
if createShareResponse.Status.Code != rpc.Code_CODE_OK {
if createShareResponse.Status.Code == rpc.Code_CODE_NOT_FOUND {
return formatError(statRes.Status)
}
return err
}
t := table.NewWriter()
t.SetOutputMirror(os.Stdout)
t.AppendHeader(table.Row{"#", "Owner.Idp", "Owner.OpaqueId", "ResourceId", "Permissions", "Type", "Grantee.Idp", "Grantee.OpaqueId", "ShareType", "Created", "Updated"})
s := createShareResponse.Share
t.AppendRows([]table.Row{
{s.Id.OpaqueId, s.Owner.Idp, s.Owner.OpaqueId, s.ResourceId.String(), s.Permissions.String(),
s.Grantee.Type.String(), s.Grantee.GetUserId().Idp, s.Grantee.GetUserId().OpaqueId, s.ShareType.String(),
time.Unix(int64(s.Ctime.Seconds), 0), time.Unix(int64(s.Mtime.Seconds), 0)},
})
t.Render()
return nil
}
return cmd
} |
|
message.go | package message
import (
"bufio"
"fmt"
"github.com/woojiahao/torrent.go/internal/connection"
. "github.com/woojiahao/torrent.go/internal/utility"
"io"
"log"
)
// Piece and Bitfield do not have a default length prefixes and payload sizes
// because they have dynamic sizes that have to be calculated separately
var (
lengthPrefixes = map[MessageID]int{
ChokeID: 1,
UnchokeID: 1,
InterestedID: 1,
NotInterestedID: 1,
CancelID: 13,
RequestID: 13,
HaveID: 5,
PortID: 3,
}
payloadSizes = map[MessageID]int{
ChokeID: 0,
UnchokeID: 0,
InterestedID: 0,
NotInterestedID: 0,
CancelID: 12,
RequestID: 12,
HaveID: 4,
PortID: 2,
}
// This variable is used to send a keep alive packet to the server that cannot be serialized
KeepAlive = []byte{0, 0, 0, 0}
)
// For all integers in the payload, they will be regarded as BigEndian integers with 4 bytes
type Message struct {
LengthPrefix int
MessageID MessageID
Payload []byte
}
// Serializes a message into a stream of bytes. The given lengthPrefix is ignored as it must be calculated
// given the MessageID and provided payload.
func (m *Message) Serialize() []byte {
messageID := m.MessageID
buf := make([]byte, 0)
var length int
if messageID == PieceID || messageID == BitfieldID {
length = len(m.Payload) + 1
} else {
length = lengthPrefixes[messageID]
}
lengthPrefix := ToBigEndian(length, 4)
buf = append(buf, lengthPrefix...)
buf = append(buf, byte(int(messageID)))
if m.Payload != nil {
buf = append(buf, m.Payload...)
}
return buf
}
func New(id MessageID, payload ...byte) *Message {
return &Message{MessageID: id, Payload: payload}
}
func Deserialize(b []byte) *Message {
lengthPrefix := FromBigEndian(b[:4])
// If keep alive
if lengthPrefix == 0 {
return nil
}
messageID := MessageID(b[4])
var payloadSize int
if messageID == PieceID || messageID == BitfieldID {
payloadSize = lengthPrefix - 1
} else {
payloadSize = payloadSizes[messageID]
}
payload := make([]byte, 0)
if payloadSize != 0 {
payload = b[5 : 5+payloadSize]
}
return &Message{
lengthPrefix,
messageID,
payload,
}
}
func Read(c *connection.Connection) (*Message, error) {
reader := bufio.NewReader(c.Conn)
lengthBuf := make([]byte, 4)
_, err := io.ReadFull(reader, lengthBuf)
if err != nil {
return nil, err
}
length := FromBigEndian(lengthBuf)
log.Printf("lengthBuf is %v and length is %d\n", lengthBuf, length)
if length == 0 {
return nil, nil
}
buf := make([]byte, length)
_, err = io.ReadFull(reader, buf)
if err != nil {
return nil, err
}
fullMessage := make([]byte, length+4)
copy(fullMessage[:4], lengthBuf)
copy(fullMessage[4:5], []byte{buf[0]})
copy(fullMessage[5:], buf[1:])
m := Deserialize(fullMessage)
fmt.Printf("deserialized messasge is %v\n", m)
return Deserialize(fullMessage), nil
}
// Reads a PIECE message payload into the buffer
// Copies the payload to the buffer
func (m *Message) ParseBlock(index int, buf []byte) (int, error) {
if m.MessageID != PieceID |
payload := m.Payload
if len(payload) < 8 {
return 0, fmt.Errorf("payload is an invalid length")
}
blockIndex, begin, block := FromBigEndian(payload[:4]), FromBigEndian(payload[4:8]), payload[8:]
if blockIndex != index {
return 0, fmt.Errorf("expected index %d, received %d instead", index, blockIndex)
}
// The begin either points to the end of the buffer or further
if begin >= len(buf) {
return 0, fmt.Errorf("block offset cannot be greater than or equal to the piece buffer size")
}
// If the offset + payload exceeds the piece buffer's size
if begin+len(block) > len(buf) {
return 0, fmt.Errorf("payload too large for offset %d with piece buffer length of %d", begin, len(buf))
}
copy(buf[begin:], buf)
return len(block), nil
}
func (m *Message) ParseHave() (int, error) {
if m.MessageID != HaveID {
return 0, fmt.Errorf("message is not of type HAVE")
}
if len(m.Payload) > 4 {
return 0, fmt.Errorf("HAVE messages should have payloads of length 4")
}
index := FromBigEndian(m.Payload)
return index, nil
}
func (m Message) String() string {
return fmt.Sprintf("message has length of %d, id of %d", m.LengthPrefix, m.MessageID)
}
| {
return 0, fmt.Errorf("message is not of type PIECE")
} |
test_state.go | // Copyright (C) 2019-2021, Ava Labs, Inc. All rights reserved.
// See the file LICENSE for licensing terms.
package validators
import (
"errors"
"testing"
"github.com/Toinounet21/avalanchego-trafficked-v1.7.4/ids"
)
var (
errCurrentHeight = errors.New("unexpectedly called GetCurrentHeight")
errGetValidatorSet = errors.New("unexpectedly called GetValidatorSet")
)
type TestState struct {
T *testing.T
CantGetCurrentHeight,
CantGetValidatorSet bool
GetCurrentHeightF func() (uint64, error)
GetValidatorSetF func(height uint64, subnetID ids.ID) (map[ids.ShortID]uint64, error)
}
func (vm *TestState) GetCurrentHeight() (uint64, error) {
if vm.GetCurrentHeightF != nil {
return vm.GetCurrentHeightF()
}
if vm.CantGetCurrentHeight && vm.T != nil {
vm.T.Fatal(errCurrentHeight)
} | return 0, errCurrentHeight
}
func (vm *TestState) GetValidatorSet(height uint64, subnetID ids.ID) (map[ids.ShortID]uint64, error) {
if vm.GetValidatorSetF != nil {
return vm.GetValidatorSetF(height, subnetID)
}
if vm.CantGetValidatorSet && vm.T != nil {
vm.T.Fatal(errGetValidatorSet)
}
return nil, errGetValidatorSet
} | |
aks_cluster_handler.go | package aks
import (
"context"
stderrors "errors"
"fmt"
"net"
"net/url"
"reflect"
"time"
"github.com/Azure/go-autorest/autorest/to"
"github.com/rancher/aks-operator/controller"
aksv1 "github.com/rancher/aks-operator/pkg/apis/aks.cattle.io/v1"
apimgmtv3 "github.com/rancher/rancher/pkg/apis/management.cattle.io/v3"
"github.com/rancher/rancher/pkg/controllers/management/clusteroperator"
"github.com/rancher/rancher/pkg/controllers/management/clusterupstreamrefresher"
"github.com/rancher/rancher/pkg/controllers/management/rbac"
"github.com/rancher/rancher/pkg/dialer"
mgmtv3 "github.com/rancher/rancher/pkg/generated/norman/management.cattle.io/v3"
"github.com/rancher/rancher/pkg/namespace"
"github.com/rancher/rancher/pkg/systemaccount"
"github.com/rancher/rancher/pkg/types/config"
"github.com/rancher/rancher/pkg/wrangler"
"github.com/sirupsen/logrus"
"k8s.io/apimachinery/pkg/api/errors"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/fields"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/client-go/rest"
)
const (
aksAPIGroup = "aks.cattle.io"
aksV1 = "aks.cattle.io/v1"
aksOperatorTemplate = "system-library-rancher-aks-operator"
aksOperator = "rancher-aks-operator"
aksShortName = "AKS"
enqueueTime = time.Second * 5
)
type aksOperatorController struct {
clusteroperator.OperatorController
}
func Register(ctx context.Context, wContext *wrangler.Context, mgmtCtx *config.ManagementContext) {
aksClusterConfigResource := schema.GroupVersionResource{
Group: aksAPIGroup,
Version: "v1",
Resource: "aksclusterconfigs",
}
aksCCDynamicClient := mgmtCtx.DynamicClient.Resource(aksClusterConfigResource)
e := &aksOperatorController{clusteroperator.OperatorController{
ClusterEnqueueAfter: wContext.Mgmt.Cluster().EnqueueAfter,
SecretsCache: wContext.Core.Secret().Cache(),
TemplateCache: wContext.Mgmt.CatalogTemplate().Cache(),
ProjectCache: wContext.Mgmt.Project().Cache(),
AppLister: mgmtCtx.Project.Apps("").Controller().Lister(),
AppClient: mgmtCtx.Project.Apps(""),
NsClient: mgmtCtx.Core.Namespaces(""),
ClusterClient: wContext.Mgmt.Cluster(),
CatalogManager: mgmtCtx.CatalogManager,
SystemAccountManager: systemaccount.NewManager(mgmtCtx),
DynamicClient: aksCCDynamicClient,
ClientDialer: mgmtCtx.Dialer,
Discovery: wContext.K8s.Discovery(),
}}
wContext.Mgmt.Cluster().OnChange(ctx, "aks-operator-controller", e.onClusterChange)
}
func (e *aksOperatorController) onClusterChange(key string, cluster *mgmtv3.Cluster) (*mgmtv3.Cluster, error) {
if cluster == nil || cluster.DeletionTimestamp != nil || cluster.Spec.AKSConfig == nil {
return cluster, nil
}
// set driver name
if cluster.Status.Driver == "" {
cluster = cluster.DeepCopy()
cluster.Status.Driver = apimgmtv3.ClusterDriverAKS
return e.ClusterClient.Update(cluster)
}
cluster, err := e.CheckCrdReady(cluster, "aks")
if err != nil {
return cluster, err
}
// get aks Cluster Config, if it does not exist, create it
aksClusterConfigDynamic, err := e.DynamicClient.Namespace(namespace.GlobalNamespace).Get(context.TODO(), cluster.Name, v1.GetOptions{})
if err != nil {
if !errors.IsNotFound(err) {
return cluster, err
}
cluster, err = e.SetUnknown(cluster, apimgmtv3.ClusterConditionWaiting, "Waiting for API to be available")
if err != nil {
return cluster, err
}
aksClusterConfigDynamic, err = buildAKSCCCreateObject(cluster)
if err != nil |
aksClusterConfigDynamic, err = e.DynamicClient.Namespace(namespace.GlobalNamespace).Create(context.TODO(), aksClusterConfigDynamic, v1.CreateOptions{})
if err != nil {
return cluster, err
}
}
aksClusterConfigMap, err := runtime.DefaultUnstructuredConverter.ToUnstructured(&cluster.Spec.AKSConfig)
if err != nil {
return cluster, err
}
// check for changes between aks spec on cluster and the aks spec on the aksClusterConfig object
if !reflect.DeepEqual(aksClusterConfigMap, aksClusterConfigDynamic.Object["spec"]) {
logrus.Infof("change detected for cluster [%s], updating AKSClusterConfig", cluster.Name)
return e.updateAKSClusterConfig(cluster, aksClusterConfigDynamic, aksClusterConfigMap)
}
// get aks Cluster Config's phase
status, _ := aksClusterConfigDynamic.Object["status"].(map[string]interface{})
phase, _ := status["phase"]
failureMessage, _ := status["failureMessage"].(string)
switch phase {
case "creating":
if cluster.Status.AKSStatus.UpstreamSpec == nil {
return e.setInitialUpstreamSpec(cluster)
}
e.ClusterEnqueueAfter(cluster.Name, enqueueTime)
if failureMessage == "" {
logrus.Infof("waiting for cluster AKS [%s] to finish creating", cluster.Name)
return e.SetUnknown(cluster, apimgmtv3.ClusterConditionProvisioned, "")
}
logrus.Infof("waiting for cluster AKS [%s] create failure to be resolved", cluster.Name)
return e.SetFalse(cluster, apimgmtv3.ClusterConditionProvisioned, failureMessage)
case "active":
if cluster.Spec.AKSConfig.Imported {
if cluster.Status.AKSStatus.UpstreamSpec == nil {
// non imported clusters will have already had upstream spec set
return e.setInitialUpstreamSpec(cluster)
}
if apimgmtv3.ClusterConditionPending.IsUnknown(cluster) {
cluster = cluster.DeepCopy()
apimgmtv3.ClusterConditionPending.True(cluster)
return e.ClusterClient.Update(cluster)
}
}
cluster, err = e.SetTrue(cluster, apimgmtv3.ClusterConditionProvisioned, "")
if err != nil {
return cluster, err
}
if cluster.Status.AKSStatus.RBACEnabled == nil {
enabled, ok := status["rbacEnabled"].(bool)
if ok {
cluster = cluster.DeepCopy()
cluster.Status.AKSStatus.RBACEnabled = &enabled
return e.ClusterClient.Update(cluster)
}
}
if cluster.Status.APIEndpoint == "" {
return e.RecordCAAndAPIEndpoint(cluster)
}
if cluster.Status.AKSStatus.PrivateRequiresTunnel == nil &&
to.Bool(cluster.Status.AKSStatus.UpstreamSpec.PrivateCluster) {
// In this case, the API endpoint is private and it has not been determined if Rancher must tunnel to communicate with it.
// Check to see if we can still use the control plane endpoint even though
// the cluster has private-only access
serviceToken, mustTunnel, err := e.generateSATokenWithPublicAPI(cluster)
if err != nil {
return cluster, err
}
if mustTunnel != nil {
cluster = cluster.DeepCopy()
cluster.Status.AKSStatus.PrivateRequiresTunnel = mustTunnel
cluster.Status.ServiceAccountToken = serviceToken
return e.ClusterClient.Update(cluster)
}
}
if cluster.Status.ServiceAccountToken == "" {
cluster, err = e.generateAndSetServiceAccount(cluster)
if err != nil {
var statusErr error
if err == dialer.ErrAgentDisconnected {
// In this case, the API endpoint is private and rancher is waiting for the import cluster command to be run.
cluster, statusErr = e.SetUnknown(cluster, apimgmtv3.ClusterConditionWaiting, "waiting for cluster agent to be deployed")
if statusErr == nil {
e.ClusterEnqueueAfter(cluster.Name, enqueueTime)
}
return cluster, statusErr
}
cluster, statusErr = e.SetFalse(cluster, apimgmtv3.ClusterConditionWaiting,
fmt.Sprintf("failed to communicate with cluster: %v", err))
if statusErr != nil {
return cluster, statusErr
}
return cluster, err
}
}
cluster, err = e.recordAppliedSpec(cluster)
if err != nil {
return cluster, err
}
return e.SetTrue(cluster, apimgmtv3.ClusterConditionUpdated, "")
case "updating":
cluster, err = e.SetTrue(cluster, apimgmtv3.ClusterConditionProvisioned, "")
if err != nil {
return cluster, err
}
e.ClusterEnqueueAfter(cluster.Name, enqueueTime)
if failureMessage == "" {
logrus.Infof("waiting for cluster AKS [%s] to update", cluster.Name)
return e.SetUnknown(cluster, apimgmtv3.ClusterConditionUpdated, "")
}
logrus.Infof("waiting for cluster AKS [%s] update failure to be resolved", cluster.Name)
return e.SetFalse(cluster, apimgmtv3.ClusterConditionUpdated, failureMessage)
default:
if cluster.Spec.AKSConfig.Imported {
cluster, err = e.SetUnknown(cluster, apimgmtv3.ClusterConditionPending, "")
if err != nil {
return cluster, err
}
logrus.Infof("waiting for cluster import [%s] to start", cluster.Name)
} else {
logrus.Infof("waiting for cluster create [%s] to start", cluster.Name)
}
e.ClusterEnqueueAfter(cluster.Name, enqueueTime)
if failureMessage == "" {
if cluster.Spec.AKSConfig.Imported {
cluster, err = e.SetUnknown(cluster, apimgmtv3.ClusterConditionPending, "")
if err != nil {
return cluster, err
}
logrus.Infof("waiting for cluster import [%s] to start", cluster.Name)
} else {
logrus.Infof("waiting for cluster create [%s] to start", cluster.Name)
}
return e.SetUnknown(cluster, apimgmtv3.ClusterConditionProvisioned, "")
}
logrus.Infof("waiting for cluster AKS [%s] pre-create failure to be resolved", cluster.Name)
return e.SetFalse(cluster, apimgmtv3.ClusterConditionProvisioned, failureMessage)
}
}
func (e *aksOperatorController) setInitialUpstreamSpec(cluster *mgmtv3.Cluster) (*mgmtv3.Cluster, error) {
logrus.Infof("setting initial upstreamSpec on cluster [%s]", cluster.Name)
upstreamSpec, err := clusterupstreamrefresher.BuildAKSUpstreamSpec(e.SecretsCache, cluster)
if err != nil {
return cluster, err
}
cluster = cluster.DeepCopy()
cluster.Status.AKSStatus.UpstreamSpec = upstreamSpec
return e.ClusterClient.Update(cluster)
}
// updateAKSClusterConfig updates the AKSClusterConfig object's spec with the cluster's AKSConfig if they are not equal..
func (e *aksOperatorController) updateAKSClusterConfig(cluster *mgmtv3.Cluster, aksClusterConfigDynamic *unstructured.Unstructured, spec map[string]interface{}) (*mgmtv3.Cluster, error) {
list, err := e.DynamicClient.Namespace(namespace.GlobalNamespace).List(context.TODO(), v1.ListOptions{})
if err != nil {
return cluster, err
}
selector := fields.OneTermEqualSelector("metadata.name", cluster.Name)
w, err := e.DynamicClient.Namespace(namespace.GlobalNamespace).Watch(context.TODO(), v1.ListOptions{ResourceVersion: list.GetResourceVersion(), FieldSelector: selector.String()})
if err != nil {
return cluster, err
}
aksClusterConfigDynamic.Object["spec"] = spec
aksClusterConfigDynamic, err = e.DynamicClient.Namespace(namespace.GlobalNamespace).Update(context.TODO(), aksClusterConfigDynamic, v1.UpdateOptions{})
if err != nil {
return cluster, err
}
// AKS cluster and node pool statuses are not always immediately updated. This cause the AKSConfig to
// stay in "active" for a few seconds, causing the cluster to go back to "active".
timeout := time.NewTimer(10 * time.Second)
for {
select {
case event := <-w.ResultChan():
aksClusterConfigDynamic = event.Object.(*unstructured.Unstructured)
status, _ := aksClusterConfigDynamic.Object["status"].(map[string]interface{})
if status["phase"] == "active" {
continue
}
// this enqueue is necessary to ensure that the controller is reentered with the updating phase
e.ClusterEnqueueAfter(cluster.Name, enqueueTime)
return e.SetUnknown(cluster, apimgmtv3.ClusterConditionUpdated, "")
case <-timeout.C:
cluster, err = e.recordAppliedSpec(cluster)
if err != nil {
return cluster, err
}
return cluster, nil
}
}
}
// generateAndSetServiceAccount uses the API endpoint and CA cert to generate a service account token. The token is then copied to the cluster status.
func (e *aksOperatorController) generateAndSetServiceAccount(cluster *mgmtv3.Cluster) (*mgmtv3.Cluster, error) {
restConfig, err := e.getRestConfig(cluster)
if err != nil {
return cluster, fmt.Errorf("error getting service account token: %v", err)
}
clusterDialer, err := e.ClientDialer.ClusterDialer(cluster.Name)
if err != nil {
return cluster, err
}
restConfig.Dial = clusterDialer
saToken, err := clusteroperator.GenerateSAToken(restConfig)
if err != nil {
return cluster, fmt.Errorf("error getting service account token: %v", err)
}
cluster = cluster.DeepCopy()
cluster.Status.ServiceAccountToken = saToken
return e.ClusterClient.Update(cluster)
}
// buildAKSCCCreateObject returns an object that can be used with the kubernetes dynamic client to
// create an AKSClusterConfig that matches the spec contained in the cluster's AKSConfig.
func buildAKSCCCreateObject(cluster *mgmtv3.Cluster) (*unstructured.Unstructured, error) {
aksClusterConfig := aksv1.AKSClusterConfig{
TypeMeta: v1.TypeMeta{
Kind: "AKSClusterConfig",
APIVersion: aksV1,
},
ObjectMeta: v1.ObjectMeta{
Name: cluster.Name,
OwnerReferences: []v1.OwnerReference{
{
Kind: cluster.Kind,
APIVersion: rbac.RancherManagementAPIVersion,
Name: cluster.Name,
UID: cluster.UID,
},
},
},
Spec: *cluster.Spec.AKSConfig,
}
// convert AKS cluster config into unstructured object so it can be used with dynamic client
aksClusterConfigMap, err := runtime.DefaultUnstructuredConverter.ToUnstructured(&aksClusterConfig)
if err != nil {
return nil, err
}
return &unstructured.Unstructured{
Object: aksClusterConfigMap,
}, nil
}
// recordAppliedSpec sets the cluster's current spec as its appliedSpec
func (e *aksOperatorController) recordAppliedSpec(cluster *mgmtv3.Cluster) (*mgmtv3.Cluster, error) {
if reflect.DeepEqual(cluster.Status.AppliedSpec.AKSConfig, cluster.Spec.AKSConfig) {
return cluster, nil
}
cluster = cluster.DeepCopy()
cluster.Status.AppliedSpec.AKSConfig = cluster.Spec.AKSConfig
return e.ClusterClient.Update(cluster)
}
// generateSATokenWithPublicAPI tries to get a service account token from the cluster using the public API endpoint.
// This function is called if the cluster has only privateEndpoint enabled and is not publicly available.
// If Rancher is able to communicate with the cluster through its API endpoint even though it is private, then this function will retrieve
// a service account token and the *bool returned will refer to a false value (doesn't have to tunnel).
//
// If the Rancher server cannot connect to the cluster's API endpoint, then one of the two errors below will happen.
// In this case, we know that Rancher must use the cluster agent tunnel for communication. This function will return an empty service account token,
// and the *bool return value will refer to a true value (must tunnel).
//
// If an error different from the two below occur, then the *bool return value will be nil, indicating that Rancher was not able to determine if
// tunneling is required to communicate with the cluster.
func (e *aksOperatorController) generateSATokenWithPublicAPI(cluster *mgmtv3.Cluster) (string, *bool, error) {
	restConfig, err := e.getRestConfig(cluster)
	if err != nil {
		return "", nil, err
	}
	requiresTunnel := new(bool)
	// Dial the endpoint directly (no tunnel) with explicit timeouts so an
	// unreachable endpoint fails promptly instead of hanging.
	restConfig.Dial = (&net.Dialer{
		Timeout:   30 * time.Second,
		KeepAlive: 30 * time.Second,
	}).DialContext
	serviceToken, err := clusteroperator.GenerateSAToken(restConfig)
	if err != nil {
		*requiresTunnel = true
		// A permanent DNS failure means the endpoint is not publicly
		// resolvable, so the tunnel must be used.
		var dnsError *net.DNSError
		if stderrors.As(err, &dnsError) && !dnsError.IsTemporary {
			return "", requiresTunnel, nil
		}
		// In the existence of a proxy, it may be the case that the following error occurs,
		// in which case rancher should use the tunnel connection to communicate with the cluster.
		var urlError *url.Error
		if stderrors.As(err, &urlError) && urlError.Timeout() {
			return "", requiresTunnel, nil
		}
		// Not able to determine if tunneling is required.
		requiresTunnel = nil
	}
	// Success: token with *requiresTunnel == false. Unclassified failure:
	// requiresTunnel == nil and the original error is propagated.
	return serviceToken, requiresTunnel, err
}
// getRestConfig builds a rest.Config for the cluster from the kubeconfig
// stored for its AKS configuration.
func (e *aksOperatorController) getRestConfig(cluster *mgmtv3.Cluster) (*rest.Config, error) {
	cfg, err := controller.GetClusterKubeConfig(context.Background(), e.SecretsCache, cluster.Spec.AKSConfig)
	if err != nil {
		return nil, err
	}
	return cfg, nil
}
| {
return cluster, err
} |
DevicePhoneRefresh.js | /*
@adobe/react-spectrum-workflow (c) by Adobe
@adobe/react-spectrum-workflow is licensed under a
Creative Commons Attribution-NoDerivatives 4.0 International License.
You should have received a copy of the license along with this
work. If not, see <http://creativecommons.org/licenses/by-nd/4.0/>.
*/
import React from 'react';
import {viewBoxes} from './util';
export function | ({
scale = 'M',
color = 'LIGHT',
...props
}) {
return (
<svg viewBox={viewBoxes[scale]} {...props}>
<path d="M16 30H8V6h16v9.347a11.6 11.6 0 0 1 2-.416V2a2 2 0 0 0-2-2H8a2 2 0 0 0-2 2v32a2 2 0 0 0 2 2h8zM15 2h2a1.04 1.04 0 0 1 1 1 1.041 1.041 0 0 1-1 1h-2a1.024 1.024 0 0 1-1-1 1.024 1.024 0 0 1 1-1z" />
<path d="M18.4 24.451a8.882 8.882 0 0 1 15.5-3.09l1.251-1.251a.486.486 0 0 1 .349-.147.5.5 0 0 1 .5.5v5.051a.472.472 0 0 1-.179.334l.014.114H30.5a.5.5 0 0 1-.5-.5.486.486 0 0 1 .148-.35l1.739-1.74a6.057 6.057 0 0 0-10.6 1.436.975.975 0 0 1-.921.62h-1.248a.76.76 0 0 1-.718-.977zM35.6 29.511A8.882 8.882 0 0 1 20.1 32.6l-1.25 1.251a.489.489 0 0 1-.35.149.5.5 0 0 1-.5-.5v-5.053a.477.477 0 0 1 .179-.334c0-.037-.01-.075-.014-.113H23.5a.5.5 0 0 1 .5.5.489.489 0 0 1-.147.35l-1.74 1.739a6.056 6.056 0 0 0 10.6-1.436.976.976 0 0 1 .921-.619h1.251a.759.759 0 0 1 .715.977z" />
</svg>
);
}
| A4uDevicePhoneRefresh |
boring_test.go | // Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:build boringcrypto
package tls
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/internal/boring/fipstls"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"crypto/x509/pkix"
"encoding/pem"
"fmt"
"math/big"
"net"
"runtime"
"strings"
"testing"
"time"
)
// TestBoringServerProtocolVersion checks which protocol versions the server
// accepts before and after FIPS-only mode (fipstls) is forced.
func TestBoringServerProtocolVersion(t *testing.T) {
	// test runs a handshake expecting failure with message msg;
	// an empty msg means the handshake must succeed.
	test := func(name string, v uint16, msg string) {
		t.Run(name, func(t *testing.T) {
			serverConfig := testConfig.Clone()
			serverConfig.MinVersion = VersionSSL30
			clientHello := &clientHelloMsg{
				vers:               v,
				random:             make([]byte, 32),
				cipherSuites:       allCipherSuites(),
				compressionMethods: []uint8{compressionNone},
				supportedVersions:  []uint16{v},
			}
			testClientHelloFailure(t, serverConfig, clientHello, msg)
		})
	}

	// Without FIPS mode, TLS 1.0 through 1.3 are all accepted.
	test("VersionTLS10", VersionTLS10, "")
	test("VersionTLS11", VersionTLS11, "")
	test("VersionTLS12", VersionTLS12, "")
	test("VersionTLS13", VersionTLS13, "")

	// With FIPS mode forced, only TLS 1.2 is accepted.
	fipstls.Force()
	defer fipstls.Abandon()
	test("VersionSSL30", VersionSSL30, "client offered only unsupported versions")
	test("VersionTLS10", VersionTLS10, "client offered only unsupported versions")
	test("VersionTLS11", VersionTLS11, "client offered only unsupported versions")
	test("VersionTLS12", VersionTLS12, "")
	test("VersionTLS13", VersionTLS13, "client offered only unsupported versions")
}
| func isBoringCipherSuite(id uint16) bool {
switch id {
case TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
TLS_RSA_WITH_AES_128_GCM_SHA256,
TLS_RSA_WITH_AES_256_GCM_SHA384:
return true
}
return false
}
// isBoringCurve reports whether the curve is allowed in FIPS-only mode.
func isBoringCurve(id CurveID) bool {
	return id == CurveP256 || id == CurveP384 || id == CurveP521
}
// isECDSA reports whether the cipher suite uses ECDSA signatures; it panics
// if the suite id is not found in the package's suite table.
func isECDSA(id uint16) bool {
	for _, suite := range cipherSuites {
		if suite.id != id {
			continue
		}
		return suite.flags&suiteECSign == suiteECSign
	}
	panic(fmt.Sprintf("unknown cipher suite %#x", id))
}
// isBoringSignatureScheme reports whether the signature scheme is allowed in
// FIPS-only mode.
func isBoringSignatureScheme(alg SignatureScheme) bool {
	switch alg {
	case PKCS1WithSHA256, ECDSAWithP256AndSHA256,
		PKCS1WithSHA384, ECDSAWithP384AndSHA384,
		PKCS1WithSHA512, ECDSAWithP521AndSHA512,
		PSSWithSHA256, PSSWithSHA384, PSSWithSHA512:
		return true
	}
	return false
}
// TestBoringServerCipherSuites checks that, with fipstls forced, the server
// rejects any cipher suite outside the FIPS-allowed set.
func TestBoringServerCipherSuites(t *testing.T) {
	serverConfig := testConfig.Clone()
	serverConfig.CipherSuites = allCipherSuites()
	serverConfig.Certificates = make([]Certificate, 1)

	for _, id := range allCipherSuites() {
		// Install a certificate whose key type matches the suite under test.
		if isECDSA(id) {
			serverConfig.Certificates[0].Certificate = [][]byte{testECDSACertificate}
			serverConfig.Certificates[0].PrivateKey = testECDSAPrivateKey
		} else {
			serverConfig.Certificates[0].Certificate = [][]byte{testRSACertificate}
			serverConfig.Certificates[0].PrivateKey = testRSAPrivateKey
		}
		serverConfig.BuildNameToCertificate()

		t.Run(fmt.Sprintf("suite=%#x", id), func(t *testing.T) {
			clientHello := &clientHelloMsg{
				vers:               VersionTLS12,
				random:             make([]byte, 32),
				cipherSuites:       []uint16{id},
				compressionMethods: []uint8{compressionNone},
				supportedCurves:    defaultCurvePreferences,
				supportedPoints:    []uint8{pointFormatUncompressed},
			}
			// Without FIPS mode, every suite handshakes successfully.
			testClientHello(t, serverConfig, clientHello)

			// With fipstls forced, only FIPS-allowed suites succeed.
			t.Run("fipstls", func(t *testing.T) {
				fipstls.Force()
				defer fipstls.Abandon()
				msg := ""
				if !isBoringCipherSuite(id) {
					msg = "no cipher suite supported by both client and server"
				}
				testClientHelloFailure(t, serverConfig, clientHello, msg)
			})
		})
	}
}
// TestBoringServerCurves checks that, with fipstls forced, the server rejects
// key exchange over curves outside the FIPS-allowed set.
func TestBoringServerCurves(t *testing.T) {
	serverConfig := testConfig.Clone()
	serverConfig.Certificates = make([]Certificate, 1)
	serverConfig.Certificates[0].Certificate = [][]byte{testECDSACertificate}
	serverConfig.Certificates[0].PrivateKey = testECDSAPrivateKey
	serverConfig.BuildNameToCertificate()

	for _, curveid := range defaultCurvePreferences {
		t.Run(fmt.Sprintf("curve=%d", curveid), func(t *testing.T) {
			clientHello := &clientHelloMsg{
				vers:               VersionTLS12,
				random:             make([]byte, 32),
				cipherSuites:       []uint16{TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256},
				compressionMethods: []uint8{compressionNone},
				supportedCurves:    []CurveID{curveid},
				supportedPoints:    []uint8{pointFormatUncompressed},
			}
			// Without FIPS mode, every curve handshakes successfully.
			testClientHello(t, serverConfig, clientHello)

			// With fipstls forced, bad curves should be rejected.
			t.Run("fipstls", func(t *testing.T) {
				fipstls.Force()
				defer fipstls.Abandon()
				msg := ""
				if !isBoringCurve(curveid) {
					// A rejected curve surfaces as a suite mismatch, since the
					// only offered suite is ECDHE and so needs a usable curve.
					msg = "no cipher suite supported by both client and server"
				}
				testClientHelloFailure(t, serverConfig, clientHello, msg)
			})
		})
	}
}
// boringHandshake runs a full handshake between a client and server connected
// by an in-memory pipe and returns each side's handshake error (nil on success).
func boringHandshake(t *testing.T, clientConfig, serverConfig *Config) (clientErr, serverErr error) {
	c, s := localPipe(t)
	client := Client(c, clientConfig)
	server := Server(s, serverConfig)
	// Drive the client concurrently; the buffered channel lets the goroutine
	// finish even if this function returns first.
	done := make(chan error, 1)
	go func() {
		done <- client.Handshake()
		c.Close()
	}()
	serverErr = server.Handshake()
	s.Close()
	clientErr = <-done
	return
}
// TestBoringServerSignatureAndHash checks that, with fipstls forced, the
// server only completes handshakes using FIPS-allowed signature schemes.
func TestBoringServerSignatureAndHash(t *testing.T) {
	defer func() {
		testingOnlyForceClientHelloSignatureAlgorithms = nil
	}()

	for _, sigHash := range defaultSupportedSignatureAlgorithms {
		t.Run(fmt.Sprintf("%#x", sigHash), func(t *testing.T) {
			serverConfig := testConfig.Clone()
			serverConfig.Certificates = make([]Certificate, 1)

			// Force the client to offer exactly one signature algorithm.
			testingOnlyForceClientHelloSignatureAlgorithms = []SignatureScheme{sigHash}

			// Pick a certificate/key and cipher suite matching the scheme's
			// signature type.
			sigType, _, _ := typeAndHashFromSignatureScheme(sigHash)
			switch sigType {
			case signaturePKCS1v15, signatureRSAPSS:
				serverConfig.CipherSuites = []uint16{TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256}
				serverConfig.Certificates[0].Certificate = [][]byte{testRSA2048Certificate}
				serverConfig.Certificates[0].PrivateKey = testRSA2048PrivateKey
			case signatureEd25519:
				serverConfig.CipherSuites = []uint16{TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256}
				serverConfig.Certificates[0].Certificate = [][]byte{testEd25519Certificate}
				serverConfig.Certificates[0].PrivateKey = testEd25519PrivateKey
			case signatureECDSA:
				serverConfig.CipherSuites = []uint16{TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256}
				serverConfig.Certificates[0].Certificate = [][]byte{testECDSACertificate}
				serverConfig.Certificates[0].PrivateKey = testECDSAPrivateKey
			}
			serverConfig.BuildNameToCertificate()
			// PKCS#1 v1.5 signature algorithms can't be used standalone in TLS
			// 1.3, and the ECDSA ones bind to the curve used.
			serverConfig.MaxVersion = VersionTLS12

			clientErr, serverErr := boringHandshake(t, testConfig, serverConfig)
			if clientErr != nil {
				t.Fatalf("expected handshake with %#x to succeed; client error: %v; server error: %v", sigHash, clientErr, serverErr)
			}

			// With fipstls forced, non-FIPS signature algorithms should be rejected.
			t.Run("fipstls", func(t *testing.T) {
				fipstls.Force()
				defer fipstls.Abandon()
				clientErr, _ := boringHandshake(t, testConfig, serverConfig)
				if isBoringSignatureScheme(sigHash) {
					if clientErr != nil {
						t.Fatalf("expected handshake with %#x to succeed; err=%v", sigHash, clientErr)
					}
				} else {
					if clientErr == nil {
						t.Fatalf("expected handshake with %#x to fail, but it succeeded", sigHash)
					}
				}
			})
		})
	}
}
// TestBoringClientHello checks that a client in FIPS mode only offers
// FIPS-allowed versions, cipher suites, curves, and signature schemes,
// regardless of what its Config asks for.
func TestBoringClientHello(t *testing.T) {
	// Test that no matter what we put in the client config,
	// the client does not offer non-FIPS configurations.
	fipstls.Force()
	defer fipstls.Abandon()

	c, s := net.Pipe()
	defer c.Close()
	defer s.Close()

	clientConfig := testConfig.Clone()
	// All sorts of traps for the client to avoid.
	clientConfig.MinVersion = VersionSSL30
	clientConfig.MaxVersion = VersionTLS13
	clientConfig.CipherSuites = allCipherSuites()
	clientConfig.CurvePreferences = defaultCurvePreferences

	// Start the handshake only to capture the ClientHello on the server side.
	go Client(c, clientConfig).Handshake()
	srv := Server(s, testConfig)
	msg, err := srv.readHandshake()
	if err != nil {
		t.Fatal(err)
	}
	hello, ok := msg.(*clientHelloMsg)
	if !ok {
		t.Fatalf("unexpected message type %T", msg)
	}

	if !isBoringVersion(hello.vers) {
		t.Errorf("client vers=%#x, want %#x (TLS 1.2)", hello.vers, VersionTLS12)
	}
	for _, v := range hello.supportedVersions {
		if !isBoringVersion(v) {
			t.Errorf("client offered disallowed version %#x", v)
		}
	}
	for _, id := range hello.cipherSuites {
		if !isBoringCipherSuite(id) {
			t.Errorf("client offered disallowed suite %#x", id)
		}
	}
	for _, id := range hello.supportedCurves {
		if !isBoringCurve(id) {
			t.Errorf("client offered disallowed curve %d", id)
		}
	}
	for _, sigHash := range hello.supportedSignatureAlgorithms {
		if !isBoringSignatureScheme(sigHash) {
			t.Errorf("client offered disallowed signature-and-hash %v", sigHash)
		}
	}
}
// TestBoringCertAlgs exercises certificate-chain verification in and out of
// FIPS mode across many combinations of key types/sizes and chain shapes.
func TestBoringCertAlgs(t *testing.T) {
	// NaCl, arm and wasm time out generating keys. Nothing in this test is architecture-specific, so just don't bother on those.
	if runtime.GOOS == "nacl" || runtime.GOARCH == "arm" || runtime.GOOS == "js" {
		t.Skipf("skipping on %s/%s because key generation takes too long", runtime.GOOS, runtime.GOARCH)
	}

	// Set up some roots, intermediate CAs, and leaf certs with various algorithms.
	// X_Y is X signed by Y.
	R1 := boringCert(t, "R1", boringRSAKey(t, 2048), nil, boringCertCA|boringCertFIPSOK)
	R2 := boringCert(t, "R2", boringRSAKey(t, 4096), nil, boringCertCA)
	M1_R1 := boringCert(t, "M1_R1", boringECDSAKey(t, elliptic.P256()), R1, boringCertCA|boringCertFIPSOK)
	M2_R1 := boringCert(t, "M2_R1", boringECDSAKey(t, elliptic.P224()), R1, boringCertCA)
	I_R1 := boringCert(t, "I_R1", boringRSAKey(t, 3072), R1, boringCertCA|boringCertFIPSOK)
	I_R2 := boringCert(t, "I_R2", I_R1.key, R2, boringCertCA|boringCertFIPSOK)
	I_M1 := boringCert(t, "I_M1", I_R1.key, M1_R1, boringCertCA|boringCertFIPSOK)
	I_M2 := boringCert(t, "I_M2", I_R1.key, M2_R1, boringCertCA|boringCertFIPSOK)
	L1_I := boringCert(t, "L1_I", boringECDSAKey(t, elliptic.P384()), I_R1, boringCertLeaf|boringCertFIPSOK)
	L2_I := boringCert(t, "L2_I", boringRSAKey(t, 1024), I_R1, boringCertLeaf)

	// client verifying server cert
	// ok says whether verification is expected to succeed.
	testServerCert := func(t *testing.T, desc string, pool *x509.CertPool, key interface{}, list [][]byte, ok bool) {
		clientConfig := testConfig.Clone()
		clientConfig.RootCAs = pool
		clientConfig.InsecureSkipVerify = false
		clientConfig.ServerName = "example.com"

		serverConfig := testConfig.Clone()
		serverConfig.Certificates = []Certificate{{Certificate: list, PrivateKey: key}}
		serverConfig.BuildNameToCertificate()

		clientErr, _ := boringHandshake(t, clientConfig, serverConfig)

		if (clientErr == nil) == ok {
			if ok {
				t.Logf("%s: accept", desc)
			} else {
				t.Logf("%s: reject", desc)
			}
		} else {
			if ok {
				t.Errorf("%s: BAD reject (%v)", desc, clientErr)
			} else {
				t.Errorf("%s: BAD accept", desc)
			}
		}
	}

	// server verifying client cert
	testClientCert := func(t *testing.T, desc string, pool *x509.CertPool, key interface{}, list [][]byte, ok bool) {
		clientConfig := testConfig.Clone()
		clientConfig.ServerName = "example.com"
		clientConfig.Certificates = []Certificate{{Certificate: list, PrivateKey: key}}

		serverConfig := testConfig.Clone()
		serverConfig.ClientCAs = pool
		serverConfig.ClientAuth = RequireAndVerifyClientCert

		_, serverErr := boringHandshake(t, clientConfig, serverConfig)

		if (serverErr == nil) == ok {
			if ok {
				t.Logf("%s: accept", desc)
			} else {
				t.Logf("%s: reject", desc)
			}
		} else {
			if ok {
				t.Errorf("%s: BAD reject (%v)", desc, serverErr)
			} else {
				t.Errorf("%s: BAD accept", desc)
			}
		}
	}

	// Run simple basic test with known answers before proceeding to
	// exhaustive test with computed answers.
	r1pool := x509.NewCertPool()
	r1pool.AddCert(R1.cert)
	testServerCert(t, "basic", r1pool, L2_I.key, [][]byte{L2_I.der, I_R1.der}, true)
	testClientCert(t, "basic (client cert)", r1pool, L2_I.key, [][]byte{L2_I.der, I_R1.der}, true)
	fipstls.Force()
	// L2_I has a 1024-bit RSA key (no FIPSOK flag), so it must be rejected in FIPS mode.
	testServerCert(t, "basic (fips)", r1pool, L2_I.key, [][]byte{L2_I.der, I_R1.der}, false)
	testClientCert(t, "basic (fips, client cert)", r1pool, L2_I.key, [][]byte{L2_I.der, I_R1.der}, false)
	fipstls.Abandon()

	if t.Failed() {
		t.Fatal("basic test failed, skipping exhaustive test")
	}

	if testing.Short() {
		t.Logf("basic test passed; skipping exhaustive test in -short mode")
		return
	}

	for l := 1; l <= 2; l++ {
		leaf := L1_I
		if l == 2 {
			leaf = L2_I
		}
		// Each bit of i selects whether one of the six intermediates is sent.
		for i := 0; i < 64; i++ {
			// reachable tracks which orgs can be chained up to from the leaf
			// using the sent intermediates; reachableFIPS additionally
			// requires every link (and the leaf) to be FIPS-OK.
			reachable := map[string]bool{leaf.parentOrg: true}
			reachableFIPS := map[string]bool{leaf.parentOrg: leaf.fipsOK}
			list := [][]byte{leaf.der}
			listName := leaf.name
			addList := func(cond int, c *boringCertificate) {
				if cond != 0 {
					list = append(list, c.der)
					listName += "," + c.name
					if reachable[c.org] {
						reachable[c.parentOrg] = true
					}
					if reachableFIPS[c.org] && c.fipsOK {
						reachableFIPS[c.parentOrg] = true
					}
				}
			}
			addList(i&1, I_R1)
			addList(i&2, I_R2)
			addList(i&4, I_M1)
			addList(i&8, I_M2)
			addList(i&16, M1_R1)
			addList(i&32, M2_R1)

			// Each bit of r selects whether one of the two roots is trusted.
			for r := 1; r <= 3; r++ {
				pool := x509.NewCertPool()
				rootName := ","
				shouldVerify := false
				shouldVerifyFIPS := false
				addRoot := func(cond int, c *boringCertificate) {
					if cond != 0 {
						rootName += "," + c.name
						pool.AddCert(c.cert)
						if reachable[c.org] {
							shouldVerify = true
						}
						if reachableFIPS[c.org] && c.fipsOK {
							shouldVerifyFIPS = true
						}
					}
				}
				addRoot(r&1, R1)
				addRoot(r&2, R2)
				rootName = rootName[1:] // strip leading comma
				// NOTE(review): rootName starts as "," and addRoot prefixes
				// another comma, so the second [1:] below strips the one that
				// remains — confirm this double strip is intentional.
				testServerCert(t, listName+"->"+rootName[1:], pool, leaf.key, list, shouldVerify)
				testClientCert(t, listName+"->"+rootName[1:]+"(client cert)", pool, leaf.key, list, shouldVerify)
				fipstls.Force()
				testServerCert(t, listName+"->"+rootName[1:]+" (fips)", pool, leaf.key, list, shouldVerifyFIPS)
				testClientCert(t, listName+"->"+rootName[1:]+" (fips, client cert)", pool, leaf.key, list, shouldVerifyFIPS)
				fipstls.Abandon()
			}
		}
	}
}
// Mode bits for boringCert.
const (
	boringCertCA     = iota // CA certificate (root or intermediate)
	boringCertLeaf          // leaf certificate (gets a DNS SAN)
	boringCertFIPSOK = 0x80 // flag: certificate parameters acceptable in FIPS mode
)
// boringRSAKey generates an RSA private key of the given bit size, failing
// the test immediately on error.
func boringRSAKey(t *testing.T, size int) *rsa.PrivateKey {
	key, err := rsa.GenerateKey(rand.Reader, size)
	if err != nil {
		t.Fatal(err)
	}
	return key
}
// boringECDSAKey generates an ECDSA private key on the given curve, failing
// the test immediately on error.
func boringECDSAKey(t *testing.T, curve elliptic.Curve) *ecdsa.PrivateKey {
	key, err := ecdsa.GenerateKey(curve, rand.Reader)
	if err != nil {
		t.Fatal(err)
	}
	return key
}
// boringCertificate bundles a generated test certificate with its key and the
// metadata used to compute expected verification results.
type boringCertificate struct {
	name      string            // full name, e.g. "I_R1"
	org       string            // subject organization (name up to the first '_')
	parentOrg string            // issuer's organization ("" for self-signed roots)
	der       []byte            // DER encoding of the certificate
	cert      *x509.Certificate // parsed certificate
	key       interface{}       // *rsa.PrivateKey or *ecdsa.PrivateKey
	fipsOK    bool              // whether this cert's parameters are FIPS-acceptable
}
// boringCert creates a test certificate for key, signed by parent (or
// self-signed when parent is nil). The name encodes "ORG_PARENTORG"; mode
// selects CA vs. leaf and whether the parameters count as FIPS-acceptable.
func boringCert(t *testing.T, name string, key interface{}, parent *boringCertificate, mode int) *boringCertificate {
	// Split "ORG_PARENTORG" into its two components.
	org := name
	parentOrg := ""
	if i := strings.Index(org, "_"); i >= 0 {
		org = org[:i]
		parentOrg = name[i+1:]
	}
	tmpl := &x509.Certificate{
		SerialNumber: big.NewInt(1),
		Subject: pkix.Name{
			Organization: []string{org},
		},
		NotBefore:             time.Unix(0, 0),
		NotAfter:              time.Unix(0, 0),
		KeyUsage:              x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
		ExtKeyUsage:           []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth, x509.ExtKeyUsageClientAuth},
		BasicConstraintsValid: true,
	}
	// Leaf certs get a SAN; CA certs get the CA bit and cert-signing usage.
	if mode&^boringCertFIPSOK == boringCertLeaf {
		tmpl.DNSNames = []string{"example.com"}
	} else {
		tmpl.IsCA = true
		tmpl.KeyUsage |= x509.KeyUsageCertSign
	}

	// Self-sign when there is no parent.
	var pcert *x509.Certificate
	var pkey interface{}
	if parent != nil {
		pcert = parent.cert
		pkey = parent.key
	} else {
		pcert = tmpl
		pkey = key
	}

	var pub interface{}
	switch k := key.(type) {
	case *rsa.PrivateKey:
		pub = &k.PublicKey
	case *ecdsa.PrivateKey:
		pub = &k.PublicKey
	default:
		t.Fatalf("invalid key %T", key)
	}

	der, err := x509.CreateCertificate(rand.Reader, tmpl, pcert, pub, pkey)
	if err != nil {
		t.Fatal(err)
	}
	cert, err := x509.ParseCertificate(der)
	if err != nil {
		t.Fatal(err)
	}

	fipsOK := mode&boringCertFIPSOK != 0
	return &boringCertificate{name, org, parentOrg, der, cert, key, fipsOK}
}
// A self-signed test certificate with an RSA key of size 2048, for testing
// RSA-PSS with SHA512. SAN of example.golang.
var (
	testRSA2048Certificate []byte // DER bytes, decoded from PEM in init below
	testRSA2048PrivateKey  *rsa.PrivateKey
)
func init() {
block, _ := pem.Decode([]byte(`
-----BEGIN CERTIFICATE-----
MIIC/zCCAeegAwIBAgIRALHHX/kh4+4zMU9DarzBEcQwDQYJKoZIhvcNAQELBQAw
EjEQMA4GA1UEChMHQWNtZSBDbzAeFw0xMTAxMDExNTA0MDVaFw0yMDEyMjkxNTA0
MDVaMBIxEDAOBgNVBAoTB0FjbWUgQ28wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
ggEKAoIBAQCf8fk0N6ieCBX4IOVIfKitt4kGcOQLeimCfsjqqHcysMIVGEtFSM6E
4Ay141f/7IqdW0UtIqNb4PXhROID7yDxR284xL6XbCuv/t5hP3UcehYc3hmLiyVd
MkZQiZWtfUUJf/1qOtM+ohNg59LRWp4d+6iX0la1JL3EwCIckkNjJ9hQbF7Pb2CS
+ES9Yo55KAap8KOblpcR8MBSN38bqnwjfQdCXvOEOjam2HUxKzEFX5MA+fA0me4C
ioCcCRLWKl+GoN9F8fABfoZ+T+2eal4DLuO95rXR8SrOIVBh3XFOr/RVhjtXcNVF
ZKcvDt6d68V6jAKAYKm5nlj9GPpd4v+rAgMBAAGjUDBOMA4GA1UdDwEB/wQEAwIF
oDATBgNVHSUEDDAKBggrBgEFBQcDATAMBgNVHRMBAf8EAjAAMBkGA1UdEQQSMBCC
DmV4YW1wbGUuZ29sYW5nMA0GCSqGSIb3DQEBCwUAA4IBAQCOoYsVcFCBhboqe3WH
dC6V7XXXECmnjh01r8h80yv0NR379nSD3cw2M+HKvaXysWqrl5hjGVKw0vtwD81r
V4JzDu7IfIog5m8+QNC+7LqDZsz88vDKOrsoySVOmUCgmCKFXew+LA+eO/iQEJTr
7ensddOeXJEp27Ed5vW+kmWW3Qmglc2Gwy8wFrMDIqnrnOzBA4oCnDEgtXJt0zog
nRwbfEMAWi1aQRy5dT9KA3SP9mo5SeTFSzGGHiE4s4gHUe7jvsAFF2qgtD6+wH6s
z9b6shxnC7g5IlBKhI7SVB/Uqt2ydJ+kH1YbjMcIq6NAM5eNMKgZuJr3+zwsSgwh
GNaE
-----END CERTIFICATE-----`))
testRSA2048Certificate = block.Bytes
block, _ = pem.Decode([]byte(`
-----BEGIN RSA PRIVATE KEY-----
MIIEpAIBAAKCAQEAn/H5NDeonggV+CDlSHyorbeJBnDkC3opgn7I6qh3MrDCFRhL
RUjOhOAMteNX/+yKnVtFLSKjW+D14UTiA+8g8UdvOMS+l2wrr/7eYT91HHoWHN4Z
i4slXTJGUImVrX1FCX/9ajrTPqITYOfS0VqeHfuol9JWtSS9xMAiHJJDYyfYUGxe
z29gkvhEvWKOeSgGqfCjm5aXEfDAUjd/G6p8I30HQl7zhDo2pth1MSsxBV+TAPnw
NJnuAoqAnAkS1ipfhqDfRfHwAX6Gfk/tnmpeAy7jvea10fEqziFQYd1xTq/0VYY7
V3DVRWSnLw7enevFeowCgGCpuZ5Y/Rj6XeL/qwIDAQABAoIBAQCNpMZifd/vg42h
HdCvLuZaYS0R7SunFlpoXEsltGdLFsnp0IfoJZ/ugFQBSAIIfLwMumU6oXA1z7Uv
98aIYV61DePrTCDVDFBsHbNmP8JAo8WtbusEbwd5zyoB7LYG2+clkJklWE73KqUq
rmI+UJeyScl2Gin7ZTxBXz1WPBk9VwcnwkeaXpgASIBW23fhECM9gnYEEwaBez5T
6Me8d1tHtYQv7vsKe7ro9w9/HKrRXejqYKK1LxkhfFriyV+m8LZJZn2nXOa6G3gF
Nb8Qk1Uk5PUBENBmyMFJhT4M/uuSq4YtMrrO2gi8Q+fPhuGzc5SshYKRBp0W4P5r
mtVCtEFRAoGBAMENBIFLrV2+HsGj0xYFasKov/QPe6HSTR1Hh2IZONp+oK4oszWE
jBT4VcnITmpl6tC1Wy4GcrxjNgKIFZAj+1x1LUULdorXkuG8yr0tAhG9zNyfWsSy
PrSovC0UVbzr8Jxxla+kQVxEQQqWQxPlEVuL8kXaIDA6Lyt1Hpua2LvPAoGBANQZ
c6Lq2T7+BxLxNdi2m8kZzej5kgzBp/XdVsbFWRlebIX2KrFHsrHzT9PUk3DE1vZK
M6pzTt94nQhWSkDgCaw1SohElJ3HFIFwcusF1SJAc3pQepd8ug6IYdlpDMLtBj/P
/5P6BVUtgo05E4+I/T3iYatmglQxTtlZ0RkSV2llAoGBALOXkKFX7ahPvf0WksDh
uTfuFOTPoowgQG0EpgW0wRdCxeg/JLic3lSD0gsttQV2WsRecryWcxaelRg10RmO
38BbogmhaF4xvgsSvujOfiZTE8oK1T43M+6NKsIlML3YILbpU/9aJxPWy0s2DqDr
cQJhZrlk+pzjBA7Bnf/URdwxAoGAKR/CNw14D+mrL3YLbbiCXiydqxVwxv5pdZdz
8thi3TNcsWC4iGURdcVqbfUinVPdJiXe/Kac3WGCeRJaFVgbKAOxLti1RB5MkIhg
D8eyupBqk4W1L1gkrxqsdj4TFlxkwMywjl2E2S4YyQ8PBt6V04DoVRZsIKzqz+PF
UionPq0CgYBCYXvqioJhPewkOq/Y5wrDBeZW1FQK5QD9W5M8/5zxd4rdvJtjhbJp
oOrtvMdrl6upy9Hz4BJD3FXwVFiPFE7jqeNqi0F21viLxBPMMD3UODF6LL5EyLiR
9V4xVMS8KXxvg7rxsuqzMPscViaWUL6WNVBhsD2+92dHxSXzz5EJKQ==
-----END RSA PRIVATE KEY-----`))
var err error
testRSA2048PrivateKey, err = x509.ParsePKCS1PrivateKey(block.Bytes)
if err != nil {
panic(err)
}
} | func isBoringVersion(v uint16) bool {
return v == VersionTLS12
}
|
getTitle.js | /* @flow */ | export default (type: $PropertyType<Transaction, 'type'>) =>
type.substring(0, type.length - TRANSACTION_LENGTH); | import { type Transaction } from '@neo-one/client-common';
const TRANSACTION_LENGTH = 'Transaction'.length;
|
aptpkg.py | # -*- coding: utf-8 -*-
'''
Package management operations specific to APT- and DEB-based systems
====================================================================
'''
from __future__ import absolute_import
# Import python libs
import logging
# Import salt libs
import salt.utils
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'apt'
def __virtual__():
    '''
    Only work on apt-based platforms with pkg.get_selections
    '''
    # Load this state module only when the pkg module exposes get_selections,
    # which is the case on apt/dpkg based platforms.
    if __salt__.get('pkg.get_selections', False):
        return __virtualname__
    return False
def held(name):
    '''
    Set package in 'hold' state, meaning it will not be upgraded.

    name
        The name of the package, e.g., 'tmux'
    '''
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
    # Current dpkg selection state for this package (empty dict if unknown).
    state = __salt__['pkg.get_selections'](
        pattern=name,
    )
    if not state:
        ret.update(comment='Package {0} does not have a state'.format(name))
    elif not salt.utils.is_true(state.get('hold', False)):
        if not __opts__['test']:
            # Apply the hold and report the resulting change.
            result = __salt__['pkg.set_selections'](
                selection={'hold': [name]}
            )
            ret.update(changes=result[name],
                       result=True,
                       comment='Package {0} is now being held'.format(name))
        else:
            # Test mode: report what would change without applying it.
            ret.update(result=None,
                       comment='Package {0} is set to be held'.format(name))
    else:
        ret.update(result=True,
                   comment='Package {0} is already held'.format(name))
    return ret
client.rs | // Copyright 2018 Google LLC
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
//! Provides a client that connects to a server and sends multiplexed requests.
mod in_flight_requests;
use crate::{context, trace, ClientMessage, Request, Response, ServerError, Transport};
use futures::{prelude::*, ready, stream::Fuse, task::*};
use in_flight_requests::{DeadlineExceededError, InFlightRequests};
use pin_project::pin_project;
use std::{
convert::TryFrom,
error::Error,
fmt, mem,
pin::Pin,
sync::{
atomic::{AtomicUsize, Ordering},
Arc,
},
};
use tokio::sync::{mpsc, oneshot};
use tracing::Span;
/// Settings that control the behavior of the client.
///
/// The struct is `#[non_exhaustive]`; construct it via [`Config::default`]
/// and adjust individual fields as needed.
#[derive(Clone, Debug)]
#[non_exhaustive]
pub struct Config {
    /// The number of requests that can be in flight at once.
    /// `max_in_flight_requests` controls the size of the map used by the client
    /// for storing pending requests.
    pub max_in_flight_requests: usize,
    /// The number of requests that can be buffered client-side before being sent.
    /// `pending_requests_buffer` controls the size of the channel clients use
    /// to communicate with the request dispatch task.
    pub pending_request_buffer: usize,
}
impl Default for Config {
fn default() -> Self {
Config {
max_in_flight_requests: 1_000,
pending_request_buffer: 100,
}
}
}
/// A channel and dispatch pair. The dispatch drives the sending and receiving of requests
/// and must be polled continuously or spawned.
pub struct NewClient<C, D> {
    /// The new client.
    pub client: C,
    /// The client's dispatch. No request made on `client` progresses until
    /// this future is driven.
    pub dispatch: D,
}
impl<C, D, E> NewClient<C, D>
where
    D: Future<Output = Result<(), E>> + Send + 'static,
    E: std::error::Error + Send + Sync + 'static,
{
    /// Helper method to spawn the dispatch on the default executor.
    ///
    /// A dispatch error (broken connection) is logged at warn level rather
    /// than propagated; the returned client will then observe `Disconnected`.
    #[cfg(feature = "tokio1")]
    #[cfg_attr(docsrs, doc(cfg(feature = "tokio1")))]
    pub fn spawn(self) -> C {
        let dispatch = self.dispatch.unwrap_or_else(move |e| {
            let e = anyhow::Error::new(e);
            tracing::warn!("Connection broken: {:?}", e);
        });
        tokio::spawn(dispatch);
        self.client
    }
}
/// Opaque Debug impl: neither the client nor the dispatch is required to be
/// Debug, so only the type name is printed.
impl<C, D> fmt::Debug for NewClient<C, D> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("NewClient")
    }
}
// Compile-time guard: request IDs are allocated from an AtomicUsize and then
// converted to u64, so usize must not be wider than u64.
#[allow(dead_code)]
#[allow(clippy::no_effect)]
const CHECK_USIZE: () = {
    if std::mem::size_of::<usize>() > std::mem::size_of::<u64>() {
        // TODO: replace this with panic!() as soon as RFC 2345 gets stabilized
        ["usize is too big to fit in u64"][42];
    }
};
/// Handles communication from the client to request dispatch.
#[derive(Debug)]
pub struct Channel<Req, Resp> {
    /// Sends staged requests to the dispatch task.
    to_dispatch: mpsc::Sender<DispatchRequest<Req, Resp>>,
    /// Channel to send a cancel message to the dispatcher.
    cancellation: RequestCancellation,
    /// The ID to use for the next request to stage. Shared (and atomically
    /// incremented) across all clones of this channel.
    next_request_id: Arc<AtomicUsize>,
}
impl<Req, Resp> Clone for Channel<Req, Resp> {
fn clone(&self) -> Self {
Self {
to_dispatch: self.to_dispatch.clone(),
cancellation: self.cancellation.clone(),
next_request_id: self.next_request_id.clone(),
}
}
}
impl<Req, Resp> Channel<Req, Resp> {
    /// Sends a request to the dispatch task to forward to the server, returning a [`Future`] that
    /// resolves to the response.
    #[tracing::instrument(
        name = "RPC",
        skip(self, ctx, request_name, request),
        fields(
            rpc.trace_id = tracing::field::Empty,
            otel.kind = "client",
            otel.name = request_name)
    )]
    pub async fn call(
        &self,
        mut ctx: context::Context,
        request_name: &str,
        request: Req,
    ) -> Result<Resp, RpcError> {
        let span = Span::current();
        // Derive the request's trace context from the current span; without an
        // OpenTelemetry subscriber, fall back to a fresh unsampled child.
        ctx.trace_context = trace::Context::try_from(&span).unwrap_or_else(|_| {
            tracing::warn!(
                "OpenTelemetry subscriber not installed; making unsampled child context."
            );
            ctx.trace_context.new_child()
        });
        span.record("rpc.trace_id", &tracing::field::display(ctx.trace_id()));
        let (response_completion, mut response) = oneshot::channel();
        // IDs come from a shared atomic counter; usize fits in u64 (see
        // CHECK_USIZE), so the conversion cannot fail.
        let request_id =
            u64::try_from(self.next_request_id.fetch_add(1, Ordering::Relaxed)).unwrap();
        // ResponseGuard impls Drop to cancel in-flight requests. It should be created before
        // sending out the request; otherwise, the response future could be dropped after the
        // request is sent out but before ResponseGuard is created, rendering the cancellation
        // logic inactive.
        let response_guard = ResponseGuard {
            response: &mut response,
            request_id,
            cancellation: &self.cancellation,
        };
        self.to_dispatch
            .send(DispatchRequest {
                ctx,
                span,
                request_id,
                request,
                response_completion,
            })
            .await
            // A send error means the dispatch task (and connection) is gone.
            .map_err(|mpsc::error::SendError(_)| RpcError::Disconnected)?;
        response_guard.response().await
    }
}
/// A server response that is completed by request dispatch when the corresponding response
/// arrives off the wire.
struct ResponseGuard<'a, Resp> {
    /// Receives the response (or deadline error) from the dispatch task.
    response: &'a mut oneshot::Receiver<Result<Response<Resp>, DeadlineExceededError>>,
    /// Used by the Drop impl to cancel the request if it is still in flight.
    cancellation: &'a RequestCancellation,
    /// ID of the request this guard watches over.
    request_id: u64,
}
/// An error that can occur in the processing of an RPC. This is not request-specific errors but
/// rather cross-cutting errors that can always occur.
#[derive(thiserror::Error, Debug)]
pub enum RpcError {
    /// The client disconnected from the server. Also returned when the
    /// dispatch task has shut down before the response arrived.
    #[error("the client disconnected from the server")]
    Disconnected,
    /// The request exceeded its deadline.
    #[error("the request exceeded its deadline")]
    DeadlineExceeded,
    /// The server aborted request processing.
    #[error("the server aborted request processing")]
    Server(#[from] ServerError),
}
impl From<DeadlineExceededError> for RpcError {
fn from(_: DeadlineExceededError) -> Self {
RpcError::DeadlineExceeded
}
}
impl<Resp> ResponseGuard<'_, Resp> {
    /// Awaits the response, converting transport-level outcomes into
    /// [`RpcError`] and disarming the guard's Drop-cancellation on receipt.
    async fn response(mut self) -> Result<Resp, RpcError> {
        let response = (&mut self.response).await;
        // Cancel drop logic once a response has been received.
        mem::forget(self);
        match response {
            Ok(resp) => Ok(resp?.message?),
            Err(oneshot::error::RecvError { .. }) => {
                // The oneshot is Canceled when the dispatch task ends. In that case,
                // there's nothing listening on the other side, so there's no point in
                // propagating cancellation.
                Err(RpcError::Disconnected)
            }
        }
    }
}
// Cancels the request when dropped, if not already complete.
impl<Resp> Drop for ResponseGuard<'_, Resp> {
    fn drop(&mut self) {
        // The receiver needs to be closed to handle the edge case that the request has not
        // yet been received by the dispatch task. It is possible for the cancel message to
        // arrive before the request itself, in which case the request could get stuck in the
        // dispatch map forever if the server never responds (e.g. if the server dies while
        // responding). Even if the server does respond, it will have unnecessarily done work
        // for a client no longer waiting for a response. To avoid this, the dispatch task
        // checks if the receiver is closed before inserting the request in the map. By
        // closing the receiver before sending the cancel message, it is guaranteed that if the
        // dispatch task misses an early-arriving cancellation message, then it will see the
        // receiver as closed.
        // Order matters here: close first, then cancel (see above).
        self.response.close();
        self.cancellation.cancel(self.request_id);
    }
}
/// Returns a channel and dispatcher that manages the lifecycle of requests initiated by the
/// channel.
///
/// The returned [`RequestDispatch`] must be driven (polled or spawned) for any request made
/// on the returned [`Channel`] to make progress.
pub fn new<Req, Resp, C>(
    config: Config,
    transport: C,
) -> NewClient<Channel<Req, Resp>, RequestDispatch<Req, Resp, C>>
where
    C: Transport<ClientMessage<Req>, Response<Resp>>,
{
    // Requests are staged on a bounded channel between Channel handles and the
    // dispatch task; the bound provides client-side backpressure.
    let (to_dispatch, pending_requests) = mpsc::channel(config.pending_request_buffer);
    let (cancellation, canceled_requests) = cancellations();
    // (Previous version rebound `canceled_requests` to itself here; that
    // no-op statement has been removed.)
    NewClient {
        client: Channel {
            to_dispatch,
            cancellation,
            next_request_id: Arc::new(AtomicUsize::new(0)),
        },
        dispatch: RequestDispatch {
            config,
            canceled_requests,
            transport: transport.fuse(),
            in_flight_requests: InFlightRequests::default(),
            pending_requests,
        },
    }
}
/// Handles the lifecycle of requests, writing requests to the wire, managing cancellations,
/// and dispatching responses to the appropriate channel.
///
/// Must be polled (or spawned) for the paired [`Channel`] to make progress.
#[pin_project]
#[derive(Debug)]
pub struct RequestDispatch<Req, Resp, C> {
    /// Writes requests to the wire and reads responses off the wire.
    #[pin]
    transport: Fuse<C>,
    /// Requests waiting to be written to the wire.
    pending_requests: mpsc::Receiver<DispatchRequest<Req, Resp>>,
    /// Requests that were dropped.
    canceled_requests: CanceledRequests,
    /// Requests already written to the wire that haven't yet received responses.
    in_flight_requests: InFlightRequests<Resp>,
    /// Configures limits to prevent unlimited resource usage.
    config: Config,
}
/// Critical errors that result in a Channel disconnecting.
///
/// Each variant wraps the underlying transport error `E`, except [`Timer`],
/// which wraps a timer error from polling expired requests.
#[derive(thiserror::Error, Debug)]
pub enum ChannelError<E>
where
    E: Error + Send + Sync + 'static,
{
    /// Could not read from the transport.
    #[error("could not read from the transport")]
    Read(#[source] E),
    /// Could not ready the transport for writes.
    #[error("could not ready the transport for writes")]
    Ready(#[source] E),
    /// Could not write to the transport.
    #[error("could not write to the transport")]
    Write(#[source] E),
    /// Could not flush the transport.
    #[error("could not flush the transport")]
    Flush(#[source] E),
    /// Could not poll expired requests.
    #[error("could not poll expired requests")]
    Timer(#[source] tokio::time::error::Error),
}
impl<Req, Resp, C> RequestDispatch<Req, Resp, C>
where
C: Transport<ClientMessage<Req>, Response<Resp>>,
{
fn in_flight_requests<'a>(self: &'a mut Pin<&mut Self>) -> &'a mut InFlightRequests<Resp> {
self.as_mut().project().in_flight_requests
}
fn transport_pin_mut<'a>(self: &'a mut Pin<&mut Self>) -> Pin<&'a mut Fuse<C>> {
self.as_mut().project().transport
}
fn poll_ready<'a>(
self: &'a mut Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Result<(), ChannelError<C::Error>>> {
self.transport_pin_mut()
.poll_ready(cx)
.map_err(ChannelError::Ready)
}
fn start_send(
self: &mut Pin<&mut Self>,
message: ClientMessage<Req>,
) -> Result<(), ChannelError<C::Error>> {
self.transport_pin_mut()
.start_send(message)
.map_err(ChannelError::Write)
}
fn poll_flush<'a>(
self: &'a mut Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Result<(), ChannelError<C::Error>>> {
self.transport_pin_mut()
.poll_flush(cx)
.map_err(ChannelError::Flush)
}
fn canceled_requests_mut<'a>(self: &'a mut Pin<&mut Self>) -> &'a mut CanceledRequests {
self.as_mut().project().canceled_requests
}
fn pending_requests_mut<'a>(
self: &'a mut Pin<&mut Self>,
) -> &'a mut mpsc::Receiver<DispatchRequest<Req, Resp>> {
self.as_mut().project().pending_requests
}
fn pump_read(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(), ChannelError<C::Error>>>> {
self.transport_pin_mut()
.poll_next(cx)
.map_err(ChannelError::Read)
.map_ok(|response| {
self.complete(response);
})
}
fn pump_write(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(), ChannelError<C::Error>>>> {
enum ReceiverStatus {
Pending,
Closed,
}
let pending_requests_status = match self.as_mut().poll_write_request(cx)? {
Poll::Ready(Some(())) => return Poll::Ready(Some(Ok(()))),
Poll::Ready(None) => ReceiverStatus::Closed,
Poll::Pending => ReceiverStatus::Pending,
};
let canceled_requests_status = match self.as_mut().poll_write_cancel(cx)? {
Poll::Ready(Some(())) => return Poll::Ready(Some(Ok(()))),
Poll::Ready(None) => ReceiverStatus::Closed,
Poll::Pending => ReceiverStatus::Pending,
};
// Receiving Poll::Ready(None) when polling expired requests never indicates "Closed",
// because there can temporarily be zero in-flight rquests. Therefore, there is no need to
// track the status like is done with pending and cancelled requests.
if let Poll::Ready(Some(_)) = self
.in_flight_requests()
.poll_expired(cx)
.map_err(ChannelError::Timer)?
{
// Expired requests are considered complete; there is no compelling reason to send a
// cancellation message to the server, since it will have already exhausted its
// allotted processing time.
return Poll::Ready(Some(Ok(())));
}
match (pending_requests_status, canceled_requests_status) {
(ReceiverStatus::Closed, ReceiverStatus::Closed) => {
ready!(self.poll_flush(cx)?);
Poll::Ready(None)
}
(ReceiverStatus::Pending, _) | (_, ReceiverStatus::Pending) => {
// No more messages to process, so flush any messages buffered in the transport.
ready!(self.poll_flush(cx)?);
// Even if we fully-flush, we return Pending, because we have no more requests
// or cancellations right now.
Poll::Pending
}
}
}
/// Yields the next pending request, if one is ready to be sent.
///
/// Note that a request will only be yielded if the transport is *ready* to be written to (i.e.
/// start_send would succeed).
fn poll_next_request(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<DispatchRequest<Req, Resp>, ChannelError<C::Error>>>> {
if self.in_flight_requests().len() >= self.config.max_in_flight_requests {
tracing::info!(
"At in-flight request capacity ({}/{}).",
self.in_flight_requests().len(),
self.config.max_in_flight_requests
);
// No need to schedule a wakeup, because timers and responses are responsible
// for clearing out in-flight requests.
return Poll::Pending;
}
ready!(self.ensure_writeable(cx)?);
loop {
match ready!(self.pending_requests_mut().poll_recv(cx)) {
Some(request) => {
if request.response_completion.is_closed() {
let _entered = request.span.enter();
tracing::info!("AbortRequest");
continue;
}
return Poll::Ready(Some(Ok(request)));
}
None => return Poll::Ready(None),
}
}
}
/// Yields the next pending cancellation, and, if one is ready, cancels the associated request.
///
/// Note that a request to cancel will only be yielded if the transport is *ready* to be
/// written to (i.e. start_send would succeed).
fn poll_next_cancellation(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(context::Context, Span, u64), ChannelError<C::Error>>>> |
/// Returns Ready if writing a message to the transport (i.e. via write_request or
/// write_cancel) would not fail due to a full buffer. If the transport is not ready to be
/// written to, flushes it until it is ready.
fn ensure_writeable<'a>(
self: &'a mut Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(), ChannelError<C::Error>>>> {
while self.poll_ready(cx)?.is_pending() {
ready!(self.poll_flush(cx)?);
}
Poll::Ready(Some(Ok(())))
}
fn poll_write_request<'a>(
self: &'a mut Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(), ChannelError<C::Error>>>> {
let DispatchRequest {
ctx,
span,
request_id,
request,
response_completion,
} = match ready!(self.as_mut().poll_next_request(cx)?) {
Some(dispatch_request) => dispatch_request,
None => return Poll::Ready(None),
};
let entered = span.enter();
// poll_next_request only returns Ready if there is room to buffer another request.
// Therefore, we can call write_request without fear of erroring due to a full
// buffer.
let request_id = request_id;
let request = ClientMessage::Request(Request {
id: request_id,
message: request,
context: context::Context {
deadline: ctx.deadline,
trace_context: ctx.trace_context,
},
});
self.start_send(request)?;
let deadline = ctx.deadline;
tracing::info!(
tarpc.deadline = %humantime::format_rfc3339(deadline),
"SendRequest"
);
drop(entered);
self.in_flight_requests()
.insert_request(request_id, ctx, span, response_completion)
.expect("Request IDs should be unique");
Poll::Ready(Some(Ok(())))
}
fn poll_write_cancel<'a>(
self: &'a mut Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Option<Result<(), ChannelError<C::Error>>>> {
let (context, span, request_id) = match ready!(self.as_mut().poll_next_cancellation(cx)?) {
Some(triple) => triple,
None => return Poll::Ready(None),
};
let _entered = span.enter();
let cancel = ClientMessage::Cancel {
trace_context: context.trace_context,
request_id,
};
self.start_send(cancel)?;
tracing::info!("CancelRequest");
Poll::Ready(Some(Ok(())))
}
/// Sends a server response to the client task that initiated the associated request.
fn complete(mut self: Pin<&mut Self>, response: Response<Resp>) -> bool {
self.in_flight_requests().complete_request(response)
}
}
impl<Req, Resp, C> Future for RequestDispatch<Req, Resp, C>
where
C: Transport<ClientMessage<Req>, Response<Resp>>,
{
type Output = Result<(), ChannelError<C::Error>>;
fn poll(
mut self: Pin<&mut Self>,
cx: &mut Context<'_>,
) -> Poll<Result<(), ChannelError<C::Error>>> {
loop {
match (self.as_mut().pump_read(cx)?, self.as_mut().pump_write(cx)?) {
(Poll::Ready(None), _) => {
tracing::info!("Shutdown: read half closed, so shutting down.");
return Poll::Ready(Ok(()));
}
(read, Poll::Ready(None)) => {
if self.in_flight_requests.is_empty() {
tracing::info!("Shutdown: write half closed, and no requests in flight.");
return Poll::Ready(Ok(()));
}
tracing::info!(
"Shutdown: write half closed, and {} requests in flight.",
self.in_flight_requests().len()
);
match read {
Poll::Ready(Some(())) => continue,
_ => return Poll::Pending,
}
}
(Poll::Ready(Some(())), _) | (_, Poll::Ready(Some(()))) => {}
_ => return Poll::Pending,
}
}
}
}
/// A server-bound request sent from a [`Channel`] to request dispatch, which will then manage
/// the lifecycle of the request.
#[derive(Debug)]
struct DispatchRequest<Req, Resp> {
pub ctx: context::Context,
pub span: Span,
pub request_id: u64,
pub request: Req,
pub response_completion: oneshot::Sender<Result<Response<Resp>, DeadlineExceededError>>,
}
/// Sends request cancellation signals.
#[derive(Debug, Clone)]
struct RequestCancellation(mpsc::UnboundedSender<u64>);
/// A stream of IDs of requests that have been canceled.
#[derive(Debug)]
struct CanceledRequests(mpsc::UnboundedReceiver<u64>);
/// Returns a channel to send request cancellation messages.
fn cancellations() -> (RequestCancellation, CanceledRequests) {
// Unbounded because messages are sent in the drop fn. This is fine, because it's still
// bounded by the number of in-flight requests.
let (tx, rx) = mpsc::unbounded_channel();
(RequestCancellation(tx), CanceledRequests(rx))
}
impl RequestCancellation {
/// Cancels the request with ID `request_id`.
fn cancel(&self, request_id: u64) {
let _ = self.0.send(request_id);
}
}
impl CanceledRequests {
fn poll_recv(&mut self, cx: &mut Context<'_>) -> Poll<Option<u64>> {
self.0.poll_recv(cx)
}
}
impl Stream for CanceledRequests {
type Item = u64;
fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<u64>> {
self.poll_recv(cx)
}
}
#[cfg(test)]
mod tests {
use super::{
cancellations, CanceledRequests, Channel, DispatchRequest, RequestCancellation,
RequestDispatch, ResponseGuard,
};
use crate::{
client::{
in_flight_requests::{DeadlineExceededError, InFlightRequests},
Config,
},
context,
transport::{self, channel::UnboundedChannel},
ClientMessage, Response,
};
use assert_matches::assert_matches;
use futures::{prelude::*, task::*};
use std::{
convert::TryFrom,
pin::Pin,
sync::atomic::{AtomicUsize, Ordering},
sync::Arc,
};
use tokio::sync::{mpsc, oneshot};
use tracing::Span;
#[tokio::test]
async fn response_completes_request_future() {
let (mut dispatch, mut _channel, mut server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
dispatch
.in_flight_requests
.insert_request(0, context::current(), Span::current(), tx)
.unwrap();
server_channel
.send(Response {
request_id: 0,
message: Ok("Resp".into()),
})
.await
.unwrap();
assert_matches!(dispatch.as_mut().poll(cx), Poll::Pending);
assert_matches!(rx.try_recv(), Ok(Ok(Response { request_id: 0, message: Ok(resp) })) if resp == "Resp");
}
#[tokio::test]
async fn dispatch_response_cancels_on_drop() {
let (cancellation, mut canceled_requests) = cancellations();
let (_, mut response) = oneshot::channel();
drop(ResponseGuard::<u32> {
response: &mut response,
cancellation: &cancellation,
request_id: 3,
});
// resp's drop() is run, which should send a cancel message.
let cx = &mut Context::from_waker(&noop_waker_ref());
assert_eq!(canceled_requests.0.poll_recv(cx), Poll::Ready(Some(3)));
}
#[tokio::test]
async fn dispatch_response_doesnt_cancel_after_complete() {
let (cancellation, mut canceled_requests) = cancellations();
let (tx, mut response) = oneshot::channel();
tx.send(Ok(Response {
request_id: 0,
message: Ok("well done"),
}))
.unwrap();
// resp's drop() is run, but should not send a cancel message.
ResponseGuard {
response: &mut response,
cancellation: &cancellation,
request_id: 3,
}
.response()
.await
.unwrap();
drop(cancellation);
let cx = &mut Context::from_waker(&noop_waker_ref());
assert_eq!(canceled_requests.0.poll_recv(cx), Poll::Ready(None));
}
#[tokio::test]
async fn stage_request() {
let (mut dispatch, mut channel, _server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
let _resp = send_request(&mut channel, "hi", tx, &mut rx).await;
let req = dispatch.as_mut().poll_next_request(cx).ready();
assert!(req.is_some());
let req = req.unwrap();
assert_eq!(req.request_id, 0);
assert_eq!(req.request, "hi".to_string());
}
// Regression test for https://github.com/google/tarpc/issues/220
#[tokio::test]
async fn stage_request_channel_dropped_doesnt_panic() {
let (mut dispatch, mut channel, mut server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
let _ = send_request(&mut channel, "hi", tx, &mut rx).await;
drop(channel);
assert!(dispatch.as_mut().poll(cx).is_ready());
send_response(
&mut server_channel,
Response {
request_id: 0,
message: Ok("hello".into()),
},
)
.await;
dispatch.await.unwrap();
}
#[tokio::test]
async fn stage_request_response_future_dropped_is_canceled_before_sending() {
let (mut dispatch, mut channel, _server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
let _ = send_request(&mut channel, "hi", tx, &mut rx).await;
// Drop the channel so polling returns none if no requests are currently ready.
drop(channel);
// Test that a request future dropped before it's processed by dispatch will cause the request
// to not be added to the in-flight request map.
assert!(dispatch.as_mut().poll_next_request(cx).ready().is_none());
}
#[tokio::test]
async fn stage_request_response_future_dropped_is_canceled_after_sending() {
let (mut dispatch, mut channel, _server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
let req = send_request(&mut channel, "hi", tx, &mut rx).await;
assert!(dispatch.as_mut().pump_write(cx).ready().is_some());
assert!(!dispatch.in_flight_requests.is_empty());
// Test that a request future dropped after it's processed by dispatch will cause the request
// to be removed from the in-flight request map.
drop(req);
assert_matches!(
dispatch.as_mut().poll_next_cancellation(cx),
Poll::Ready(Some(Ok(_)))
);
assert!(dispatch.in_flight_requests.is_empty());
}
#[tokio::test]
async fn stage_request_response_closed_skipped() {
let (mut dispatch, mut channel, _server_channel) = set_up();
let cx = &mut Context::from_waker(&noop_waker_ref());
let (tx, mut rx) = oneshot::channel();
// Test that a request future that's closed its receiver but not yet canceled its request --
// i.e. still in `drop fn` -- will cause the request to not be added to the in-flight request
// map.
let resp = send_request(&mut channel, "hi", tx, &mut rx).await;
resp.response.close();
assert!(dispatch.as_mut().poll_next_request(cx).is_pending());
}
fn set_up() -> (
Pin<
Box<
RequestDispatch<
String,
String,
UnboundedChannel<Response<String>, ClientMessage<String>>,
>,
>,
>,
Channel<String, String>,
UnboundedChannel<ClientMessage<String>, Response<String>>,
) {
let _ = tracing_subscriber::fmt().with_test_writer().try_init();
let (to_dispatch, pending_requests) = mpsc::channel(1);
let (cancel_tx, canceled_requests) = mpsc::unbounded_channel();
let (client_channel, server_channel) = transport::channel::unbounded();
let dispatch = RequestDispatch::<String, String, _> {
transport: client_channel.fuse(),
pending_requests: pending_requests,
canceled_requests: CanceledRequests(canceled_requests),
in_flight_requests: InFlightRequests::default(),
config: Config::default(),
};
let cancellation = RequestCancellation(cancel_tx);
let channel = Channel {
to_dispatch,
cancellation,
next_request_id: Arc::new(AtomicUsize::new(0)),
};
(Box::pin(dispatch), channel, server_channel)
}
async fn send_request<'a>(
channel: &'a mut Channel<String, String>,
request: &str,
response_completion: oneshot::Sender<Result<Response<String>, DeadlineExceededError>>,
response: &'a mut oneshot::Receiver<Result<Response<String>, DeadlineExceededError>>,
) -> ResponseGuard<'a, String> {
let request_id =
u64::try_from(channel.next_request_id.fetch_add(1, Ordering::Relaxed)).unwrap();
let request = DispatchRequest {
ctx: context::current(),
span: Span::current(),
request_id,
request: request.to_string(),
response_completion,
};
channel.to_dispatch.send(request).await.unwrap();
ResponseGuard {
response,
cancellation: &channel.cancellation,
request_id,
}
}
async fn send_response(
channel: &mut UnboundedChannel<ClientMessage<String>, Response<String>>,
response: Response<String>,
) {
channel.send(response).await.unwrap();
}
trait PollTest {
type T;
fn unwrap(self) -> Poll<Self::T>;
fn ready(self) -> Self::T;
}
impl<T, E> PollTest for Poll<Option<Result<T, E>>>
where
E: ::std::fmt::Display,
{
type T = Option<T>;
fn unwrap(self) -> Poll<Option<T>> {
match self {
Poll::Ready(Some(Ok(t))) => Poll::Ready(Some(t)),
Poll::Ready(None) => Poll::Ready(None),
Poll::Ready(Some(Err(e))) => panic!("{}", e.to_string()),
Poll::Pending => Poll::Pending,
}
}
fn ready(self) -> Option<T> {
match self {
Poll::Ready(Some(Ok(t))) => Some(t),
Poll::Ready(None) => None,
Poll::Ready(Some(Err(e))) => panic!("{}", e.to_string()),
Poll::Pending => panic!("Pending"),
}
}
}
}
| {
ready!(self.ensure_writeable(cx)?);
loop {
match ready!(self.canceled_requests_mut().poll_next_unpin(cx)) {
Some(request_id) => {
if let Some((ctx, span)) = self.in_flight_requests().cancel_request(request_id)
{
return Poll::Ready(Some(Ok((ctx, span, request_id))));
}
}
None => return Poll::Ready(None),
}
}
} |
uint128-9-read-big-endian.rs | extern crate byteorder_1_2_6 ; extern crate lolbench_support ; use
lolbench_support :: { criterion_from_env , init_logging } ; fn | ( ) {
init_logging ( ) ; let mut crit = criterion_from_env ( ) ; byteorder_1_2_6
:: uint128_9 :: read_big_endian ( & mut crit ) ; } | main |
csel.py | #!/usr/bin/env python3
import argparse
import copy
import datetime
#from firecloud import fiss
import json
import operator
import subprocess
import sys
import time
#print(fiss.meth_list(args=argparse.Namespace()))
import firecloud.api as fapi
SEL_NAMESPACE='um1-encode-y2s1'
SEL_WORKSPACE='selection-sim'
#dir(fapi)
#help(fapi)
z = fapi.list_workspace_configs(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, allRepos=True).json()
print(z)
z = fapi.get_workspace_config(workspace=SEL_WORKSPACE, namespace=SEL_NAMESPACE,
config='dockstore-tool-cms2', cnamespace=SEL_NAMESPACE)
print('CONFIG_IS', z, z.json())
def | (fname, value):
"""store string in file"""
with open(fname, 'w') as out:
out.write(str(value))
#z = fapi.create_submission(wnamespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE,
# cnamespace=SEL_NAMESPACE, config='dockstore-tool-cosi2')
#print('SUBMISSION IS', z, z.json())
#z = fapi.get_config_template(namespace='dockstore', method='dockstore-tool-cosi2', version=1)
#print(z.json())
def _pretty_print_json(json_dict, sort_keys=True):
"""Return a pretty-printed version of a dict converted to json, as a string."""
return json.dumps(json_dict, indent=4, separators=(',', ': '), sort_keys=sort_keys)
def _write_json(fname, **json_dict):
dump_file(fname=fname, value=_pretty_print_json(json_dict))
print('converting', fname, 'to org')
subprocess.check_call(f'./util/to_org.sh {fname}', shell=True)
print('converted', fname, 'to org')
def get_workflow_metadata_gz(namespace, workspace, submission_id, workflow_id):
"""Request the metadata for a workflow in a submission.
Args:
namespace (str): project to which workspace belongs
workspace (str): Workspace name
submission_id (str): Submission's unique identifier
workflow_id (str): Workflow's unique identifier.
Swagger:
https://api.firecloud.org/#!/Submissions/workflowMetadata
"""
uri = "workspaces/{0}/{1}/submissions/{2}/workflows/{3}".format(namespace,
workspace, submission_id, workflow_id)
headers = copy.deepcopy(fapi._fiss_agent_header())
headers.update({'Accept-Encoding': 'gzip', 'User-Agent': 'gzip'})
return fapi.__get(uri, headers=headers)
#print('ENTITIES ARE', fapi.list_entity_types(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE).json())
z = fapi.list_submissions(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE)
#print('SUBMISSIONS ARE', z, z.json())
_write_json('tmp/submissions.json', **{'result': list(z.json())})
tot_time = 0
for submission_idx, s in enumerate(sorted(list(z.json()), key=operator.itemgetter('submissionDate'), reverse=True)):
print('looking at submission from', s['submissionDate'])
submission_date = s['submissionDate']
if not submission_date.startswith(datetime.datetime.now().strftime('%Y-%m-%d')):
print('skipping submission date ', submission_date)
continue
print('====================================================')
print(s)
print('getting submission')
submission_id = s['submissionId']
y = fapi.get_submission(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=submission_id).json()
print('got submission')
_write_json(f'tmp/{submission_date}.{submission_idx}.{submission_id}.subm.json', **y)
if 'workflowId' not in y['workflows'][0]:
print('workflow ID missing from submission!')
continue
print('getting workflow metadata for workflow id ', y['workflows'][0]['workflowId'])
beg = time.time()
zz_result = get_workflow_metadata_gz(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=submission_id,
workflow_id=y['workflows'][0]['workflowId'])
print('ZZ_RESULT: ', type(zz_result), dir(zz_result), zz_result)
for f in dir(zz_result):
print(' ', f, ' = ', getattr(zz_result, f))
print('ZZ_RESULT.raw: ', type(zz_result.raw), dir(zz_result.raw), zz_result.raw)
for f in dir(zz_result.raw):
print(' ', f, ' = ', getattr(zz_result.raw, f))
print('converting workflow metadata to json')
try:
zz = zz_result.json()
except Exception as e:
print('Error converting to json:', e)
zz = {}
tot_time += (time.time() - beg)
print('saving workflow metadata')
_write_json(f'tmp/{submission_date}.{submission_idx}.{submission_id}.mdata.json', **zz)
if 'submittedFiles' in zz:
dump_file(fname=f'tmp/{submission_date}.{submission_idx}.{submission_id}.workflow.wdl', value=zz['submittedFiles']['workflow'])
#succ = [v["succeeded"] for v in zz['outputs']["run_sims_cosi2.replicaInfos"]]
#print(f'Succeeded: {sum(succ)} of {len(succ)}')
# zzz = fapi.get_workflow_metadata(namespace=SEL_NAMESPACE, workspace=SEL_WORKSPACE, submission_id=s['submissionId'],
# workflow_id='ad1e8271-fe66-4e05-9005-af570e9e5884').json()
# _write_json('tmp/jz.json', **zzz)
print('tot_time=', tot_time, file=sys.stderr)
| dump_file |
longitudinal_planner.py | #!/usr/bin/env python3
import math
import numpy as np
from common.numpy_fast import interp
from common.cached_params import CachedParams
import cereal.messaging as messaging
from common.realtime import DT_MDL
from selfdrive.modeld.constants import T_IDXS
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.longcontrol import LongCtrlState
from selfdrive.controls.lib.longitudinal_mpc_lib.long_mpc import LongitudinalMpc
from selfdrive.controls.lib.longitudinal_mpc_lib.long_mpc import T_IDXS as T_IDXS_MPC
from selfdrive.controls.lib.drive_helpers import V_CRUISE_MAX, CONTROL_N
from selfdrive.swaglog import cloudlog
LON_MPC_STEP = 0.2 # first step is 0.2s
AWARENESS_DECEL = -0.2 # car smoothly decel at .2m/s^2 when user is distracted
A_CRUISE_MIN = -1.2
A_CRUISE_MAX_VALS = [1.2, 1.2, 0.8, 0.6]
A_CRUISE_MAX_BP = [0., 15., 25., 40.]
# Lookup table for turns
_A_TOTAL_MAX_V = [1.7, 3.2]
_A_TOTAL_MAX_BP = [20., 40.]
def get_max_accel(v_ego):
return interp(v_ego, A_CRUISE_MAX_BP, A_CRUISE_MAX_VALS)
def | (v_ego, angle_steers, a_target, CP):
"""
This function returns a limited long acceleration allowed, depending on the existing lateral acceleration
this should avoid accelerating when losing the target in turns
"""
a_total_max = interp(v_ego, _A_TOTAL_MAX_BP, _A_TOTAL_MAX_V)
a_y = v_ego**2 * angle_steers * CV.DEG_TO_RAD / (CP.steerRatio * CP.wheelbase)
a_x_allowed = math.sqrt(max(a_total_max**2 - a_y**2, 0.))
return [a_target[0], min(a_target[1], a_x_allowed)]
class Planner():
def __init__(self, CP, init_v=0.0, init_a=0.0):
self.CP = CP
self.mpc = LongitudinalMpc()
self.fcw = False
self.cachedParams = CachedParams()
self.v_desired = init_v
self.a_desired = init_a
self.alpha = np.exp(-DT_MDL/2.0)
self.v_desired_trajectory = np.zeros(CONTROL_N)
self.a_desired_trajectory = np.zeros(CONTROL_N)
self.j_desired_trajectory = np.zeros(CONTROL_N)
def update(self, sm, CP, lateral_planner):
v_ego = sm['carState'].vEgo
a_ego = sm['carState'].aEgo
v_cruise_kph = sm['controlsState'].vCruise
v_cruise_kph = min(v_cruise_kph, V_CRUISE_MAX)
v_cruise = v_cruise_kph * CV.KPH_TO_MS
long_control_state = sm['controlsState'].longControlState
force_slow_decel = sm['controlsState'].forceDecel
enabled = (long_control_state == LongCtrlState.pid) or (long_control_state == LongCtrlState.stopping)
if not enabled or sm['carState'].gasPressed:
self.v_desired = v_ego
self.a_desired = a_ego
# Prevent divergence, smooth in current v_ego
self.v_desired = self.alpha * self.v_desired + (1 - self.alpha) * v_ego
self.v_desired = max(0.0, self.v_desired)
accel_limits = [A_CRUISE_MIN, get_max_accel(v_ego)]
if not self.cachedParams.get('jvePilot.settings.slowInCurves', 5000) == "1":
accel_limits = limit_accel_in_turns(v_ego, sm['carState'].steeringAngleDeg, accel_limits, self.CP)
if force_slow_decel:
# if required so, force a smooth deceleration
accel_limits[1] = min(accel_limits[1], AWARENESS_DECEL)
accel_limits[0] = min(accel_limits[0], accel_limits[1])
# clip limits, cannot init MPC outside of bounds
accel_limits[0] = min(accel_limits[0], self.a_desired + 0.05)
accel_limits[1] = max(accel_limits[1], self.a_desired - 0.05)
self.mpc.set_accel_limits(accel_limits[0], accel_limits[1])
self.mpc.set_cur_state(self.v_desired, self.a_desired)
self.mpc.update(sm['carState'], sm['radarState'], v_cruise)
self.v_desired_trajectory = np.interp(T_IDXS[:CONTROL_N], T_IDXS_MPC, self.mpc.v_solution)
self.a_desired_trajectory = np.interp(T_IDXS[:CONTROL_N], T_IDXS_MPC, self.mpc.a_solution)
self.j_desired_trajectory = np.interp(T_IDXS[:CONTROL_N], T_IDXS_MPC[:-1], self.mpc.j_solution)
#TODO counter is only needed because radar is glitchy, remove once radar is gone
self.fcw = self.mpc.crash_cnt > 5
if self.fcw:
cloudlog.info("FCW triggered")
# Interpolate 0.05 seconds and save as starting point for next iteration
a_prev = self.a_desired
self.a_desired = float(interp(DT_MDL, T_IDXS[:CONTROL_N], self.a_desired_trajectory))
self.v_desired = self.v_desired + DT_MDL * (self.a_desired + a_prev)/2.0
if lateral_planner.lateralPlan and self.cachedParams.get('jvePilot.settings.slowInCurves', 5000) == "1":
curvs = list(lateral_planner.lateralPlan.curvatures)
if len(curvs):
# find the largest curvature in the solution and use that.
curv = abs(curvs[-1])
if curv != 0:
self.v_desired = float(min(self.v_desired, self.limit_speed_in_curv(sm, curv)))
def publish(self, sm, pm):
plan_send = messaging.new_message('longitudinalPlan')
plan_send.valid = sm.all_alive_and_valid(service_list=['carState', 'controlsState'])
longitudinalPlan = plan_send.longitudinalPlan
longitudinalPlan.modelMonoTime = sm.logMonoTime['modelV2']
longitudinalPlan.processingDelay = (plan_send.logMonoTime / 1e9) - sm.logMonoTime['modelV2']
longitudinalPlan.speeds = [float(x) for x in self.v_desired_trajectory]
longitudinalPlan.accels = [float(x) for x in self.a_desired_trajectory]
longitudinalPlan.jerks = [float(x) for x in self.j_desired_trajectory]
longitudinalPlan.hasLead = sm['radarState'].leadOne.status
longitudinalPlan.longitudinalPlanSource = self.mpc.source
longitudinalPlan.fcw = self.fcw
pm.send('longitudinalPlan', plan_send)
def limit_speed_in_curv(self, sm, curv):
v_ego = sm['carState'].vEgo
a_y_max = 2.975 - v_ego * 0.0375 # ~1.85 @ 75mph, ~2.6 @ 25mph
# drop off
drop_off = self.cachedParams.get_float('jvePilot.settings.slowInCurves.speedDropOff', 5000)
if drop_off != 2 and a_y_max > 0:
a_y_max = np.sqrt(a_y_max) ** drop_off
v_curvature = np.sqrt(a_y_max / np.clip(curv, 1e-4, None))
model_speed = np.min(v_curvature)
return model_speed * self.cachedParams.get_float('jvePilot.settings.slowInCurves.speedRatio', 5000) | limit_accel_in_turns |
cycle.rs | // Copyright 2018 Ed McCardell
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use machine_int::MachineInt;
use crate::mi::{Addr, AddrExt, AddrMath};
use crate::{Cmos, Sys};
impl Cmos {
// BRK
fn cycle_op_00<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
if self.do_int {
self.read(sys, self.pc)?;
} else {
self.fetch_operand(sys)?;
}
}
if self.op_cycle == 2 {
if self.reset {
self.read_stack(sys)?;
} else {
self.write_stack(sys, self.pc.hi())?;
}
self.sp -= 1;
}
if self.op_cycle == 3 {
if self.reset {
self.read_stack(sys)?;
} else {
self.write_stack(sys, self.pc.lo())?;
}
self.sp -= 1;
self.base1 = self.signal_vector(sys);
}
if self.op_cycle == 4 {
if self.reset {
self.read_stack(sys)?;
} else if self.do_int {
self.write_stack(sys, self.flags.to_byte() & 0b1110_1111)?;
} else {
self.write_stack(sys, self.flags.to_byte())?;
}
self.sp -= 1;
}
if self.op_cycle == 5 {
self.lo_byte = self.read(sys, self.base1)?;
}
// op_cycle == 6
self.hi_byte = self.read(sys, self.base1 + 1)?;
self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
self.flags.i = true;
self.flags.d = false;
self.clear_signals();
Some(())
}
// ORA ($nn,X)
fn cycle_op_01<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// cycle_op_02 = op_02
// cycle_op_03 = op_03
// TSB $nn
fn cycle_op_04<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::TSB, 2)
}
// ORA $nn
fn cycle_op_05<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// ASL $nn
fn cycle_op_06<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::ASL, 2)
}
// cycle_op_07 = op_07
// PHP
fn cycle_op_08<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.read(sys, self.pc)?;
}
// op_cycle == 2
self.store(sys, Addr::stack(self.sp), self.flags.to_byte())?;
self.sp -= 1;
Some(())
}
// cycle_op_09 = op_09
// cycle_op_0a = op_0a
// cycle_op_0b = op_0b
// TSB $nnnn
fn cycle_op_0c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::TSB, 3)
}
// ORA $nnnn
fn cycle_op_0d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// ASL $nnnn
fn cycle_op_0e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ASL, 3)
}
// cycle_op_0f = op_0f
// BPL
fn cycle_op_10<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, !self.flags.n())
}
// ORA ($nn),Y
fn cycle_op_11<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// ORA ($nn)
fn cycle_op_12<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// cycle_op_13 = op_13
// TRB $nn
fn cycle_op_14<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::TRB, 2)
}
// ORA $nn,X
fn cycle_op_15<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// ASL $nn,X
fn cycle_op_16<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ASL, 3)
}
// cycle_op_17 = op_18
// cycle_op_18 = op_18
// ORA $nnnn,Y
fn cycle_op_19<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// cycle_op_1a = op_1a
// cycle_op_1b = op_1b
// TRB $nnnn
fn cycle_op_1c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::TRB, 3)
}
// ORA $nnnn,X
fn cycle_op_1d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.ORA(val);
Some(())
}
// ASL $nnnn,X
fn cycle_op_1e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::ASL, 4)
}
// cycle_op_1f = op_1f
    // JSR $nnnn
    // Jump to subroutine: push the current PC and load the target address.
    // Each `op_cycle` arm is one bus cycle.
    fn cycle_op_20<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: fetch low byte of the target address.
            self.lo_byte = self.fetch_operand(sys)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: internal operation -- dummy read of the stack.
            self.read_stack(sys)?;
        }
        if self.op_cycle == 3 {
            // Cycle 3: push return-address high byte.
            self.write_stack(sys, self.pc.hi())?;
            self.sp -= 1;
        }
        if self.op_cycle == 4 {
            // Cycle 4: push return-address low byte; this is the
            // next-to-last cycle, so poll for interrupts now.
            self.write_stack(sys, self.pc.lo())?;
            self.sp -= 1;
            self.poll_signals(sys);
        }
        // op_cycle == 5: fetch the high byte and jump.
        self.hi_byte = self.fetch_operand(sys)?;
        self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
        Some(())
    }
// AND ($nn,X)
fn cycle_op_21<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// cycle_op_22 = op_22
// cycle_op_23 = op_23
// BIT $nn
fn cycle_op_24<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.BIT(val);
Some(())
}
// AND $nn
fn cycle_op_25<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// ROL $nn
fn cycle_op_26<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::ROL, 2)
}
// cycle_op_27 = op_27
    // PLP
    // Pull the processor status byte from the stack.
    fn cycle_op_28<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: dummy read of the byte after the opcode.
            self.read(sys, self.pc)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: dummy stack read while SP is incremented.
            self.read_stack(sys)?;
            self.sp += 1;
        }
        // op_cycle == 3: read the flags byte from the new top of stack.
        let p = self.load(sys, Addr::stack(self.sp))?;
        self.flags.from_byte(p);
        Some(())
    }
// cycle_op_29 = op_29
// cycle_op_2a = op_2a
// cycle_op_2b = op_2b
// BIT $nnnn
fn cycle_op_2c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.BIT(val);
Some(())
}
// AND $nnnn
fn cycle_op_2d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// ROL $nnnn
fn cycle_op_2e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ROL, 3)
}
// cycle_op_2f = op_2f
// BMI
fn cycle_op_30<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, self.flags.n())
}
// AND ($nn),Y
fn cycle_op_31<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// AND ($nn)
fn cycle_op_32<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// cycle_op_33 = op_33
// BIT $nn,X
fn cycle_op_34<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.BIT(val);
Some(())
}
// AND $nn,X
fn cycle_op_35<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// ROL $nn,X
fn cycle_op_36<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ROL, 3)
}
// cycle_op_37 = op_37
// cycle_op_38 = op_38
// AND $nnnn,Y
fn cycle_op_39<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// cycle_op_3a = op_3a
// cycle_op_3b = op_3b
// BIT $nnnn,X
fn cycle_op_3c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.BIT(val);
Some(())
}
// AND $nnnn,X
fn cycle_op_3d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.AND(val);
Some(())
}
// ROL $nnnn,X
fn cycle_op_3e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::ROL, 4)
}
// cycle_op_3f = op_3f
    // RTI
    // Return from interrupt: pull the flags, then PC low and high.
    fn cycle_op_40<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: dummy read of the byte after the opcode.
            self.read(sys, self.pc)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: dummy stack read while SP is incremented.
            self.read_stack(sys)?;
            self.sp += 1;
        }
        if self.op_cycle == 3 {
            // Cycle 3: pull the processor status.
            let p = self.read_stack(sys)?;
            self.sp += 1;
            self.flags.from_byte(p);
        }
        if self.op_cycle == 4 {
            // Cycle 4: pull PC low byte; poll interrupts before the
            // final cycle.
            self.lo_byte = self.read_stack(sys)?;
            self.sp += 1;
            self.poll_signals(sys);
        }
        // op_cycle == 5: pull PC high byte and resume execution there.
        self.hi_byte = self.read_stack(sys)?;
        self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
        Some(())
    }
// EOR ($nn,X)
fn cycle_op_41<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// cycle_op_42 = op_42
// cycle_op_43 = op_43
// NOP $nn
fn cycle_op_44<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.load(sys, self.base1)?;
Some(())
}
// EOR $nn
fn cycle_op_45<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// LSR $nn
fn cycle_op_46<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::LSR, 2)
}
// cycle_op_47 = op_47
    // PHA
    // Push the accumulator onto the stack.
    fn cycle_op_48<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: dummy read of the byte after the opcode.
            self.read(sys, self.pc)?;
        }
        // op_cycle == 2: write A, then post-decrement SP.
        self.store(sys, Addr::stack(self.sp), self.a)?;
        self.sp -= 1;
        Some(())
    }
// cycle_op_49 = op_49
// cycle_op_4a = op_4a
// cycle_op_4b = op_4b
    // JMP $nnnn
    fn cycle_op_4c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: fetch target low byte; the opcode finishes next
            // cycle, so poll for interrupts now.
            self.lo_byte = self.fetch_operand(sys)?;
            self.poll_signals(sys);
        }
        // op_cycle == 2: fetch target high byte and jump.
        self.hi_byte = self.fetch_operand(sys)?;
        self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
        Some(())
    }
// EOR $nnnn
fn cycle_op_4d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// LSR $nnnn
fn cycle_op_4e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::LSR, 3)
}
// cycle_op_4f = op_4f
// BVC
fn cycle_op_50<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, !self.flags.v())
}
// EOR ($nn),Y
fn cycle_op_51<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// EOR ($nn)
fn cycle_op_52<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// cycle_op_53 = op_53
// NOP $nn,X
fn cycle_op_54<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.load(sys, self.base1)?;
Some(())
}
// EOR $nn,X
fn cycle_op_55<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// LSR $nn,X
fn cycle_op_56<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::LSR, 3)
}
// cycle_op_57 = op_57
// cycle_op_58 = op_58
// EOR $nnnn,Y
fn cycle_op_59<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// PHY
fn cycle_op_5a<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.read(sys, self.pc)?;
}
// op_cycle == 2
self.store(sys, Addr::stack(self.sp), self.y)?;
self.sp -= 1;
Some(())
}
// cycle_op_5b = op_5b
    // NOP (eight-cycle)
    // 65C02 $5C: a NOP that burns eight cycles. The operand's high byte
    // is replaced with $FF, that address is read once, and $FFFF is read
    // repeatedly for the remaining cycles.
    fn cycle_op_5c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle < 3 {
            // Cycles 1-2: fetch both operand bytes, then force the high
            // byte of the effective address to $FF.
            self.base1 = Addr::from_bytes(
                self.cycle_addr_abs(sys)?.lo(),
                MachineInt(0xff),
            );
        }
        if self.op_cycle == 3 {
            // Cycle 3: read the $FFxx effective address.
            self.load(sys, self.base1)?;
        }
        // Middle cycles: dummy reads of $FFFF until op_cycle reaches 7.
        while self.op_cycle < 7 {
            self.read(sys, MachineInt(0xffff))?;
        }
        // Final cycle: one last $FFFF access via `load` rather than `read`
        // (presumably the load path handles end-of-instruction signal
        // polling -- confirm against the read/load split used elsewhere).
        self.load(sys, MachineInt(0xffff))?;
        Some(())
    }
// EOR $nnnn,X
fn cycle_op_5d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.EOR(val);
Some(())
}
// LSR $nnnn,X
fn cycle_op_5e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::LSR, 4)
}
// cycle_op_5f = op_5f
    // RTS
    // Return from subroutine: pull PC, then step past the JSR operand.
    fn cycle_op_60<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: dummy read of the byte after the opcode.
            self.read(sys, self.pc)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: dummy stack read while SP is incremented.
            self.read_stack(sys)?;
            self.sp += 1;
        }
        if self.op_cycle == 3 {
            // Cycle 3: pull return-address low byte.
            self.lo_byte = self.read_stack(sys)?;
            self.sp += 1;
        }
        if self.op_cycle == 4 {
            // Cycle 4: pull return-address high byte and set PC; poll
            // interrupts before the final cycle.
            self.hi_byte = self.read_stack(sys)?;
            self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
            self.poll_signals(sys);
        }
        // op_cycle == 5: fetch (and discard) the byte at the pulled PC --
        // this is what advances PC past the JSR operand.
        self.fetch_operand(sys)?;
        Some(())
    }
// ADC ($nn,X)
fn cycle_op_61<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle >= 5
self.cycle_decimal(sys, self.base1, Cmos::ADC, 5)
}
// cycle_op_62 = op_62
// cycle_op_63 = op_63
    // STZ $nn
    // NOTE(review): the previous comment said "NOP*", but opcode $64 on
    // the 65C02 is STZ zero page -- and the code does store zero, matching
    // STZ $nn,X ($74) and STZ $nnnn,X ($9E) elsewhere in this file.
    fn cycle_op_64<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: fetch the zero-page operand address.
            self.base1 = self.addr_zp(sys)?;
        }
        // op_cycle == 2: write zero to the zero-page address.
        self.store(sys, self.base1, MachineInt(0))?;
        Some(())
    }
// ADC $nn
fn cycle_op_65<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_decimal(sys, self.base1, Cmos::ADC, 2)
}
// ROR $nn
fn cycle_op_66<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::ROR, 2)
}
// cycle_op_67 = op_67
    // PLA
    // Pull the accumulator from the stack; updates N and Z.
    fn cycle_op_68<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: dummy read of the byte after the opcode.
            self.read(sys, self.pc)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: dummy stack read while SP is incremented.
            self.read_stack(sys)?;
            self.sp += 1;
        }
        // op_cycle == 3: load A from the new top of stack and set N/Z.
        self.a = self.load(sys, Addr::stack(self.sp))?;
        self.flags.nz(self.a);
        Some(())
    }
    // ADC #nn
    // Immediate add-with-carry. In decimal mode this takes one extra
    // cycle (the 65C02 fixes N/Z for BCD results); in binary mode it is
    // a normal two-cycle immediate op.
    fn cycle_op_69<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            if !self.flags.d {
                // Binary mode: this is the last cycle, so poll now.
                self.poll_signals(sys);
            }
            self.lo_byte = self.fetch_operand(sys)?;
        }
        // op_cycle == 2 -- reached only in decimal mode: extra dummy
        // read of the next byte while the BCD result settles.
        if self.flags.d {
            self.load(sys, self.pc)?;
        }
        self.ADC(self.lo_byte);
        Some(())
    }
// cycle_op_6a = op_6a
// cycle_op_6b = op_6b
    // JMP ($nnnn)
    // Jump through a 16-bit pointer.
    fn cycle_op_6c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle < 3 {
            // Cycles 1-2: fetch the pointer address.
            self.base1 = self.cycle_addr_abs(sys)?;
        }
        if self.op_cycle == 3 {
            // Cycle 3: read target low byte from the pointer.
            self.lo_byte = self.read(sys, self.base1)?;
        }
        // op_cycle == 4: read target high byte from pointer+1 and jump.
        // NOTE(review): `base1 + 1` appears to be a full 16-bit increment,
        // i.e. no NMOS $xxFF page-wrap bug -- confirm Addr's `+` carries
        // into the high byte.
        self.hi_byte = self.load(sys, self.base1 + 1)?;
        self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
        Some(())
    }
// ADC $nnnn
fn cycle_op_6d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_decimal(sys, self.base1, Cmos::ADC, 3)
}
// ROR $nnnn
fn cycle_op_6e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ROR, 3)
}
// cycle_op_6f = op_6f
// BVS
fn cycle_op_70<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, self.flags.v())
}
// ADC ($nn),Y
fn cycle_op_71<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle >= 5
self.cycle_decimal(sys, self.base1, Cmos::ADC, 5)
}
// ADC ($nn)
fn cycle_op_72<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
self.cycle_decimal(sys, self.base1, Cmos::ADC, 4)
}
// cycle_op_73 = op_73
// STZ $nn,X
fn cycle_op_74<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.store(sys, self.base1, MachineInt(0))?;
Some(())
}
// ADC $nn,X
fn cycle_op_75<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_decimal(sys, self.base1, Cmos::ADC, 3)
}
// ROR $nn,X
fn | <S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::ROR, 3)
}
// cycle_op_77 = op_77
// cycle_op_78 = op_78
// ADC $nnnn,Y
fn cycle_op_79<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle >= 4
self.cycle_decimal(sys, self.base1, Cmos::ADC, 4)
}
// PLY
fn cycle_op_7a<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.read(sys, self.pc)?;
}
if self.op_cycle == 2 {
self.read_stack(sys)?;
self.sp += 1;
}
// op_cycle == 3
self.y = self.load(sys, Addr::stack(self.sp))?;
self.flags.nz(self.y);
Some(())
}
// cycle_op_7b = op_7b
    // JMP ($nnnn,X)
    // 65C02-only addressing mode: jump through a pointer at operand + X.
    fn cycle_op_7c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            // Cycle 1: fetch pointer low byte.
            self.lo_byte = self.fetch_operand(sys)?;
        }
        if self.op_cycle == 2 {
            // Cycle 2: read the pointer high byte without advancing PC,
            // and form the X-indexed pointer address.
            self.hi_byte = self.read(sys, self.pc)?;
            self.base1 = self.addr() + self.x;
        }
        if self.op_cycle == 3 {
            // Cycle 3: internal indexing cycle -- re-fetch the high
            // operand byte, which advances PC.
            self.fetch_operand(sys)?;
        }
        if self.op_cycle == 4 {
            // Cycle 4: read target low byte through the pointer.
            self.lo_byte = self.read(sys, self.base1)?;
        }
        // op_cycle == 5: read target high byte and jump.
        self.hi_byte = self.load(sys, self.base1 + 1)?;
        self.pc = Addr::from_bytes(self.lo_byte, self.hi_byte);
        Some(())
    }
// ADC $nnnn,X
fn cycle_op_7d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
self.cycle_decimal(sys, self.base1, Cmos::ADC, 4)
}
// ROR $nnnn,X
fn cycle_op_7e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::ROR, 4)
}
// cycle_op_7f = op_7f
    // BRA -- branch always (65C02): the condition is hard-wired `true`.
    fn cycle_op_80<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        self.cycle_branch(sys, true)
    }
// STA ($nn,X)
fn cycle_op_81<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
self.store(sys, self.base1, self.a)
}
// cycle_op_82 = op_82
// cycle_op_83 = op_83
// STY $nn
fn cycle_op_84<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.store(sys, self.base1, self.y)
}
// STA $nn
fn cycle_op_85<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.store(sys, self.base1, self.a)
}
// STX $nn
fn cycle_op_86<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.store(sys, self.base1, self.x)
}
// cycle_op_87 = op_87
// cycle_op_88 = op_88
// cycle_op_89 = op_89
// cycle_op_8a = op_8a
// cycle_op_8b = op_8b
// STY $nnnn
fn cycle_op_8c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.y)
}
// STA $nnnn
fn cycle_op_8d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.a)
}
// STX $nnnn
fn cycle_op_8e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.x)
}
// cycle_op_8f = op_8f
// BCC
fn cycle_op_90<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, !self.flags.c())
}
// STA ($nn),Y
fn cycle_op_91<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, true)?;
}
// op_cycle == 5
self.store(sys, self.base1, self.a)
}
// STA ($nn)
fn cycle_op_92<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
self.store(sys, self.base1, self.a)
}
// cycle_op_93 = op_93
// STY $nn,X
fn cycle_op_94<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.y)
}
// STA $nn,X
fn cycle_op_95<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.a)
}
// STX $nn,Y
fn cycle_op_96<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.y)?;
}
// op_cycle == 3
self.store(sys, self.base1, self.x)
}
// cycle_op_97 = op_97
// cycle_op_98 = op_98
// STA $nnnn,Y
fn cycle_op_99<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, true)?;
}
// op_cycle == 4
self.store(sys, self.base1, self.a)
}
// cycle_op_9a = op_9a
// cycle_op_9b = op_9b
    // STZ $nnnn
    // NOTE(review): the previous comment said "SHY $nnnn,X", but SHY is an
    // NMOS undocumented-opcode name; on the 65C02, $9C is STZ absolute --
    // and the code indeed writes zero to a non-indexed absolute address.
    fn cycle_op_9c<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle < 3 {
            // Cycles 1-2: fetch the 16-bit target address.
            self.base1 = self.cycle_addr_abs(sys)?;
        }
        // op_cycle == 3: write zero.
        self.store(sys, self.base1, MachineInt(0))
    }
// STA $nnnn,X
fn cycle_op_9d<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, true)?;
}
// op_cycle == 4
self.store(sys, self.base1, self.a)
}
// STZ $nnnn,X
fn cycle_op_9e<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, true)?;
}
// op_cycle == 4
self.store(sys, self.base1, MachineInt(0))
}
// cycle_op_9f = op_9f
// cycle_op_a0 = op_a0
// LDA ($nn,X)
fn cycle_op_a1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// cycle_op_a2 = op_a2
// cycle_op_a3 = op_a3
// LDY $nn
fn cycle_op_a4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.y = self.load(sys, self.base1)?;
self.flags.nz(self.y);
Some(())
}
// LDA $nn
fn cycle_op_a5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// LDX $nn
fn cycle_op_a6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
self.x = self.load(sys, self.base1)?;
self.flags.nz(self.x);
Some(())
}
// cycle_op_a7 = op_a7
// cycle_op_a8 = op_a8
// cycle_op_a9 = op_a9
// cycle_op_aa = op_aa
// cycle_op_ab = op_ab
// LDY $nnnn
fn cycle_op_ac<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.y = self.load(sys, self.base1)?;
self.flags.nz(self.y);
Some(())
}
// LDA $nnnn
fn cycle_op_ad<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// LDX $nnnn
fn cycle_op_ae<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
self.x = self.load(sys, self.base1)?;
self.flags.nz(self.x);
Some(())
}
// cycle_op_af = op_af
// BCS
fn cycle_op_b0<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, self.flags.c())
}
// LDA ($nn),Y
fn cycle_op_b1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle == 5
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// LDA ($nn)
fn cycle_op_b2<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// cycle_op_b3 = op_b3
// LDY $nn,X
fn cycle_op_b4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.y = self.load(sys, self.base1)?;
self.flags.nz(self.y);
Some(())
}
// LDA $nn,X
fn cycle_op_b5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// LDX $nn,Y
fn cycle_op_b6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.y)?;
}
// op_cycle == 3
self.x = self.load(sys, self.base1)?;
self.flags.nz(self.x);
Some(())
}
// cycle_op_b7 = op_b7
// cycle_op_b8 = op_b8
// LDA $nnnn,Y
fn cycle_op_b9<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// cycle_op_ba = op_ba
// cycle_op_bb = op_bb
// LDY $nnnn,X
fn cycle_op_bc<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
self.y = self.load(sys, self.base1)?;
self.flags.nz(self.y);
Some(())
}
// LDA $nnnn,X
fn cycle_op_bd<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
self.a = self.load(sys, self.base1)?;
self.flags.nz(self.a);
Some(())
}
// LDX $nnnn,Y
fn cycle_op_be<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
self.x = self.load(sys, self.base1)?;
self.flags.nz(self.x);
Some(())
}
// cycle_op_bf = op_bf
// cycle_op_c0 = op_c0
// CMP ($nn,X)
fn cycle_op_c1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// cycle_op_c2 = op_c2
// cycle_op_c3 = op_c3
// CPY $nn
fn cycle_op_c4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.CMP(self.y, val);
Some(())
}
// CMP $nn
fn cycle_op_c5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// DEC $nn
fn cycle_op_c6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::DEC, 2)
}
// cycle_op_c7 = op_c7
// cycle_op_c8 = op_c8
// cycle_op_c9 = op_c9
// cycle_op_ca = op_ca
// cycle_op_cb = op_cb
// CPY $nnnn
fn cycle_op_cc<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.CMP(self.y, val);
Some(())
}
// CMP $nnnn
fn cycle_op_cd<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// DEC $nnnn
fn cycle_op_ce<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::DEC, 3)
}
// cycle_op_cf = op_cf
// BNE
fn cycle_op_d0<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, !self.flags.z())
}
// CMP ($nn),Y
fn cycle_op_d1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle == 5
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// CMP ($nn)
fn cycle_op_d2<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// cycle_op_d3 = op_d3
// NOP $nn,X
fn cycle_op_d4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.load(sys, self.base1)?;
Some(())
}
// CMP $nn,X
fn cycle_op_d5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// DEC $nn,X
fn cycle_op_d6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::DEC, 3)
}
// cycle_op_d7 = op_d7
// cycle_op_d8 = op_d8
// CMP $nnnn,Y
fn cycle_op_d9<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// PHX
fn cycle_op_da<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.read(sys, self.pc)?;
}
// op_cycle == 2
self.store(sys, Addr::stack(self.sp), self.x)?;
self.sp -= 1;
Some(())
}
// cycle_op_db = op_db
// NOP $nnnn,X (4-cycle)
fn cycle_op_dc<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 4
self.load(sys, self.base1.no_carry(self.x))?;
Some(())
}
// CMP $nnnn,X
fn cycle_op_dd<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle == 4
let val = self.load(sys, self.base1)?;
self.CMP(self.a, val);
Some(())
}
// DEC $nnnn,X
fn cycle_op_de<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, true)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::DEC, 4)
}
// cycle_op_df = op_df
// cycle_op_e0 = op_e0
// SBC ($nn,X)
fn cycle_op_e1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izx(sys)?;
}
// op_cycle >= 5
self.cycle_decimal(sys, self.base1, Cmos::SBC, 5)
}
// cycle_op_e2 = op_e2
// cycle_op_e3 = op_e3
// CPX $nn
fn cycle_op_e4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle == 2
let val = self.load(sys, self.base1)?;
self.CMP(self.x, val);
Some(())
}
// SBC $nn
fn cycle_op_e5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_decimal(sys, self.base1, Cmos::SBC, 2)
}
// INC $nn
fn cycle_op_e6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.base1 = self.addr_zp(sys)?;
}
// op_cycle >= 2
self.cycle_rmw(sys, self.base1, Cmos::INC, 2)
}
// cycle_op_e7 = op_e7
// cycle_op_e8 = op_e8
    // SBC #nn
    // Immediate subtract-with-borrow. Like ADC #nn above, decimal mode
    // costs one extra cycle on the 65C02.
    fn cycle_op_e9<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
        if self.op_cycle == 1 {
            if !self.flags.d {
                // Binary mode: this is the last cycle, so poll now.
                self.poll_signals(sys);
            }
            self.lo_byte = self.fetch_operand(sys)?;
        }
        // op_cycle == 2 -- reached only in decimal mode: extra dummy
        // read of the next byte while the BCD result settles.
        if self.flags.d {
            self.load(sys, self.pc)?;
        }
        self.SBC(self.lo_byte);
        Some(())
    }
// cycle_op_ea = op_ea
// cycle_op_eb = op_eb
// CPX $nnnn
fn cycle_op_ec<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 3
let val = self.load(sys, self.base1)?;
self.CMP(self.x, val);
Some(())
}
// SBC $nnnn
fn cycle_op_ed<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_decimal(sys, self.base1, Cmos::SBC, 3)
}
// INC $nnnn
fn cycle_op_ee<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::INC, 3)
}
// cycle_op_ef = op_ef
// BEQ
fn cycle_op_f0<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
self.cycle_branch(sys, self.flags.z())
}
// SBC ($nn),Y
fn cycle_op_f1<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 5 {
self.base1 = self.cycle_addr_izy(sys, false)?;
}
// op_cycle >= 5
self.cycle_decimal(sys, self.base1, Cmos::SBC, 5)
}
// SBC ($nn)
fn cycle_op_f2<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_izp(sys)?;
}
// op_cycle >= 4
self.cycle_decimal(sys, self.base1, Cmos::SBC, 4)
}
// cycle_op_f3 = op_f3
// NOP $nn,X
fn cycle_op_f4<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle == 3
self.load(sys, self.base1)?;
Some(())
}
// SBC $nn,X
fn cycle_op_f5<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_decimal(sys, self.base1, Cmos::SBC, 3)
}
// INC $nn,X
fn cycle_op_f6<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_zpi(sys, self.x)?;
}
// op_cycle >= 3
self.cycle_rmw(sys, self.base1, Cmos::INC, 3)
}
// cycle_op_f7 = op_f7
// cycle_op_f8 = op_f8
// SBC $nnnn,Y
fn cycle_op_f9<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.y, false)?;
}
// op_cycle >= 4
self.cycle_decimal(sys, self.base1, Cmos::SBC, 4)
}
// PLX
fn cycle_op_fa<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle == 1 {
self.read(sys, self.pc)?;
}
if self.op_cycle == 2 {
self.read_stack(sys)?;
self.sp += 1;
}
// op_cycle == 3
self.x = self.load(sys, Addr::stack(self.sp))?;
self.flags.nz(self.x);
Some(())
}
// cycle_op_fb = op_fb
// NOP $nnnn,X
fn cycle_op_fc<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 3 {
self.base1 = self.cycle_addr_abs(sys)?;
}
// op_cycle == 4
self.load(sys, self.base1.no_carry(self.x))?;
Some(())
}
// SBC $nnnn,X
fn cycle_op_fd<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, false)?;
}
// op_cycle >= 4
self.cycle_decimal(sys, self.base1, Cmos::SBC, 4)
}
// INC $nnnn,X
fn cycle_op_fe<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
if self.op_cycle < 4 {
self.base1 = self.cycle_addr_abi(sys, self.x, true)?;
}
// op_cycle >= 4
self.cycle_rmw(sys, self.base1, Cmos::INC, 4)
}
// cycle_op_ff = op_ff
}
impl Cmos {
#[cfg_attr(
feature = "cargo-clippy",
allow(clippy::cyclomatic_complexity)
)]
pub(crate) fn cycle_exec<S: Sys>(&mut self, sys: &mut S) -> Option<()> {
match self.op {
0x00 => self.cycle_op_00(sys)?,
0x01 => self.cycle_op_01(sys)?,
0x02 => self.op_02(sys)?,
0x03 => self.op_03(sys)?,
0x04 => self.cycle_op_04(sys)?,
0x05 => self.cycle_op_05(sys)?,
0x06 => self.cycle_op_06(sys)?,
0x07 => self.op_07(sys)?,
0x08 => self.cycle_op_08(sys)?,
0x09 => self.op_09(sys)?,
0x0a => self.op_0a(sys)?,
0x0b => self.op_0b(sys)?,
0x0c => self.cycle_op_0c(sys)?,
0x0d => self.cycle_op_0d(sys)?,
0x0e => self.cycle_op_0e(sys)?,
0x0f => self.op_0f(sys)?,
0x10 => self.cycle_op_10(sys)?,
0x11 => self.cycle_op_11(sys)?,
0x12 => self.cycle_op_12(sys)?,
0x13 => self.op_13(sys)?,
0x14 => self.cycle_op_14(sys)?,
0x15 => self.cycle_op_15(sys)?,
0x16 => self.cycle_op_16(sys)?,
0x17 => self.op_17(sys)?,
0x18 => self.op_18(sys)?,
0x19 => self.cycle_op_19(sys)?,
0x1a => self.op_1a(sys)?,
0x1b => self.op_1b(sys)?,
0x1c => self.cycle_op_1c(sys)?,
0x1d => self.cycle_op_1d(sys)?,
0x1e => self.cycle_op_1e(sys)?,
0x1f => self.op_1f(sys)?,
0x20 => self.cycle_op_20(sys)?,
0x21 => self.cycle_op_21(sys)?,
0x22 => self.op_22(sys)?,
0x23 => self.op_23(sys)?,
0x24 => self.cycle_op_24(sys)?,
0x25 => self.cycle_op_25(sys)?,
0x26 => self.cycle_op_26(sys)?,
0x27 => self.op_27(sys)?,
0x28 => self.cycle_op_28(sys)?,
0x29 => self.op_29(sys)?,
0x2a => self.op_2a(sys)?,
0x2b => self.op_2b(sys)?,
0x2c => self.cycle_op_2c(sys)?,
0x2d => self.cycle_op_2d(sys)?,
0x2e => self.cycle_op_2e(sys)?,
0x2f => self.op_2f(sys)?,
0x30 => self.cycle_op_30(sys)?,
0x31 => self.cycle_op_31(sys)?,
0x32 => self.cycle_op_32(sys)?,
0x33 => self.op_33(sys)?,
0x34 => self.cycle_op_34(sys)?,
0x35 => self.cycle_op_35(sys)?,
0x36 => self.cycle_op_36(sys)?,
0x37 => self.op_37(sys)?,
0x38 => self.op_38(sys)?,
0x39 => self.cycle_op_39(sys)?,
0x3a => self.op_3a(sys)?,
0x3b => self.op_3b(sys)?,
0x3c => self.cycle_op_3c(sys)?,
0x3d => self.cycle_op_3d(sys)?,
0x3e => self.cycle_op_3e(sys)?,
0x3f => self.op_3f(sys)?,
0x40 => self.cycle_op_40(sys)?,
0x41 => self.cycle_op_41(sys)?,
0x42 => self.op_42(sys)?,
0x43 => self.op_43(sys)?,
0x44 => self.cycle_op_44(sys)?,
0x45 => self.cycle_op_45(sys)?,
0x46 => self.cycle_op_46(sys)?,
0x47 => self.op_47(sys)?,
0x48 => self.cycle_op_48(sys)?,
0x49 => self.op_49(sys)?,
0x4a => self.op_4a(sys)?,
0x4b => self.op_4b(sys)?,
0x4c => self.cycle_op_4c(sys)?,
0x4d => self.cycle_op_4d(sys)?,
0x4e => self.cycle_op_4e(sys)?,
0x4f => self.op_4f(sys)?,
0x50 => self.cycle_op_50(sys)?,
0x51 => self.cycle_op_51(sys)?,
0x52 => self.cycle_op_52(sys)?,
0x53 => self.op_53(sys)?,
0x54 => self.cycle_op_54(sys)?,
0x55 => self.cycle_op_55(sys)?,
0x56 => self.cycle_op_56(sys)?,
0x57 => self.op_57(sys)?,
0x58 => self.op_58(sys)?,
0x59 => self.cycle_op_59(sys)?,
0x5a => self.cycle_op_5a(sys)?,
0x5b => self.op_5b(sys)?,
0x5c => self.cycle_op_5c(sys)?,
0x5d => self.cycle_op_5d(sys)?,
0x5e => self.cycle_op_5e(sys)?,
0x5f => self.op_5f(sys)?,
0x60 => self.cycle_op_60(sys)?,
0x61 => self.cycle_op_61(sys)?,
0x62 => self.op_62(sys)?,
0x63 => self.op_63(sys)?,
0x64 => self.cycle_op_64(sys)?,
0x65 => self.cycle_op_65(sys)?,
0x66 => self.cycle_op_66(sys)?,
0x67 => self.op_67(sys)?,
0x68 => self.cycle_op_68(sys)?,
0x69 => self.cycle_op_69(sys)?,
0x6a => self.op_6a(sys)?,
0x6b => self.op_6b(sys)?,
0x6c => self.cycle_op_6c(sys)?,
0x6d => self.cycle_op_6d(sys)?,
0x6e => self.cycle_op_6e(sys)?,
0x6f => self.op_6f(sys)?,
0x70 => self.cycle_op_70(sys)?,
0x71 => self.cycle_op_71(sys)?,
0x72 => self.cycle_op_72(sys)?,
0x73 => self.op_73(sys)?,
0x74 => self.cycle_op_74(sys)?,
0x75 => self.cycle_op_75(sys)?,
0x76 => self.cycle_op_76(sys)?,
0x77 => self.op_77(sys)?,
0x78 => self.op_78(sys)?,
0x79 => self.cycle_op_79(sys)?,
0x7a => self.cycle_op_7a(sys)?,
0x7b => self.op_7b(sys)?,
0x7c => self.cycle_op_7c(sys)?,
0x7d => self.cycle_op_7d(sys)?,
0x7e => self.cycle_op_7e(sys)?,
0x7f => self.op_7f(sys)?,
0x80 => self.cycle_op_80(sys)?,
0x81 => self.cycle_op_81(sys)?,
0x82 => self.op_82(sys)?,
0x83 => self.op_83(sys)?,
0x84 => self.cycle_op_84(sys)?,
0x85 => self.cycle_op_85(sys)?,
0x86 => self.cycle_op_86(sys)?,
0x87 => self.op_87(sys)?,
0x88 => self.op_88(sys)?,
0x89 => self.op_89(sys)?,
0x8a => self.op_8a(sys)?,
0x8b => self.op_8b(sys)?,
0x8c => self.cycle_op_8c(sys)?,
0x8d => self.cycle_op_8d(sys)?,
0x8e => self.cycle_op_8e(sys)?,
0x8f => self.op_8f(sys)?,
0x90 => self.cycle_op_90(sys)?,
0x91 => self.cycle_op_91(sys)?,
0x92 => self.cycle_op_92(sys)?,
0x93 => self.op_93(sys)?,
0x94 => self.cycle_op_94(sys)?,
0x95 => self.cycle_op_95(sys)?,
0x96 => self.cycle_op_96(sys)?,
0x97 => self.op_97(sys)?,
0x98 => self.op_98(sys)?,
0x99 => self.cycle_op_99(sys)?,
0x9a => self.op_9a(sys)?,
0x9b => self.op_9b(sys)?,
0x9c => self.cycle_op_9c(sys)?,
0x9d => self.cycle_op_9d(sys)?,
0x9e => self.cycle_op_9e(sys)?,
0x9f => self.op_9f(sys)?,
0xa0 => self.op_a0(sys)?,
0xa1 => self.cycle_op_a1(sys)?,
0xa2 => self.op_a2(sys)?,
0xa3 => self.op_a3(sys)?,
0xa4 => self.cycle_op_a4(sys)?,
0xa5 => self.cycle_op_a5(sys)?,
0xa6 => self.cycle_op_a6(sys)?,
0xa7 => self.op_a7(sys)?,
0xa8 => self.op_a8(sys)?,
0xa9 => self.op_a9(sys)?,
0xaa => self.op_aa(sys)?,
0xab => self.op_ab(sys)?,
0xac => self.cycle_op_ac(sys)?,
0xad => self.cycle_op_ad(sys)?,
0xae => self.cycle_op_ae(sys)?,
0xaf => self.op_af(sys)?,
0xb0 => self.cycle_op_b0(sys)?,
0xb1 => self.cycle_op_b1(sys)?,
0xb2 => self.cycle_op_b2(sys)?,
0xb3 => self.op_b3(sys)?,
0xb4 => self.cycle_op_b4(sys)?,
0xb5 => self.cycle_op_b5(sys)?,
0xb6 => self.cycle_op_b6(sys)?,
0xb7 => self.op_b7(sys)?,
0xb8 => self.op_b8(sys)?,
0xb9 => self.cycle_op_b9(sys)?,
0xba => self.op_ba(sys)?,
0xbb => self.op_bb(sys)?,
0xbc => self.cycle_op_bc(sys)?,
0xbd => self.cycle_op_bd(sys)?,
0xbe => self.cycle_op_be(sys)?,
0xbf => self.op_bf(sys)?,
0xc0 => self.op_c0(sys)?,
0xc1 => self.cycle_op_c1(sys)?,
0xc2 => self.op_c2(sys)?,
0xc3 => self.op_c3(sys)?,
0xc4 => self.cycle_op_c4(sys)?,
0xc5 => self.cycle_op_c5(sys)?,
0xc6 => self.cycle_op_c6(sys)?,
0xc7 => self.op_c7(sys)?,
0xc8 => self.op_c8(sys)?,
0xc9 => self.op_c9(sys)?,
0xca => self.op_ca(sys)?,
0xcb => self.op_cb(sys)?,
0xcc => self.cycle_op_cc(sys)?,
0xcd => self.cycle_op_cd(sys)?,
0xce => self.cycle_op_ce(sys)?,
0xcf => self.op_cf(sys)?,
0xd0 => self.cycle_op_d0(sys)?,
0xd1 => self.cycle_op_d1(sys)?,
0xd2 => self.cycle_op_d2(sys)?,
0xd3 => self.op_d3(sys)?,
0xd4 => self.cycle_op_d4(sys)?,
0xd5 => self.cycle_op_d5(sys)?,
0xd6 => self.cycle_op_d6(sys)?,
0xd7 => self.op_d7(sys)?,
0xd8 => self.op_d8(sys)?,
0xd9 => self.cycle_op_d9(sys)?,
0xda => self.cycle_op_da(sys)?,
0xdb => self.op_db(sys)?,
0xdc => self.cycle_op_dc(sys)?,
0xdd => self.cycle_op_dd(sys)?,
0xde => self.cycle_op_de(sys)?,
0xdf => self.op_df(sys)?,
0xe0 => self.op_e0(sys)?,
0xe1 => self.cycle_op_e1(sys)?,
0xe2 => self.op_e2(sys)?,
0xe3 => self.op_e3(sys)?,
0xe4 => self.cycle_op_e4(sys)?,
0xe5 => self.cycle_op_e5(sys)?,
0xe6 => self.cycle_op_e6(sys)?,
0xe7 => self.op_e7(sys)?,
0xe8 => self.op_e8(sys)?,
0xe9 => self.cycle_op_e9(sys)?,
0xea => self.op_ea(sys)?,
0xeb => self.op_eb(sys)?,
0xec => self.cycle_op_ec(sys)?,
0xed => self.cycle_op_ed(sys)?,
0xee => self.cycle_op_ee(sys)?,
0xef => self.op_ef(sys)?,
0xf0 => self.cycle_op_f0(sys)?,
0xf1 => self.cycle_op_f1(sys)?,
0xf2 => self.cycle_op_f2(sys)?,
0xf3 => self.op_f3(sys)?,
0xf4 => self.cycle_op_f4(sys)?,
0xf5 => self.cycle_op_f5(sys)?,
0xf6 => self.cycle_op_f6(sys)?,
0xf7 => self.op_f7(sys)?,
0xf8 => self.op_f8(sys)?,
0xf9 => self.cycle_op_f9(sys)?,
0xfa => self.cycle_op_fa(sys)?,
0xfb => self.op_fb(sys)?,
0xfc => self.cycle_op_fc(sys)?,
0xfd => self.cycle_op_fd(sys)?,
0xfe => self.cycle_op_fe(sys)?,
0xff => self.op_ff(sys)?,
_ => unreachable!(),
}
Some(())
}
}
| cycle_op_76 |
Alexnet_to_onnx.py | import torch
from inference_Alexnet import AlexNet
def | ():
pytorch_model = AlexNet()
pytorch_model.load_state_dict(torch.load('cifar100_Alexnet.pt'))
pytorch_model.eval()
dummy_input = torch.zeros(128*128*4)
torch.onnx.export(pytorch_model, dummy_input, 'cifar100_Alexnet.onnx', verbose=True)
if __name__ == '__main__':
main()
| main |
fr_GN.go | package fr_GN
import (
"math"
"strconv"
"time"
"package/locales"
"package/locales/currency"
)
type fr_GN struct {
locale string
pluralsCardinal []locales.PluralRule
pluralsOrdinal []locales.PluralRule
pluralsRange []locales.PluralRule
decimal string
group string
minus string
percent string
percentSuffix string
perMille string
timeSeparator string
inifinity string
currencies []string // idx = enum of currency code
currencyPositiveSuffix string
currencyNegativePrefix string
currencyNegativeSuffix string
monthsAbbreviated []string
monthsNarrow []string
monthsWide []string
daysAbbreviated []string
daysNarrow []string
daysShort []string
daysWide []string
periodsAbbreviated []string
periodsNarrow []string
periodsShort []string
periodsWide []string
erasAbbreviated []string
erasNarrow []string
erasWide []string
timezones map[string]string
}
// New returns a new instance of translator for the 'fr_GN' locale
func New() locales.Translator {
return &fr_GN{
locale: "fr_GN",
pluralsCardinal: []locales.PluralRule{2, 6},
pluralsOrdinal: []locales.PluralRule{2, 6},
pluralsRange: []locales.PluralRule{2, 6},
decimal: ",",
group: " ",
minus: "-",
percent: "%",
perMille: "‰",
timeSeparator: ":",
inifinity: "∞",
currencies: []string{"ADP", "AED", "AFA", "AFN", "ALK", "ALL", "AMD", "ANG", "AOA", "AOK", "AON", "AOR", "ARA", "ARL", "ARM", "ARP", "ARS", "ATS", "AUD", "AWG", "AZM", "AZN", "BAD", "BAM", "BAN", "BBD", "BDT", "BEC", "BEF", "BEL", "BGL", "BGM", "BGN", "BGO", "BHD", "BIF", "BMD", "BND", "BOB", "BOL", "BOP", "BOV", "BRB", "BRC", "BRE", "BRL", "BRN", "BRR", "BRZ", "BSD", "BTN", "BUK", "BWP", "BYB", "BYN", "BYR", "BZD", "CAD", "CDF", "CHE", "CHF", "CHW", "CLE", "CLF", "CLP", "CNH", "CNX", "CNY", "COP", "COU", "CRC", "CSD", "CSK", "CUC", "CUP", "CVE", "CYP", "CZK", "DDM", "DEM", "DJF", "DKK", "DOP", "DZD", "ECS", "ECV", "EEK", "EGP", "ERN", "ESA", "ESB", "ESP", "ETB", "EUR", "FIM", "FJD", "FKP", "FRF", "GBP", "GEK", "GEL", "GHC", "GHS", "GIP", "GMD", "FG", "GNS", "GQE", "GRD", "GTQ", "GWE", "GWP", "GYD", "HKD", "HNL", "HRD", "HRK", "HTG", "HUF", "IDR", "IEP", "ILP", "ILR", "ILS", "INR", "IQD", "IRR", "ISJ", "ISK", "ITL", "JMD", "JOD", "JPY", "KES", "KGS", "KHR", "KMF", "KPW", "KRH", "KRO", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LTL", "LTT", "LUC", "LUF", "LUL", "LVL", "LVR", "LYD", "MAD", "MAF", "MCF", "MDC", "MDL", "MGA", "MGF", "MKD", "MKN", "MLF", "MMK", "MNT", "MOP", "MRO", "MTL", "MTP", "MUR", "MVP", "MVR", "MWK", "MXN", "MXP", "MXV", "MYR", "MZE", "MZM", "MZN", "NAD", "NGN", "NIC", "NIO", "NLG", "NOK", "NPR", "NZD", "OMR", "PAB", "PEI", "PEN", "PES", "PGK", "PHP", "PKR", "PLN", "PLZ", "PTE", "PYG", "QAR", "RHD", "ROL", "RON", "RSD", "RUB", "RUR", "RWF", "SAR", "SBD", "SCR", "SDD", "SDG", "SDP", "SEK", "SGD", "SHP", "SIT", "SKK", "SLL", "SOS", "SRD", "SRG", "SSP", "STD", "STN", "SUR", "SVC", "SYP", "SZL", "THB", "TJR", "TJS", "TMM", "TMT", "TND", "TOP", "TPE", "TRL", "TRY", "TTD", "TWD", "TZS", "UAH", "UAK", "UGS", "UGX", "USD", "USN", "USS", "UYI", "UYP", "UYU", "UZS", "VEB", "VEF", "VND", "VNN", "VUV", "WST", "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XEU", "XFO", "XFU", "XOF", "XPD", "XPF", "XPT", "XRE", "XSU", 
"XTS", "XUA", "XXX", "YDD", "YER", "YUD", "YUM", "YUN", "YUR", "ZAL", "ZAR", "ZMK", "ZMW", "ZRN", "ZRZ", "ZWD", "ZWL", "ZWR"},
percentSuffix: " ",
currencyPositiveSuffix: " ",
currencyNegativePrefix: "(",
currencyNegativeSuffix: " )",
monthsAbbreviated: []string{"", "janv.", "févr.", "mars", "avr.", "mai", "juin", "juil.", "août", "sept.", "oct.", "nov.", "déc."},
monthsNarrow: []string{"", "J", "F", "M", "A", "M", "J", "J", "A", "S", "O", "N", "D"},
monthsWide: []string{"", "janvier", "février", "mars", "avril", "mai", "juin", "juillet", "août", "septembre", "octobre", "novembre", "décembre"},
daysAbbreviated: []string{"dim.", "lun.", "mar.", "mer.", "jeu.", "ven.", "sam."},
daysNarrow: []string{"D", "L", "M", "M", "J", "V", "S"},
daysShort: []string{"di", "lu", "ma", "me", "je", "ve", "sa"},
daysWide: []string{"dimanche", "lundi", "mardi", "mercredi", "jeudi", "vendredi", "samedi"},
periodsAbbreviated: []string{"AM", "PM"},
periodsNarrow: []string{"AM", "PM"},
periodsWide: []string{"AM", "PM"},
erasAbbreviated: []string{"av. J.-C.", "ap. J.-C."},
erasNarrow: []string{"av. J.-C.", "ap. J.-C."},
erasWide: []string{"avant Jésus-Christ", "après Jésus-Christ"},
timezones: map[string]string{"CAT": "heure normale d’Afrique centrale", "COT": "heure normale de Colombie", "GMT": "heure moyenne de Greenwich", "HNCU": "heure normale de Cuba", "EDT": "heure d’été de l’Est", "HEOG": "heure d’été de l’Ouest du Groenland", "HKT": "heure normale de Hong Kong", "LHST": "heure normale de Lord Howe", "ART": "heure normale d’Argentine", "ARST": "heure d’été de l’Argentine", "HEPMX": "heure d’été du Pacifique mexicain", "HEPM": "heure d’été de Saint-Pierre-et-Miquelon", "TMST": "heure d’été du Turkménistan", "CHAST": "heure normale des îles Chatham", "CDT": "heure d’été du Centre", "WESZ": "heure d’été d’Europe de l’Ouest", "VET": "heure du Venezuela", "MST": "heure normale de Macao", "COST": "heure d’été de Colombie", "LHDT": "heure d’été de Lord Howe", "WART": "heure normale de l’Ouest argentin", "WIT": "heure de l’Est indonésien", "AWDT": "heure d’été de l’Ouest de l’Australie", "CHADT": "heure d’été des îles Chatham", "WIB": "heure de l’Ouest indonésien", "EST": "heure normale de l’Est nord-américain", "HNEG": "heure normale de l’Est du Groenland", "HKST": "heure d’été de Hong Kong", "MDT": "heure d’été de Macao", "OEZ": "heure normale d’Europe de l’Est", "PST": "heure normale du Pacifique nord-américain", "ACWST": "heure normale du centre-ouest de l’Australie", "HNOG": "heure normale de l’Ouest du Groenland", "EAT": "heure normale d’Afrique de l’Est", "UYT": "heure normale de l’Uruguay", "AEST": "heure normale de l’Est de l’Australie", "WEZ": "heure normale d’Europe de l’Ouest", "AKST": "heure normale de l’Alaska", "ACWDT": "heure d’été du centre-ouest de l’Australie", "WARST": "heure d’été de l’Ouest argentin", "HENOMX": "heure d’été du Nord-Ouest du Mexique", "TMT": "heure normale du Turkménistan", "HAST": "heure normale d’Hawaii - Aléoutiennes", "AST": "heure normale de l’Atlantique", "WAT": "heure normale d’Afrique de l’Ouest", "GYT": "heure du Guyana", "CST": "heure normale du centre nord-américain", "PDT": "heure d’été du 
Pacifique", "AWST": "heure normale de l’Ouest de l’Australie", "BT": "heure du Bhoutan", "NZST": "heure normale de la Nouvelle-Zélande", "GFT": "heure de la Guyane française", "WITA": "heure du Centre indonésien", "CLT": "heure normale du Chili", "HNPMX": "heure normale du Pacifique mexicain", "ADT": "heure d’été de l’Atlantique", "AEDT": "heure d’été de l’Est de l’Australie", "WAST": "heure d’été d’Afrique de l’Ouest", "MYT": "heure de la Malaisie", "BOT": "heure de Bolivie", "SGT": "heure de Singapour", "HNT": "heure normale de Terre-Neuve", "HECU": "heure d’été de Cuba", "JDT": "heure d’été du Japon", "ACDT": "heure d’été du centre de l’Australie", "IST": "heure de l’Inde", "HNNOMX": "heure normale du Nord-Ouest du Mexique", "SRT": "heure du Suriname", "OESZ": "heure d’été d’Europe de l’Est", "HADT": "heure d’été d’Hawaii - Aléoutiennes", "UYST": "heure d’été de l’Uruguay", "ChST": "heure des Chamorro", "AKDT": "heure d’été de l’Alaska", "MEZ": "heure normale d’Europe centrale", "MESZ": "heure d’été d’Europe centrale", "HNPM": "heure normale de Saint-Pierre-et-Miquelon", "NZDT": "heure d’été de la Nouvelle-Zélande", "ACST": "heure normale du centre de l’Australie", "HEEG": "heure d’été de l’Est du Groenland", "HAT": "heure d’été de Terre-Neuve", "JST": "heure normale du Japon", "ECT": "heure de l’Équateur", "CLST": "heure d’été du Chili", "∅∅∅": "heure d’été de l’Amazonie", "SAST": "heure normale d’Afrique méridionale"},
}
}
// Locale returns the current translators string locale
func (fr *fr_GN) Locale() string {
return fr.locale
}
// PluralsCardinal returns the list of cardinal plural rules associated with 'fr_GN'
func (fr *fr_GN) PluralsCardinal() []locales.PluralRule {
return fr.pluralsCardinal
}
// PluralsOrdinal returns the list of ordinal plural rules associated with 'fr_GN'
| // PluralsRange returns the list of range plural rules associated with 'fr_GN'
func (fr *fr_GN) PluralsRange() []locales.PluralRule {
return fr.pluralsRange
}
// CardinalPluralRule returns the cardinal PluralRule given 'num' and digits/precision of 'v' for 'fr_GN'
func (fr *fr_GN) CardinalPluralRule(num float64, v uint64) locales.PluralRule {
n := math.Abs(num)
i := int64(n)
if i == 0 || i == 1 {
return locales.PluralRuleOne
}
return locales.PluralRuleOther
}
// OrdinalPluralRule returns the ordinal PluralRule given 'num' and digits/precision of 'v' for 'fr_GN'
func (fr *fr_GN) OrdinalPluralRule(num float64, v uint64) locales.PluralRule {
n := math.Abs(num)
if n == 1 {
return locales.PluralRuleOne
}
return locales.PluralRuleOther
}
// RangePluralRule returns the ordinal PluralRule given 'num1', 'num2' and digits/precision of 'v1' and 'v2' for 'fr_GN'
func (fr *fr_GN) RangePluralRule(num1 float64, v1 uint64, num2 float64, v2 uint64) locales.PluralRule {
start := fr.CardinalPluralRule(num1, v1)
end := fr.CardinalPluralRule(num2, v2)
if start == locales.PluralRuleOne && end == locales.PluralRuleOne {
return locales.PluralRuleOne
} else if start == locales.PluralRuleOne && end == locales.PluralRuleOther {
return locales.PluralRuleOther
}
return locales.PluralRuleOther
}
// MonthAbbreviated returns the locales abbreviated month given the 'month' provided
func (fr *fr_GN) MonthAbbreviated(month time.Month) string {
return fr.monthsAbbreviated[month]
}
// MonthsAbbreviated returns the locales abbreviated months
func (fr *fr_GN) MonthsAbbreviated() []string {
return fr.monthsAbbreviated[1:]
}
// MonthNarrow returns the locales narrow month given the 'month' provided
func (fr *fr_GN) MonthNarrow(month time.Month) string {
return fr.monthsNarrow[month]
}
// MonthsNarrow returns the locales narrow months
func (fr *fr_GN) MonthsNarrow() []string {
return fr.monthsNarrow[1:]
}
// MonthWide returns the locales wide month given the 'month' provided
func (fr *fr_GN) MonthWide(month time.Month) string {
return fr.monthsWide[month]
}
// MonthsWide returns the locales wide months
func (fr *fr_GN) MonthsWide() []string {
return fr.monthsWide[1:]
}
// WeekdayAbbreviated returns the locales abbreviated weekday given the 'weekday' provided
func (fr *fr_GN) WeekdayAbbreviated(weekday time.Weekday) string {
return fr.daysAbbreviated[weekday]
}
// WeekdaysAbbreviated returns the locales abbreviated weekdays
func (fr *fr_GN) WeekdaysAbbreviated() []string {
return fr.daysAbbreviated
}
// WeekdayNarrow returns the locales narrow weekday given the 'weekday' provided
func (fr *fr_GN) WeekdayNarrow(weekday time.Weekday) string {
return fr.daysNarrow[weekday]
}
// WeekdaysNarrow returns the locales narrow weekdays
func (fr *fr_GN) WeekdaysNarrow() []string {
return fr.daysNarrow
}
// WeekdayShort returns the locales short weekday given the 'weekday' provided
func (fr *fr_GN) WeekdayShort(weekday time.Weekday) string {
return fr.daysShort[weekday]
}
// WeekdaysShort returns the locales short weekdays
func (fr *fr_GN) WeekdaysShort() []string {
return fr.daysShort
}
// WeekdayWide returns the locales wide weekday given the 'weekday' provided
func (fr *fr_GN) WeekdayWide(weekday time.Weekday) string {
return fr.daysWide[weekday]
}
// WeekdaysWide returns the locales wide weekdays
func (fr *fr_GN) WeekdaysWide() []string {
return fr.daysWide
}
// Decimal returns the decimal point of number
func (fr *fr_GN) Decimal() string {
return fr.decimal
}
// Group returns the group of number
func (fr *fr_GN) Group() string {
return fr.group
}
// Group returns the minus sign of number
func (fr *fr_GN) Minus() string {
return fr.minus
}
// FmtNumber returns 'num' with digits/precision of 'v' for 'fr_GN' and handles both Whole and Real numbers based on 'v'
func (fr *fr_GN) FmtNumber(num float64, v uint64) string {
s := strconv.FormatFloat(math.Abs(num), 'f', int(v), 64)
l := len(s) + 2 + 2*len(s[:len(s)-int(v)-1])/3
count := 0
inWhole := v == 0
b := make([]byte, 0, l)
for i := len(s) - 1; i >= 0; i-- {
if s[i] == '.' {
b = append(b, fr.decimal[0])
inWhole = true
continue
}
if inWhole {
if count == 3 {
for j := len(fr.group) - 1; j >= 0; j-- {
b = append(b, fr.group[j])
}
count = 1
} else {
count++
}
}
b = append(b, s[i])
}
if num < 0 {
b = append(b, fr.minus[0])
}
// reverse
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
b[i], b[j] = b[j], b[i]
}
return string(b)
}
// FmtPercent returns 'num' with digits/precision of 'v' for 'fr_GN' and handles both Whole and Real numbers based on 'v'
// NOTE: 'num' passed into FmtPercent is assumed to be in percent already
func (fr *fr_GN) FmtPercent(num float64, v uint64) string {
s := strconv.FormatFloat(math.Abs(num), 'f', int(v), 64)
l := len(s) + 5
b := make([]byte, 0, l)
for i := len(s) - 1; i >= 0; i-- {
if s[i] == '.' {
b = append(b, fr.decimal[0])
continue
}
b = append(b, s[i])
}
if num < 0 {
b = append(b, fr.minus[0])
}
// reverse
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
b[i], b[j] = b[j], b[i]
}
b = append(b, fr.percentSuffix...)
b = append(b, fr.percent...)
return string(b)
}
// FmtCurrency returns the currency representation of 'num' with digits/precision of 'v' for 'fr_GN'
func (fr *fr_GN) FmtCurrency(num float64, v uint64, currency currency.Type) string {
s := strconv.FormatFloat(math.Abs(num), 'f', int(v), 64)
symbol := fr.currencies[currency]
l := len(s) + len(symbol) + 4 + 2*len(s[:len(s)-int(v)-1])/3
count := 0
inWhole := v == 0
b := make([]byte, 0, l)
for i := len(s) - 1; i >= 0; i-- {
if s[i] == '.' {
b = append(b, fr.decimal[0])
inWhole = true
continue
}
if inWhole {
if count == 3 {
for j := len(fr.group) - 1; j >= 0; j-- {
b = append(b, fr.group[j])
}
count = 1
} else {
count++
}
}
b = append(b, s[i])
}
if num < 0 {
b = append(b, fr.minus[0])
}
// reverse
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
b[i], b[j] = b[j], b[i]
}
if int(v) < 2 {
if v == 0 {
b = append(b, fr.decimal...)
}
for i := 0; i < 2-int(v); i++ {
b = append(b, '0')
}
}
b = append(b, fr.currencyPositiveSuffix...)
b = append(b, symbol...)
return string(b)
}
// FmtAccounting returns the currency representation of 'num' with digits/precision of 'v' for 'fr_GN'
// in accounting notation.
func (fr *fr_GN) FmtAccounting(num float64, v uint64, currency currency.Type) string {
s := strconv.FormatFloat(math.Abs(num), 'f', int(v), 64)
symbol := fr.currencies[currency]
l := len(s) + len(symbol) + 6 + 2*len(s[:len(s)-int(v)-1])/3
count := 0
inWhole := v == 0
b := make([]byte, 0, l)
for i := len(s) - 1; i >= 0; i-- {
if s[i] == '.' {
b = append(b, fr.decimal[0])
inWhole = true
continue
}
if inWhole {
if count == 3 {
for j := len(fr.group) - 1; j >= 0; j-- {
b = append(b, fr.group[j])
}
count = 1
} else {
count++
}
}
b = append(b, s[i])
}
if num < 0 {
b = append(b, fr.currencyNegativePrefix[0])
}
// reverse
for i, j := 0, len(b)-1; i < j; i, j = i+1, j-1 {
b[i], b[j] = b[j], b[i]
}
if int(v) < 2 {
if v == 0 {
b = append(b, fr.decimal...)
}
for i := 0; i < 2-int(v); i++ {
b = append(b, '0')
}
}
if num < 0 {
b = append(b, fr.currencyNegativeSuffix...)
b = append(b, symbol...)
} else {
b = append(b, fr.currencyPositiveSuffix...)
b = append(b, symbol...)
}
return string(b)
}
// FmtDateShort returns the short date representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtDateShort(t time.Time) string {
b := make([]byte, 0, 32)
if t.Day() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Day()), 10)
b = append(b, []byte{0x2f}...)
if t.Month() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Month()), 10)
b = append(b, []byte{0x2f}...)
if t.Year() > 0 {
b = strconv.AppendInt(b, int64(t.Year()), 10)
} else {
b = strconv.AppendInt(b, int64(-t.Year()), 10)
}
return string(b)
}
// FmtDateMedium returns the medium date representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtDateMedium(t time.Time) string {
b := make([]byte, 0, 32)
b = strconv.AppendInt(b, int64(t.Day()), 10)
b = append(b, []byte{0x20}...)
b = append(b, fr.monthsAbbreviated[t.Month()]...)
b = append(b, []byte{0x20}...)
if t.Year() > 0 {
b = strconv.AppendInt(b, int64(t.Year()), 10)
} else {
b = strconv.AppendInt(b, int64(-t.Year()), 10)
}
return string(b)
}
// FmtDateLong returns the long date representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtDateLong(t time.Time) string {
b := make([]byte, 0, 32)
b = strconv.AppendInt(b, int64(t.Day()), 10)
b = append(b, []byte{0x20}...)
b = append(b, fr.monthsWide[t.Month()]...)
b = append(b, []byte{0x20}...)
if t.Year() > 0 {
b = strconv.AppendInt(b, int64(t.Year()), 10)
} else {
b = strconv.AppendInt(b, int64(-t.Year()), 10)
}
return string(b)
}
// FmtDateFull returns the full date representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtDateFull(t time.Time) string {
b := make([]byte, 0, 32)
b = append(b, fr.daysWide[t.Weekday()]...)
b = append(b, []byte{0x20}...)
b = strconv.AppendInt(b, int64(t.Day()), 10)
b = append(b, []byte{0x20}...)
b = append(b, fr.monthsWide[t.Month()]...)
b = append(b, []byte{0x20}...)
if t.Year() > 0 {
b = strconv.AppendInt(b, int64(t.Year()), 10)
} else {
b = strconv.AppendInt(b, int64(-t.Year()), 10)
}
return string(b)
}
// FmtTimeShort returns the short time representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtTimeShort(t time.Time) string {
b := make([]byte, 0, 32)
if t.Hour() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Hour()), 10)
b = append(b, fr.timeSeparator...)
if t.Minute() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Minute()), 10)
return string(b)
}
// FmtTimeMedium returns the medium time representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtTimeMedium(t time.Time) string {
b := make([]byte, 0, 32)
if t.Hour() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Hour()), 10)
b = append(b, fr.timeSeparator...)
if t.Minute() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Minute()), 10)
b = append(b, fr.timeSeparator...)
if t.Second() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Second()), 10)
return string(b)
}
// FmtTimeLong returns the long time representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtTimeLong(t time.Time) string {
b := make([]byte, 0, 32)
if t.Hour() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Hour()), 10)
b = append(b, fr.timeSeparator...)
if t.Minute() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Minute()), 10)
b = append(b, fr.timeSeparator...)
if t.Second() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Second()), 10)
b = append(b, []byte{0x20}...)
tz, _ := t.Zone()
b = append(b, tz...)
return string(b)
}
// FmtTimeFull returns the full time representation of 't' for 'fr_GN'
func (fr *fr_GN) FmtTimeFull(t time.Time) string {
b := make([]byte, 0, 32)
if t.Hour() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Hour()), 10)
b = append(b, fr.timeSeparator...)
if t.Minute() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Minute()), 10)
b = append(b, fr.timeSeparator...)
if t.Second() < 10 {
b = append(b, '0')
}
b = strconv.AppendInt(b, int64(t.Second()), 10)
b = append(b, []byte{0x20}...)
tz, _ := t.Zone()
if btz, ok := fr.timezones[tz]; ok {
b = append(b, btz...)
} else {
b = append(b, tz...)
}
return string(b)
} | func (fr *fr_GN) PluralsOrdinal() []locales.PluralRule {
return fr.pluralsOrdinal
}
|
server.js | const express = require('express');
const mongoose = require('mongoose');
const bodyParser = require('body-parser');
const passport = require('passport'); | // load configs
const config = require('./config/index');
// load routes
const users = require('./routes/api/users');
const profile = require('./routes/api/profile');
const posts = require('./routes/api/posts');
const app = express();
app.use(bodyParser.urlencoded({extended: false}));
app.use(bodyParser.json());
mongoose.connect(config.mongoURI)
.then(() => console.log('MongoDB connected.'))
.catch((error) => console.log(error));
app.use(passport.initialize());
require('./config/passport')(passport)
// use routes
app.use('/api/users', users);
app.use('/api/profile', profile);
app.use('/api/posts', posts);
app.get('/', (req, res) => res.send('Hello World!'));
const port = process.env.PORT || 5000;
app.listen(port, () => console.log(`Server running on port ${port}`)); | |
__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import traceback
import wx
import pygame
import util
import battle
import yadodb
import data
import dice
import effectmotion
import event
import eventhandler
import eventrelay
import features
import scenariodb
import setting
import skin
import animation
import thread
import header
import image
import imageretouch
import frame
import deck
import character
import effectbooster
import content
import xmlcreater
import bassplayer
import binary
import advlog
import update
import calculator
import dialog
import debug
import sprite
import argparser
# CWPyThread
cwpy = None
tempdir_init = u"Data/Temp/Global"
tempdir = tempdir_init
# アプリケーション情報
APP_VERSION = (4, "0a")
APP_NAME = "CardWirthPyLite"
# CardWirthの標準文字コード
if sys.platform == "win32":
MBCS = "mbcs"
else:
MBCS = "ms932"
# コール系イベントの再期限界回数
LIMIT_RECURSE = 10000
# サイズ
SIZE_SCR = (640, 480)
SIZE_GAME = (632, 453)
SIZE_AREA = (632, 420)
SIZE_CARDIMAGE = (74, 94)
SIZE_BOOK = (460, 280)
SIZE_BILL = (400, 370)
RECT_STATUSBAR = (0, 420, 632, 33)
# 対応するWSNデータバージョン
SUPPORTED_WSN = ("", "1", "2", "3", "4")
# 対応するスキンバージョン
SUPPORTED_SKIN = ("8", "9", "10", "11", "12")
# スケーリングされたイメージファイルを検索する時、以下のスケール値を使用する
SCALE_LIST = (2, 4, 8, 16)
# 特殊エリアのID
AREAS_SP = (-1, -2, -3, -4, -5)
AREAS_TRADE = (-1, -2, -5) # カード移動操作エリア
AREA_TRADE1 = -1 # カード移動操作エリア(宿・パーティなし時)
AREA_TRADE2 = -2 # カード移動操作エリア(宿・パーティロード中時)
AREA_TRADE3 = -5 # カード移動操作エリア(キャンプエリア)
AREA_BREAKUP = -3 # パーティ解散エリア
AREA_CAMP = -4 # キャンプエリア
AREAS_TITLE = (1,) # タイトル画面のエリア
AREAS_YADO = (-3, -2, -1, 1, 2, 3, 4) # 宿のエリア
# スキン固有エリアのID上下限
SKIN_AREAS_MIN = 10001
SKIN_AREAS_MAX = 20000
# カードポケットのインデックス
POCKET_SKILL = 0
POCKET_ITEM = 1
POCKET_BEAST = 2
# イベント用子コンテンツ特殊インデックス
IDX_TREEEND = -1
# 素材タイプ
M_IMG = 0
M_MSC = 1
M_SND = 2
# 対応拡張子
EXTS_IMG = (".bmp", ".jpg", ".jpeg", ".png", ".gif", ".pcx", ".tif", ".xpm")
EXTS_MSC = (".mid", ".midi", ".mp3", ".ogg")
EXTS_SND = (".wav", ".wave", ".ogg")
# 互換性マークのインデックス
HINT_MESSAGE = 0 # メッセージ表示時の話者(キャストまたはカード)
HINT_CARD = 1 # 使用中のカード
HINT_AREA = 2 # エリア・バトル・パッケージ
HINT_SCENARIO = 3 # シナリオ本体
# 標準のサウンドフォント
DEFAULT_SOUNDFONT = "Data/SoundFont/TimGM_A320_Mix.sf2"
# 表示レイヤ
LTYPE_MESSAGE = 1
LTYPE_BACKGROUND = 2
LTYPE_MCARDS = 3
LTYPE_PCARDS = 4
LTYPE_FCARDS = 0
LTYPE_SPMESSAGE = 1
LTYPE_SPMCARDS = 3
LAYER_SP_LAYER = 10000000000
LAYER_BACKGROUND = 0 # 背景
LAYER_SPBACKGROUND = 0x70000000 # 背景
LAYER_MCARDS = 100 # メニューカード・エネミーカード
LAYER_PCARDS = 200 # プレイヤーカード
LAYER_MCARDS_120 = 300 # CardWirth 1.20でのメニューカード(PCより手前に表示)
LAYER_FCARDS_T = 0x7fffffff # デバッグモードで表示される戦闘中の同行キャスト
LAYER_FCARDS = 1000 # 同行キャスト
# (layer, index, kind)
LAYER_BATTLE_START = (0x7fffffff, 0x7fffffff-3, 0x7fffffff, 0x7fffffff) # バトル開始カード
LAYER_FRONT_INUSECARD = (0x7fffffff, 0x7fffffff-2, 0x7fffffff, 0x7fffffff) # カーソル下のカードの使用カード
LAYER_TARGET_ARROW = (0x7fffffff, 0x7fffffff-1, 0x7fffffff, 0x7fffffff) # 対象選択の指マーク
# index=-1は背景セル
LAYER_MESSAGE = (1000, LTYPE_MESSAGE, 0, 0) # メッセージ
LAYER_SELECTIONBAR_1 = (1000, LTYPE_MESSAGE, 1, 0) # メッセージ選択肢
LAYER_SELECTIONBAR_2 = (1000, LTYPE_MESSAGE, 2, 0) # メッセージ選択肢(クリック中)
LAYER_SPMESSAGE = (LAYER_SP_LAYER+1000, LTYPE_SPMESSAGE, 0, 0) #特殊エリアのメッセージ
LAYER_SPSELECTIONBAR_1 = (LAYER_SP_LAYER+1000, LTYPE_MESSAGE, 1, 0) # 特殊エリアのメッセージ選択肢
LAYER_SPSELECTIONBAR_2 = (LAYER_SP_LAYER+1000, LTYPE_MESSAGE, 2, 0) # 特殊エリアのメッセージ選択肢(クリック中)
LAYER_TRANSITION = (0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fffffff) # 背景遷移用
LAYER_LOG_CURTAIN = (2000, 0, 0, 0) # ログ背景
LAYER_LOG = (2001, 0, 0, 0) # メッセージログ
LAYER_LOG_BAR = (2002, 0, 0, 0) # ログ選択肢
LAYER_LOG_PAGE = (2003, 0, 0, 0) # ログのページ
LAYER_LOG_SCROLLBAR = (2004, 0, 0, 0) # ログのスクロールバー
# ゲーム画面構築の拡大率
UP_SCR = 1
# ダイアログ描画時の拡大率(UP_SCRが1の時の値)
UP_WIN = 1
# ゲーム画面の拡大率
# フルスクリーン時にはダイアログを若干小さく表示するため、
# UP_WINとは異なる値になる
UP_WIN_M = 1
# wxPythonでイメージをスムージングしつつサイズ変更する際に用いるフラグ
if 3 <= wx.VERSION[0]:
RESCALE_QUALITY = wx.IMAGE_QUALITY_BILINEAR
else:
RESCALE_QUALITY = wx.IMAGE_QUALITY_HIGH
# プレイログの区切り線の長さ
LOG_SEPARATOR_LEN_LONG = 80
LOG_SEPARATOR_LEN_MIDDLE = 60
LOG_SEPARATOR_LEN_SHORT = 45
# 起動オプション
_argparser = argparser.ArgParser(appname=APP_NAME,
description=u"%s %s\n\nオープンソースのCardWirthエンジン" % (APP_NAME, ".".join(map(lambda a: str(a), APP_VERSION))))
_argparser.add_argument("-h", type=bool, nargs=0,
help=u"このメッセージを表示して終了します。", arg2="--help")
_argparser.add_argument("-debug", type=bool, nargs=0,
help=u"デバッグモードで起動します。")
_argparser.add_argument("-yado", type=str, nargs=1, default="",
help=u"起動と同時に<YADO>のパスにある宿を読み込みます。")
_argparser.add_argument("-party", type=str, nargs=1, default="",
help=u"起動と同時に<PARTY>のパスにあるパーティを読み込みます。\n"
+ u"-yadoと同時に指定しなかった場合は無視されます。")
_argparser.add_argument("-scenario", type=str, nargs=1, default="",
help=u"起動と同時に<SCENARIO>のパスにあるシナリオを開始します。\n"
+ u"-yado及び-partyと同時に指定しなかった場合は無視されます。")
OPTIONS = _argparser.parse_args(sys.argv[1:])
if OPTIONS.help:
_argparser.print_help()
sys.exit(0)
_encoding = sys.getfilesystemencoding()
OPTIONS.yado = OPTIONS.yado.decode(_encoding)
OPTIONS.party = OPTIONS.party.decode(_encoding)
OPTIONS.scenario = OPTIONS.scenario.decode(_encoding)
# 起動オプション(スキン自動生成元)
SKIN_CONV_ARGS = []
for arg in OPTIONS.leftovers:
if os.path.isfile(arg) and os.path.splitext(arg)[1].lower() == ".exe":
SKIN_CONV_ARGS.append(arg)
sys.argv.remove(arg)
def wins(num):
"""numを実際の表示サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
return _s_impl(num, UP_WIN)
def s(num):
"""numを描画サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
return _s_impl(num, UP_SCR)
def scr2win_s(num):
"""numを描画サイズから表示サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
if UP_WIN == UP_SCR:
return _s_impl(num, 1)
else:
return _s_impl(num, float(UP_WIN) / UP_SCR)
def win2scr_s(num):
"""numを表示サイズから描画サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
if UP_WIN == UP_SCR:
return _s_impl(num, 1)
else:
return _s_impl(num, float(UP_SCR) / UP_WIN)
def scr2mwin_s(num):
"""numを描画サイズから表示サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
if UP_WIN_M == UP_SCR:
return _s_impl(num, 1)
else:
return _s_impl(num, float(UP_WIN_M) / UP_SCR)
def mwin2scr_s(num):
"""numを表示サイズから描画サイズに変換する。
num: int or 座標(x,y) or 矩形(x,y,width,height)
or pygame.Surface or pygame.Bitmap or pygame.Image
"""
if UP_WIN_M == UP_SCR:
return _s_impl(num, 1)
else:
return _s_impl(num, float(UP_SCR) / UP_WIN_M)
def _s_impl(num, up_scr):
if isinstance(num, tuple) and len(num) == 3 and num[2] is None:
# スケール情報無し
return _s_impl(num[:2], up_scr)
if up_scr == 1 and not (isinstance(num, tuple) and len(num) == 3):
# 拡大率が1倍で、スケール情報も無い
if isinstance(num, tuple) and len(num) == 2:
if (isinstance(num[0], pygame.Surface) or\
isinstance(num[0], wx.Bitmap) or\
isinstance(num[0], wx.Image)):
# 画像はそのままのサイズで表示
return num[0]
# 座標等はそのまま返す
return num
if isinstance(num, int) or isinstance(num, float):
# 単純な数値(座標やサイズ)
return int(num * up_scr)
elif isinstance(num, pygame.Rect):
# pygameの矩形情報
if len(num) == 4:
x = int(num[0] * up_scr)
y = int(num[1] * up_scr)
w = int(num[2] * up_scr)
h = int(num[3] * up_scr)
return pygame.Rect(x, y, w, h)
elif isinstance(num, tuple):
if isinstance(num[0], pygame.Surface):
bmp = num[0]
if bmp.get_width() <= 0 or bmp.get_width() <= 0:
return bmp
return _s_impl(bmp, up_scr)
elif isinstance(num[0], wx.Image):
| if hasattr(bmp, "maskcolour") else None
scr_scale = bmp.scr_scale if hasattr(bmp, "scr_scale") else 1
up_scr /= scr_scale
if up_scr == 1:
return bmp
if bmp.GetWidth() <= 0 or bmp.GetHeight() <= 0:
return bmp
# wx.Bitmap
if bmpdepthis1:
img = util.convert_to_image(bmp)
else:
img = bmp.ConvertToImage()
result = _s_impl((img, num[1]), up_scr).ConvertToBitmap()
if bmpdepthis1:
result.bmpdepthis1 = bmpdepthis1
if maskcolour:
result.maskcolour = maskcolour
return result
elif len(num) == 4:
# 矩形
x = int(num[0] * up_scr)
y = int(num[1] * up_scr)
w = int(num[2] * up_scr)
h = int(num[3] * up_scr)
return (x, y, w, h)
elif len(num) == 2:
# 座標
x = int(num[0] * up_scr)
y = int(num[1] * up_scr)
return (x, y)
elif isinstance(num, pygame.Surface):
# スケール情報の無いpygame.Surface(単純拡大)
bmp0 = num
scr_scale = num.scr_scale if hasattr(num, "scr_scale") else 1
up_scr /= scr_scale
if up_scr == 1:
return num
w = int(num.get_width() * up_scr)
h = int(num.get_height() * up_scr)
if w <= 0 or h <= 0:
return num
size = (w, h)
if up_scr % 1 == 0:
result = pygame.transform.scale(num, size)
else:
if not (num.get_flags() & pygame.locals.SRCALPHA) and num.get_colorkey():
num = num.convert_alpha()
result = image.smoothscale(num, size)
if isinstance(bmp0, util.Depth1Surface):
result = util.Depth1Surface(result, scr_scale)
return result
elif isinstance(num, wx.Image):
# スケール情報の無いwx.Image(単純拡大)
bmpdepthis1 = hasattr(num, "bmpdepthis1")
maskcolour = num.maskcolour if hasattr(num, "maskcolour") else None
scr_scale = num.scr_scale if hasattr(num, "scr_scale") else 1
up_scr /= scr_scale
if up_scr == 1:
return num
w = int(num.GetWidth() * up_scr)
h = int(num.GetHeight() * up_scr)
if w <= 0 or h <= 0:
return num
if up_scr % 1 == 0 or bmpdepthis1:
result = num.Rescale(w, h, wx.IMAGE_QUALITY_NORMAL)
else:
if not num.HasAlpha():
num.InitAlpha()
result = num.Rescale(w, h, RESCALE_QUALITY)
if bmpdepthis1:
result.bmpdepthis1 = bmpdepthis1
if maskcolour:
result.maskcolour = maskcolour
return result
elif isinstance(num, wx.Bitmap):
# スケール情報の無いwx.Bitmap(単純拡大)
bmpdepthis1 = hasattr(num, "bmpdepthis1")
maskcolour = num.maskcolour if hasattr(num, "maskcolour") else None
scr_scale = num.scr_scale if hasattr(num, "scr_scale") else 1
up_scr /= scr_scale
if up_scr == 1:
return num
w = int(num.GetWidth() * up_scr)
h = int(num.GetHeight() * up_scr)
if w <= 0 or h <= 0:
return num
bmp = num
if bmpdepthis1:
img = util.convert_to_image(bmp)
else:
img = bmp.ConvertToImage()
img = _s_impl(img, up_scr)
result = img.ConvertToBitmap()
if bmpdepthis1:
result.bmpdepthis1 = bmpdepthis1
if maskcolour:
result.maskcolour = maskcolour
return result
return num
dpi_level = 1
def ppis(num):
return _s_impl(num, dpi_level)
def main():
pass
if __name__ == "__main__":
main()
| img = num[0]
if img.GetWidth() <= 0 or img.GetHeight() <= 0:
return img
return _s_impl(img, up_scr)
elif isinstance(num[0], wx.Bitmap):
bmp = num[0]
bmpdepthis1 = hasattr(bmp, "bmpdepthis1")
maskcolour = bmp.maskcolour |
jqc_helper.py | """
This module contains methods for opening jquery-confirm boxes.
These helper methods SHOULD NOT be called directly from tests.
"""
from seleniumbase.fixtures import constants
from seleniumbase.fixtures import js_utils
form_code = """'<form align="center" action="" class="jqc_form">' +
'<div class="form-group">' +
'<input style="font-size:20px; background-color: #f8fdfd; ' +
' width: 84%%; border: 1px solid blue; ' +
' box-shadow:inset 0 0 2px 2px #f4fafa;"' +
' type="text" class="jqc_input" />' +
'</div>' +
'</form>'"""
def jquery_confirm_button_dialog(driver, message, buttons, options=None):
|
def jquery_confirm_text_dialog(driver, message, button=None, options=None):
js_utils.activate_jquery_confirm(driver)
# These defaults will be overwritten later if set
theme = constants.JqueryConfirm.DEFAULT_THEME
border_color = constants.JqueryConfirm.DEFAULT_COLOR
width = constants.JqueryConfirm.DEFAULT_WIDTH
if not message:
message = ""
if button:
if not type(button) is list and not type(button) is tuple:
raise Exception('"button" should be a (text, color) tuple!')
if len(button) != 2:
raise Exception('"button" should be a (text, color) tuple!')
else:
button = ("Submit", "blue")
if options:
for option in options:
if option[0].lower() == "theme":
theme = option[1]
elif option[0].lower() == "color":
border_color = option[1]
elif option[0].lower() == "width":
width = option[1]
else:
raise Exception('Unknown option: "%s"' % option[0])
btn_text = button[0]
btn_color = button[1]
if not btn_color:
btn_color = "blue"
content = '<div></div><font color="#0066ee">%s</font>' % (message)
content = js_utils.escape_quotes_if_needed(content)
overlay_opacity = "0.32"
if theme.lower() == "supervan":
overlay_opacity = "0.56"
if theme.lower() == "bootstrap":
overlay_opacity = "0.64"
if theme.lower() == "modern":
overlay_opacity = "0.5"
if theme.lower() == "material":
overlay_opacity = "0.4"
jqcd = """jconfirm({
boxWidth: '%s',
useBootstrap: false,
containerFluid: true,
bgOpacity: %s,
type: '%s',
theme: '%s',
animationBounce: 1,
typeAnimated: true,
animation: 'scale',
draggable: true,
dragWindowGap: 1,
container: 'body',
title: '%s',
content: '<div></div>' +
%s,
buttons: {
formSubmit: {
btnClass: 'btn-%s',
text: '%s',
action: function () {
jqc_input = this.$content.find('.jqc_input').val();
$jqc_input = this.$content.find('.jqc_input').val();
jconfirm.lastInputText = jqc_input;
$jqc_status = '%s'; // There is only one button
},
},
},
onContentReady: function () {
var jc = this;
this.$content.find('form.jqc_form').on('submit', function (e) {
// User submits the form by pressing "Enter" in the field
e.preventDefault();
jc.$$formSubmit.trigger('click'); // Click the button
});
}
});""" % (
width,
overlay_opacity,
border_color,
theme,
content,
form_code,
btn_color,
btn_text,
btn_text,
)
driver.execute_script(jqcd)
def jquery_confirm_full_dialog(driver, message, buttons, options=None):
js_utils.activate_jquery_confirm(driver)
# These defaults will be overwritten later if set
theme = constants.JqueryConfirm.DEFAULT_THEME
border_color = constants.JqueryConfirm.DEFAULT_COLOR
width = constants.JqueryConfirm.DEFAULT_WIDTH
if not message:
message = ""
btn_count = 0
b_html = """button_%s: {
btnClass: 'btn-%s',
text: '%s',
action: function(){
jqc_input = this.$content.find('.jqc_input').val();
$jqc_input = this.$content.find('.jqc_input').val();
jconfirm.lastInputText = jqc_input;
$jqc_status = '%s';
}
},"""
b1_html = """formSubmit: {
btnClass: 'btn-%s',
text: '%s',
action: function(){
jqc_input = this.$content.find('.jqc_input').val();
$jqc_input = this.$content.find('.jqc_input').val();
jconfirm.lastInputText = jqc_input;
jqc_status = '%s';
$jqc_status = jqc_status;
jconfirm.lastButtonText = jqc_status;
}
},"""
one_button_trigger = ""
if len(buttons) == 1:
# If there's only one button, allow form submit with "Enter/Return"
one_button_trigger = "jc.$$formSubmit.trigger('click');"
all_buttons = ""
for button in buttons:
text = button[0]
text = js_utils.escape_quotes_if_needed(text)
color = button[1]
if not color:
color = "blue"
btn_count += 1
if len(buttons) == 1:
new_button = b1_html % (color, text, text)
else:
new_button = b_html % (btn_count, color, text, text)
all_buttons += new_button
if options:
for option in options:
if option[0].lower() == "theme":
theme = option[1]
elif option[0].lower() == "color":
border_color = option[1]
elif option[0].lower() == "width":
width = option[1]
else:
raise Exception('Unknown option: "%s"' % option[0])
content = '<div></div><font color="#0066ee">%s</font>' % (message)
content = js_utils.escape_quotes_if_needed(content)
overlay_opacity = "0.32"
if theme.lower() == "supervan":
overlay_opacity = "0.56"
if theme.lower() == "bootstrap":
overlay_opacity = "0.64"
if theme.lower() == "modern":
overlay_opacity = "0.5"
if theme.lower() == "material":
overlay_opacity = "0.4"
jqcd = """jconfirm({
boxWidth: '%s',
useBootstrap: false,
containerFluid: true,
bgOpacity: %s,
type: '%s',
theme: '%s',
animationBounce: 1,
typeAnimated: true,
animation: 'scale',
draggable: true,
dragWindowGap: 1,
container: 'body',
title: '%s',
content: '<div></div>' +
%s,
buttons: {
%s
},
onContentReady: function () {
var jc = this;
this.$content.find('form.jqc_form').on('submit', function (e) {
// User submits the form by pressing "Enter" in the field
e.preventDefault();
%s
});
}
});""" % (
width,
overlay_opacity,
border_color,
theme,
content,
form_code,
all_buttons,
one_button_trigger,
)
driver.execute_script(jqcd)
| js_utils.activate_jquery_confirm(driver)
# These defaults will be overwritten later if set
theme = constants.JqueryConfirm.DEFAULT_THEME
border_color = constants.JqueryConfirm.DEFAULT_COLOR
width = constants.JqueryConfirm.DEFAULT_WIDTH
if options:
for option in options:
if option[0].lower() == "theme":
theme = option[1]
elif option[0].lower() == "color":
border_color = option[1]
elif option[0].lower() == "width":
width = option[1]
else:
raise Exception('Unknown option: "%s"' % option[0])
if not message:
message = ""
key_row = ""
if len(buttons) == 1: # There's only one button as an option
key_row = "keys: ['enter', 'y', '1']," # Shortcut: "Enter","Y","1"
b_html = """button_%s: {
btnClass: 'btn-%s',
text: '<b>%s</b>',
%s
action: function(){
jqc_status = '%s';
$jqc_status = jqc_status;
jconfirm.lastButtonText = jqc_status;
}
},"""
all_buttons = ""
btn_count = 0
for button in buttons:
btn_count += 1
text = button[0]
text = js_utils.escape_quotes_if_needed(text)
if len(buttons) > 1 and text.lower() == "yes":
key_row = "keys: ['y'],"
if btn_count < 10:
key_row = "keys: ['y', '%s']," % btn_count
elif len(buttons) > 1 and text.lower() == "no":
key_row = "keys: ['n'],"
if btn_count < 10:
key_row = "keys: ['n', '%s']," % btn_count
elif len(buttons) > 1:
if btn_count < 10:
key_row = "keys: ['%s']," % btn_count
color = button[1]
if not color:
color = "blue"
new_button = b_html % (btn_count, color, text, key_row, text)
all_buttons += new_button
content = '<div></div><font color="#0066ee">%s</font>' % (message)
content = js_utils.escape_quotes_if_needed(content)
overlay_opacity = "0.32"
if theme.lower() == "supervan":
overlay_opacity = "0.56"
if theme.lower() == "bootstrap":
overlay_opacity = "0.64"
if theme.lower() == "modern":
overlay_opacity = "0.5"
if theme.lower() == "material":
overlay_opacity = "0.4"
jqcd = """jconfirm({
boxWidth: '%s',
useBootstrap: false,
containerFluid: true,
bgOpacity: %s,
type: '%s',
theme: '%s',
animationBounce: 1,
typeAnimated: true,
animation: 'scale',
draggable: true,
dragWindowGap: 1,
container: 'body',
title: '%s',
content: '<div></div>',
buttons: {
%s
}
});""" % (
width,
overlay_opacity,
border_color,
theme,
content,
all_buttons,
)
driver.execute_script(jqcd) |
thread_safe_cache.go | package utils
import "sync"
type ThreadSafeCache struct {
mutex sync.Mutex
cache map[string]*cacheEntry
}
type cacheEntry struct {
value interface{}
err error
}
func (c *ThreadSafeCache) Get(key string, f func() (interface{}, error)) (interface{}, error) {
c.mutex.Lock()
defer c.mutex.Unlock()
if c.cache == nil {
c.cache = make(map[string]*cacheEntry)
}
e, ok := c.cache[key]
if ok {
return e.value, e.err
}
e = &cacheEntry{}
e.value, e.err = f()
c.cache[key] = e
return e.value, e.err
}
|
func (c *ThreadSafeMultiCache) Get(cacheKey string, key string, f func() (interface{}, error)) (interface{}, error) {
cache, _ := c.cache.Get(cacheKey, func() (interface{}, error) {
return &ThreadSafeCache{}, nil
})
return cache.(*ThreadSafeCache).Get(key, f)
} | type ThreadSafeMultiCache struct {
cache ThreadSafeCache
} |
has-handle-value.chromium.test.ts | import * as path from 'path';
import { Browser, ElementHandle, chromium } from 'playwright';
import * as SUT from '../index';
describe('handle has value', (): void => {
let browser: Browser | undefined = undefined;
// eslint-disable-next-line @typescript-eslint/no-empty-function
beforeEach((): void => {});
afterEach(async (): Promise<void> => {
if (browser) {
await browser.close();
}
});
test('should return false when handle is undefined', async (): Promise<void> => {
// Given
const handle: ElementHandle<Element> | undefined = undefined;
// When
const result = await SUT.hasHandleValue(handle, 'foobar');
// Then
expect(result).toBe(false);
});
test('should return false when handle is null', async (): Promise<void> => {
// Given
const handle: ElementHandle<Element> | null = null;
// When
const result = await SUT.hasHandleValue(handle, 'foobar');
// Then
expect(result).toBe(false);
});
test('should return false when handle is null and expected value is empty', async (): Promise<void> => {
// Given
const handle: ElementHandle<Element> | null = null;
// When
const result = await SUT.hasHandleValue(handle, '');
// Then
expect(result).toBe(false);
});
test('should return false when handle is undefined and expected value is empty', async (): Promise<void> => {
// Given
const handle: ElementHandle<Element> | undefined = undefined;
// When
const result = await SUT.hasHandleValue(handle, '');
// Then
expect(result).toBe(false);
});
test('should return true when selector has value', async (): Promise<void> => {
// Given
browser = await chromium.launch({ headless: true });
const browserContext = await browser.newContext({ viewport: null });
const page = await browserContext.newPage();
const url = `file:${path.join(__dirname, 'has-handle-value.test.html')}`;
await page.goto(url);
const inputSelector = '#emptyInput';
await page.click(inputSelector);
await page.type(inputSelector, ' yo ');
| const result = await SUT.hasHandleValue(handle, 'yo');
// Then
expect(handle).toBeDefined();
expect(result).toBe(true);
});
test('should return false when selector has not the value', async (): Promise<void> => {
// Given
browser = await chromium.launch({ headless: true });
const browserContext = await browser.newContext({ viewport: null });
const page = await browserContext.newPage();
const url = `file:${path.join(__dirname, 'has-handle-value.test.html')}`;
await page.goto(url);
const inputSelector = '#emptyInput';
await page.click(inputSelector);
await page.type(inputSelector, ' yo ');
const handle = await page.$(inputSelector);
// When
const result = await SUT.hasHandleValue(handle, 'foobar');
// Then
expect(handle).toBeDefined();
expect(result).toBe(false);
});
test('should return true when selector has undefined value and expected is empty', async (): Promise<void> => {
// Given
browser = await chromium.launch({ headless: true });
const browserContext = await browser.newContext({ viewport: null });
const page = await browserContext.newPage();
const url = `file:${path.join(__dirname, 'has-handle-value.test.html')}`;
await page.goto(url);
const handle = await page.$('#withUndefinedValue');
// When
const result = await SUT.hasHandleValue(handle, '');
// Then
expect(handle).toBeDefined();
expect(result).toBe(true);
});
test('should return true when selector has empty value and expected is empty', async (): Promise<void> => {
// Given
browser = await chromium.launch({ headless: true });
const browserContext = await browser.newContext({ viewport: null });
const page = await browserContext.newPage();
const url = `file:${path.join(__dirname, 'has-handle-value.test.html')}`;
await page.goto(url);
const handle = await page.$('#emptyInput');
// When
const result = await SUT.hasHandleValue(handle, '');
// Then
expect(handle).toBeDefined();
expect(result).toBe(true);
});
test('should return true when selector has null value and expected is empty', async (): Promise<void> => {
// Given
browser = await chromium.launch({ headless: true });
const browserContext = await browser.newContext({ viewport: null });
const page = await browserContext.newPage();
const url = `file:${path.join(__dirname, 'has-handle-value.test.html')}`;
await page.goto(url);
const handle = await page.$('#withNullValue');
// When
const result = await SUT.hasHandleValue(handle, '');
// Then
expect(handle).toBeDefined();
expect(result).toBe(true);
});
}); | const handle = await page.$(inputSelector);
// When
|
test_meta.py | # Copyright (c) 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from murano.tests.unit.dsl.foundation import object_model as om
from murano.tests.unit.dsl.foundation import test_case
class TestMeta(test_case.DslTestCase):
def setUp(self):
super(TestMeta, self).setUp()
self._runner = self.new_runner(om.Object('metatests.TestMeta'))
def test_class_multi_meta(self):
|
def test_class_single_meta(self):
self.assertCountEqual(
[5, 6], self._runner.testClassSingleMeta())
def test_parent_class_not_inherited_meta(self):
self.assertEqual(3, self._runner.testParentClassNotInheritedMeta())
def test_method_meta(self):
self.assertCountEqual(
[7, 8, 9, 4, 1, 10], self._runner.testMethodMeta())
def test_method_argument_meta(self):
self.assertCountEqual(
[1, 2, 3], self._runner.testMethodArgumentMeta())
def test_inherited_property_meta(self):
self.assertEqual(
[1], self._runner.testInheritedPropertyMeta())
def test_overridden_property_meta(self):
self.assertCountEqual(
[1, 4], self._runner.testOverriddenPropertyMeta())
def test_package_meta(self):
self.assertEqual(
[], self._runner.testPackageMeta())
def test_complex_meta(self):
self.assertCountEqual([
[1, 'metatests.PropertyType'],
[2, 'metatests.PropertyType'],
[3, 'metatests.PropertyType2'],
[4, 'metatests.PropertyType'],
[5, 'metatests.PropertyType2']
], self._runner.testComplexMeta())
| self.assertCountEqual(
[4, 1, 111, 2], self._runner.testClassMultiMeta()) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.