text
stringlengths
2
1.04M
meta
dict
#endregion

namespace Dynastream.Fit
{
    /// <summary>
    /// Implements the profile TimerTrigger type as an enum
    /// </summary>
    public enum TimerTrigger : byte
    {
        Manual = 0,
        Auto = 1,
        FitnessEquipment = 2,
        // 0xFF marks an invalid/unset value for this byte-typed field.
        Invalid = 0xFF
    }
}
{ "content_hash": "0530caaff5c6131f5c79dbac84a21370", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 58, "avg_line_length": 16.555555555555557, "alnum_prop": 0.5201342281879194, "repo_name": "mikekov/CycleTrainer", "id": "1c34b09fe6e8d1b86375df6fd4e42c99de20258d", "size": "1279", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "FIT/Dynastream/Fit/Profile/Types/TimerTrigger.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "1281323" } ], "symlink_target": "" }
package net.arnx.wmf2svg.gdi.svg;

import net.arnx.wmf2svg.gdi.GdiPalette;

/**
 * SVG-side representation of a GDI palette: a palette version number plus
 * the raw palette entries.
 */
class SvgPalette extends SvgObject implements GdiPalette {
	// Both fields are assigned exactly once in the constructor and never
	// reassigned, so they are declared final to make that explicit.
	private final int version;
	private final int[] entries;

	/**
	 * @param gdi the owning SvgGdi context, forwarded to SvgObject
	 * @param version the palette version number
	 * @param entries the palette entries (stored by reference, not copied)
	 */
	public SvgPalette(
		SvgGdi gdi,
		int version,
		int[] entries) {
		super(gdi);
		this.version = version;
		this.entries = entries;
	}

	public int getVersion() {
		return version;
	}

	/**
	 * Returns the internal entries array itself (no defensive copy), so
	 * callers share mutable state with this object.
	 */
	public int[] getEntries() {
		return entries;
	}
}
{ "content_hash": "0613500db2c67ba37c1f892be339540a", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 59, "avg_line_length": 18, "alnum_prop": 0.6622222222222223, "repo_name": "hidekatsu-izuno/wmf2svg", "id": "b607383e3c186783f1485a100e72c0169204376f", "size": "450", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/java/net/arnx/wmf2svg/gdi/svg/SvgPalette.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "834" }, { "name": "Java", "bytes": "205691" } ], "symlink_target": "" }
FROM balenalib/intel-nuc-ubuntu:eoan-run

# remove several traces of debian python
RUN apt-get purge -y python.*

# http://bugs.python.org/issue19846
# > At the moment, setting "LANG=C" on a Linux system *fundamentally breaks Python 3*, and that's not OK.
ENV LANG C.UTF-8

# install python dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
		ca-certificates \
		netbase \
	&& rm -rf /var/lib/apt/lists/*

# Import the GPG keys later used to verify downloads.
# key 63C7CC90: public key "Simon McVittie <[email protected]>" imported
# key 3372DCFA: public key "Donald Stufft (dstufft) <[email protected]>" imported
RUN gpg --keyserver keyring.debian.org --recv-keys 4DE8FF2A63C7CC90 \
	&& gpg --keyserver keyserver.ubuntu.com --recv-key 6E3CBCE93372DCFA \
	&& gpg --keyserver keyserver.ubuntu.com --recv-keys 0x52a43a1e4b77b059

ENV PYTHON_VERSION 3.6.12

# if this is called "PIP_VERSION", pip explodes with "ValueError: invalid truth value '<VERSION>'"
ENV PYTHON_PIP_VERSION 21.0.1

ENV SETUPTOOLS_VERSION 56.0.0

# Download a prebuilt Python tarball, verify its sha256, unpack it over /,
# bootstrap pip if it is missing, then pin pip/setuptools to the versions
# declared above and strip tests and bytecode caches to shrink the image.
RUN set -x \
	&& buildDeps=' \
		curl \
	' \
	&& apt-get update && apt-get install -y $buildDeps --no-install-recommends && rm -rf /var/lib/apt/lists/* \
	&& curl -SLO "http://resin-packages.s3.amazonaws.com/python/v$PYTHON_VERSION/Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" \
	&& echo "c811b37dfb62442ccf23f28ca81e5a48eb85b071a58ee69b278f25520196cb2e  Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" | sha256sum -c - \
	&& tar -xzf "Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" --strip-components=1 \
	&& rm -rf "Python-$PYTHON_VERSION.linux-amd64-openssl1.1.tar.gz" \
	&& ldconfig \
	&& if [ ! -e /usr/local/bin/pip3 ]; then : \
		&& curl -SLO "https://raw.githubusercontent.com/pypa/get-pip/430ba37776ae2ad89f794c7a43b90dc23bac334c/get-pip.py" \
		&& echo "19dae841a150c86e2a09d475b5eb0602861f2a5b7761ec268049a662dbd2bd0c  get-pip.py" | sha256sum -c - \
		&& python3 get-pip.py \
		&& rm get-pip.py \
	; fi \
	&& pip3 install --no-cache-dir --upgrade --force-reinstall pip=="$PYTHON_PIP_VERSION" setuptools=="$SETUPTOOLS_VERSION" \
	&& find /usr/local \
		\( -type d -a -name test -o -name tests \) \
		-o \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
		-exec rm -rf '{}' + \
	&& cd / \
	&& rm -rf /usr/src/python ~/.cache

# make some useful symlinks that are expected to exist
RUN cd /usr/local/bin \
	&& ln -sf pip3 pip \
	&& { [ -e easy_install ] || ln -s easy_install-* easy_install; } \
	&& ln -sf idle3 idle \
	&& ln -sf pydoc3 pydoc \
	&& ln -sf python3 python \
	&& ln -sf python3-config python-config

# set PYTHONPATH to point to dist-packages
ENV PYTHONPATH /usr/lib/python3/dist-packages:$PYTHONPATH

# NOTE(review): the message string opens a single quote that is never closed;
# this matches the upstream generated file, so it is left as-is here.
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]

# Fetch the balena python stack smoke test, run it, then delete it.
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
	&& echo "Running test-stack@python" \
	&& chmod +x [email protected] \
	&& bash [email protected] \
	&& rm -rf [email protected]

# Record image details under /.balena/messages for later display.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: Intel 64-bit (x86-64) \nOS: Ubuntu eoan \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nPython v3.6.12, Pip v21.0.1, Setuptools v56.0.0 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info

# Shim /bin/sh so the first shell invocation prints balena-info, then the
# shim restores the real sh and re-executes the requested command.
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
	&& chmod +x /bin/sh-shim \
	&& cp /bin/sh /bin/sh.real \
	&& mv /bin/sh-shim /bin/sh
{ "content_hash": "68080ff0e7f901f6de7c17cbe0df5736", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 722, "avg_line_length": 51.94871794871795, "alnum_prop": 0.7070582428430404, "repo_name": "nghiant2710/base-images", "id": "57f6a8355c0dde99c055bb4ad7abde93fd89195e", "size": "4073", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "balena-base-images/python/intel-nuc/ubuntu/eoan/3.6.12/run/Dockerfile", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "144558581" }, { "name": "JavaScript", "bytes": "16316" }, { "name": "Shell", "bytes": "368690" } ], "symlink_target": "" }
package net.glowstone.entity;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import com.flowpowered.network.Message;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
import lombok.Getter;
import lombok.Setter;
import net.glowstone.EventFactory;
import net.glowstone.entity.meta.MetadataIndex;
import net.glowstone.entity.meta.profile.GlowPlayerProfile;
import net.glowstone.entity.objects.GlowItem;
import net.glowstone.inventory.ArmorConstants;
import net.glowstone.inventory.EquipmentMonitor;
import net.glowstone.inventory.GlowCraftingInventory;
import net.glowstone.inventory.GlowEnchantingInventory;
import net.glowstone.inventory.GlowInventory;
import net.glowstone.inventory.GlowInventoryView;
import net.glowstone.inventory.GlowPlayerInventory;
import net.glowstone.io.entity.EntityStorage;
import net.glowstone.net.message.play.entity.EntityEquipmentMessage;
import net.glowstone.net.message.play.entity.EntityHeadRotationMessage;
import net.glowstone.net.message.play.entity.SpawnPlayerMessage;
import net.glowstone.util.InventoryUtil;
import net.glowstone.util.Position;
import net.glowstone.util.UuidUtils;
import net.glowstone.util.nbt.CompoundTag;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Sign;
import org.bukkit.entity.Entity;
import org.bukkit.entity.HumanEntity;
import org.bukkit.event.entity.EntityDamageEvent.DamageCause;
import org.bukkit.event.inventory.InventoryCloseEvent;
import org.bukkit.event.inventory.InventoryType;
import org.bukkit.event.inventory.InventoryType.SlotType;
import org.bukkit.inventory.EntityEquipment;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryView;
import org.bukkit.inventory.InventoryView.Property;
import org.bukkit.inventory.ItemStack;
import org.bukkit.permissions.PermissibleBase;
import org.bukkit.permissions.Permission;
import org.bukkit.permissions.PermissionAttachment;
import org.bukkit.permissions.PermissionAttachmentInfo;
import org.bukkit.plugin.Plugin;
import org.bukkit.util.Vector;

/**
 * Represents a human entity, such as an NPC or a player.
 */
public abstract class GlowHumanEntity extends GlowLivingEntity implements HumanEntity {

    /**
     * The player profile with name and UUID information.
     */
    @Getter
    private final GlowPlayerProfile profile;

    /**
     * The inventory of this human.
     */
    @Getter
    private final GlowPlayerInventory inventory = new GlowPlayerInventory(this);

    /**
     * The ender chest inventory of this human.
     */
    @Getter
    private final GlowInventory enderChest = new GlowInventory(this, InventoryType.ENDER_CHEST);

    /**
     * Whether this human is sleeping or not.
     */
    @Getter
    protected boolean sleeping;

    /**
     * This human's PermissibleBase for permissions.
     */
    protected PermissibleBase permissions;

    /**
     * The item the player has on their cursor.
     */
    @Getter
    @Setter
    private ItemStack itemOnCursor;

    /**
     * How long this human has been sleeping.
     */
    @Getter
    private int sleepTicks;

    /**
     * Whether this human is considered an op.
     */
    @Getter
    private boolean op;

    /**
     * The player's active game mode.
     */
    @Getter
    @Setter
    private GameMode gameMode;

    /**
     * The player's currently open inventory.
     */
    @Getter
    private InventoryView openInventory;

    /**
     * The player's xpSeed. Used for calculation of enchantments.
     */
    @Getter
    @Setter
    private int xpSeed;

    /**
     * Whether the client needs to be notified of armor changes (set to true after joining).
     */
    private boolean needsArmorUpdate = false;

    /**
     * Creates a human within the specified world and with the specified name.
     *
     * @param location The location.
     * @param profile The human's profile with name and UUID information.
     */
    public GlowHumanEntity(Location location, GlowPlayerProfile profile) {
        super(location);
        this.profile = profile;
        xpSeed = new Random().nextInt(); //TODO: use entity's random instance
        permissions = new PermissibleBase(this);
        gameMode = server.getDefaultGameMode();

        // The default view is the player's own crafting/inventory view; the
        // human views both halves of it from the start.
        openInventory = new GlowInventoryView(this);
        addViewer(openInventory.getTopInventory());
        addViewer(openInventory.getBottomInventory());
    }

    ////////////////////////////////////////////////////////////////////////////
    // Internals

    /**
     * Builds the network messages that spawn this human for a client:
     * the spawn-player message, head rotation, and all equipment slots.
     */
    @Override
    public List<Message> createSpawnMessage() {
        List<Message> result = new LinkedList<>();

        // spawn player
        double x = location.getX();
        double y = location.getY();
        double z = location.getZ();
        int yaw = Position.getIntYaw(location);
        int pitch = Position.getIntPitch(location);
        result.add(new SpawnPlayerMessage(entityId, profile.getId(), x, y, z, yaw, pitch,
            metadata.getEntryList()));

        // head facing
        result.add(new EntityHeadRotationMessage(entityId, yaw));

        // equipment
        EntityEquipment equipment = getEquipment();
        result.add(new EntityEquipmentMessage(entityId, EntityEquipmentMessage.HELD_ITEM, equipment
            .getItemInMainHand()));
        result.add(new EntityEquipmentMessage(entityId, EntityEquipmentMessage.OFF_HAND, equipment
            .getItemInOffHand()));
        // Four armor slots, sent as consecutive slot ids starting at BOOTS_SLOT.
        for (int i = 0; i < 4; i++) {
            result.add(new EntityEquipmentMessage(entityId, EntityEquipmentMessage.BOOTS_SLOT + i,
                equipment.getArmorContents()[i]));
        }
        return result;
    }

    /**
     * Per-tick update: advances (or resets) the sleep counter and syncs
     * armor state.
     */
    @Override
    public void pulse() {
        super.pulse();
        if (sleeping) {
            ++sleepTicks;
        } else {
            sleepTicks = 0;
        }
        processArmorChanges();
    }

    /**
     * Process changes to the human enitity's armor, and update the entity's armor attributes
     * accordingly.
     */
    private void processArmorChanges() {
        GlowPlayer player = null;
        if (this instanceof GlowPlayer) {
            player = ((GlowPlayer) this);
        }
        boolean armorUpdate = false;
        List<EquipmentMonitor.Entry> armorChanges = getEquipmentMonitor().getArmorChanges();
        if (armorChanges.size() > 0) {
            for (EquipmentMonitor.Entry entry : armorChanges) {
                // needsArmorUpdate is false on the very first pass (it is set
                // at the end of this method), so changes detected then are not
                // sent to the client, only applied to the attributes below.
                if (player != null && needsArmorUpdate) {
                    // NOTE(review): entity id 0 here — presumably the client
                    // interprets 0 as "self"; confirm against protocol docs.
                    player.getSession().send(new EntityEquipmentMessage(0, entry.slot, entry.item));
                }
                armorUpdate = true;
            }
        }
        if (armorUpdate) {
            getAttributeManager().setProperty(AttributeManager.Key.KEY_ARMOR,
                ArmorConstants.getDefense(getEquipment().getArmorContents()));
            getAttributeManager().setProperty(AttributeManager.Key.KEY_ARMOR_TOUGHNESS,
                ArmorConstants.getToughness(getEquipment().getArmorContents()));
        }
        needsArmorUpdate = true;
    }

    @Override
    public String getName() {
        return profile.getName();
    }

    ////////////////////////////////////////////////////////////////////////////
    // Properties

    @Override
    public UUID getUniqueId() {
        return profile.getId();
    }

    /**
     * The UUID is fixed by the profile; only "setting" the already-current
     * UUID is tolerated, anything else throws.
     *
     * @throws IllegalStateException if {@code uuid} differs from the profile's UUID
     */
    @Override
    public void setUniqueId(UUID uuid) {
        // silently allow setting the same UUID again
        if (!profile.getId().equals(uuid)) {
            throw new IllegalStateException(
                "UUID of " + this + " is already " + UuidUtils.toString(profile.getId()));
        }
    }

    @Override
    public boolean isBlocking() {
        return false;
    }

    @Override
    public int getExpToLevel() {
        throw new UnsupportedOperationException("Non-player HumanEntity has no level");
    }

    @Override
    public EntityEquipment getEquipment() {
        return getInventory();
    }

    /**
     * Fire ticks only apply in survival/adventure; creative and spectator
     * humans ignore them.
     */
    @Override
    public void setFireTicks(int ticks) {
        if (gameMode == GameMode.SURVIVAL || gameMode == GameMode.ADVENTURE) {
            super.setFireTicks(ticks);
        }
    }

    ////////////////////////////////////////////////////////////////////////////
    // Permissions
    // All permission queries and attachment management delegate to the
    // PermissibleBase created in the constructor.

    @Override
    public boolean isPermissionSet(String name) {
        return permissions.isPermissionSet(name);
    }

    @Override
    public boolean isPermissionSet(Permission perm) {
        return permissions.isPermissionSet(perm);
    }

    @Override
    public boolean hasPermission(String name) {
        return permissions.hasPermission(name);
    }

    @Override
    public boolean hasPermission(Permission perm) {
        return permissions.hasPermission(perm);
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin) {
        return permissions.addAttachment(plugin);
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, int ticks) {
        return permissions.addAttachment(plugin, ticks);
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, String name, boolean value) {
        return permissions.addAttachment(plugin, name, value);
    }

    @Override
    public PermissionAttachment addAttachment(Plugin plugin, String name, boolean value,
        int ticks) {
        return permissions.addAttachment(plugin, name, value, ticks);
    }

    @Override
    public void removeAttachment(PermissionAttachment attachment) {
        permissions.removeAttachment(attachment);
    }

    @Override
    public void recalculatePermissions() {
        permissions.recalculatePermissions();
    }

    @Override
    public Set<PermissionAttachmentInfo> getEffectivePermissions() {
        return permissions.getEffectivePermissions();
    }

    @Override
    public void setOp(boolean value) {
        op = value;
        recalculatePermissions();
    }

    ////////////////////////////////////////////////////////////////////////////
    // Health

    /**
     * Humans take void/suicide damage regardless of game mode; other damage
     * only in survival or adventure, and only if the superclass also allows it.
     */
    @Override
    public boolean canTakeDamage(DamageCause damageCause) {
        return (damageCause == DamageCause.VOID || damageCause == DamageCause.SUICIDE
            || gameMode == GameMode.SURVIVAL || gameMode == GameMode.ADVENTURE) && super
            .canTakeDamage(damageCause);
    }

    ////////////////////////////////////////////////////////////////////////////
    // Inventory

    @Override
    public ItemStack getItemInHand() {
        return getInventory().getItemInMainHand();
    }

    @Override
    public void setItemInHand(ItemStack item) {
        getInventory().setItemInMainHand(item);
    }

    @Override
    public boolean setWindowProperty(Property prop, int value) {
        // nb: does not actually send anything
        return prop.getType() == openInventory.getType();
    }

    @Override
    public InventoryView openInventory(Inventory inventory) {
        InventoryView view = new GlowInventoryView(this, inventory);
        openInventory(view);
        return view;
    }

    /**
     * Switches this human to the given view: stops viewing both halves of the
     * current view and starts viewing both halves of the new one.
     *
     * @param inventory the view to open; must not be null
     */
    @Override
    public void openInventory(InventoryView inventory) {
        checkNotNull(inventory);
        this.inventory.getDragTracker().reset();

        // stop viewing the old inventory and start viewing the new one
        removeViewer(openInventory.getTopInventory());
        removeViewer(openInventory.getBottomInventory());
        openInventory = inventory;
        addViewer(openInventory.getTopInventory());
        addViewer(openInventory.getBottomInventory());
    }

    /**
     * Opens a crafting view. Unless {@code force} is set, the block at the
     * location must actually be a workbench; otherwise returns null.
     */
    @Override
    public InventoryView openWorkbench(Location location, boolean force) {
        if (location == null) {
            location = getLocation();
        }
        if (!force && location.getBlock().getType() != Material.WORKBENCH) {
            return null;
        }
        return openInventory(new GlowCraftingInventory(this, InventoryType.WORKBENCH));
    }

    /**
     * Opens an enchanting view. Unless {@code force} is set, the block at the
     * location must actually be an enchantment table; otherwise returns null.
     */
    @Override
    public InventoryView openEnchanting(Location location, boolean force) {
        if (location == null) {
            location = getLocation();
        }
        if (!force && location.getBlock().getType() != Material.ENCHANTMENT_TABLE) {
            return null;
        }
        return openInventory(new GlowEnchantingInventory(location, (GlowPlayer) this));
    }

    /**
     * Closes the current view: fires the close event, drops the cursor item
     * and unusable crafting inputs (outside creative), then resets to the
     * default self view.
     */
    @Override
    public void closeInventory() {
        EventFactory.getInstance().callEvent(new InventoryCloseEvent(openInventory));
        if (getGameMode() != GameMode.CREATIVE) {
            if (!InventoryUtil.isEmpty(getItemOnCursor())) {
                drop(getItemOnCursor());
            }
            handleUnusedInputs();
        }
        setItemOnCursor(InventoryUtil.createEmptyStack());
        resetInventoryView();
    }

    @Override
    public void closeInventory(InventoryCloseEvent.Reason reason) {
        // TODO: use reason?
        closeInventory();
    }

    // Drop items left in crafting area.
    private void handleUnusedInputs() {
        // NOTE(review): the index iterates top-inventory slots but reads items
        // via getOpenInventory().getItem(i) (view-relative indexing) — confirm
        // that view indices and top-inventory slot indices coincide here.
        for (int i = 0; i < getTopInventory().getSlots().size(); i++) {
            ItemStack itemStack = getOpenInventory().getItem(i);
            if (InventoryUtil.isEmpty(itemStack)) {
                continue;
            }
            if (isDroppableCraftingSlot(i)) {
                getOpenInventory().getBottomInventory().addItem(itemStack);
                getOpenInventory().getTopInventory().setItem(i, InventoryUtil.createEmptyStack());
            }
        }
    }

    // True for CRAFTING-type slots except in brewing stands and furnaces,
    // whose inputs stay put when the view closes.
    private boolean isDroppableCraftingSlot(int i) {
        if (getTopInventory().getSlot(i).getType() == SlotType.CRAFTING) {
            switch (getTopInventory().getType()) {
                case BREWING:
                case FURNACE:
                    return false;
                default:
                    return true;
            }
        } else {
            return false;
        }
    }

    private GlowInventory getTopInventory() {
        return (GlowInventory) getOpenInventory().getTopInventory();
    }

    // Restores the default self view (own inventory + crafting grid).
    void resetInventoryView() {
        openInventory(new GlowInventoryView(this));
    }

    private void addViewer(Inventory inventory) {
        if (inventory instanceof GlowInventory) {
            ((GlowInventory) inventory).addViewer(this);
        }
    }

    private void removeViewer(Inventory inventory) {
        if (inventory instanceof GlowInventory) {
            ((GlowInventory) inventory).removeViewer(this);
        }
    }

    /**
     * Drops the item this entity currently has in its hands and remove the item from the
     * HumanEntity's inventory.
     *
     * @param wholeStack True if the whole stack should be dropped
     */
    public void dropItemInHand(boolean wholeStack) {
        ItemStack stack = getItemInHand();
        if (InventoryUtil.isEmpty(stack)) {
            return;
        }

        ItemStack dropping = stack.clone();
        if (!wholeStack) {
            dropping.setAmount(1);
        }

        GlowItem dropped = drop(dropping);
        if (dropped == null) {
            // drop was cancelled — leave the hand untouched
            return;
        }

        if (stack.getAmount() == 1 || wholeStack) {
            setItemInHand(InventoryUtil.createEmptyStack());
        } else {
            ItemStack now = stack.clone();
            now.setAmount(now.getAmount() - 1);
            setItemInHand(now);
        }
    }

    /**
     * Spawns a new {@link GlowItem} in the world, as if this HumanEntity had dropped it.
     *
     * <p>Note that this does NOT remove the item from the inventory.
     *
     * @param stack The item to drop
     * @return the GlowItem that was generated, or null if the spawning was cancelled
     * @throws IllegalArgumentException if the stack is empty
     */
    public GlowItem drop(ItemStack stack) {
        checkArgument(!InventoryUtil.isEmpty(stack), "stack must not be empty");

        // Spawn slightly below eye height, in front of the entity.
        Location dropLocation = location.clone().add(0, getEyeHeight(true) - 0.3, 0);
        GlowItem dropItem = world.dropItem(dropLocation, stack);

        /* These calculations are strictly based off of trial-and-error to find the closest
            similar behavior to the official server. May be changed in the future. */
        Vector vel = location.getDirection().multiply(0.3);
        ThreadLocalRandom tlr = ThreadLocalRandom.current();
        double randOffset = 0.02;
        vel.add(new Vector(
            tlr.nextDouble(randOffset) - randOffset / 2,
            tlr.nextDouble(0.12),
            tlr.nextDouble(randOffset) - randOffset / 2));
        dropItem.setVelocity(vel);
        return dropItem;
    }

    /**
     * Looks up the entity riding the left shoulder via the UUID stored in the
     * shoulder metadata tag, or null if the tag is empty.
     */
    @Override
    public Entity getShoulderEntityLeft() {
        CompoundTag tag = getLeftShoulderTag();
        if (tag.isEmpty()) {
            return null;
        }
        UUID uuid = new UUID(tag.getLong("UUIDMost"), tag.getLong("UUIDLeast"));
        return server.getEntity(uuid);
    }

    /**
     * Serializes the given entity into the left shoulder tag, or releases the
     * current occupant when passed null.
     */
    @Override
    public void setShoulderEntityLeft(Entity entity) {
        if (entity == null) {
            releaseLeftShoulderEntity();
        } else {
            CompoundTag tag = new CompoundTag();
            EntityStorage.save((GlowEntity) entity, tag);
            setLeftShoulderTag(tag);
        }
    }

    /**
     * Looks up the entity riding the right shoulder via the UUID stored in the
     * shoulder metadata tag, or null if the tag is empty.
     */
    @Override
    public Entity getShoulderEntityRight() {
        CompoundTag tag = getRightShoulderTag();
        if (tag.isEmpty()) {
            return null;
        }
        UUID uuid = new UUID(tag.getLong("UUIDMost"), tag.getLong("UUIDLeast"));
        return server.getEntity(uuid);
    }

    /**
     * Serializes the given entity into the right shoulder tag, or releases the
     * current occupant when passed null.
     */
    @Override
    public void setShoulderEntityRight(Entity entity) {
        if (entity == null) {
            releaseRightShoulderEntity();
        } else {
            CompoundTag tag = new CompoundTag();
            EntityStorage.save((GlowEntity) entity, tag);
            setRightShoulderTag(tag);
        }
    }

    /**
     * Re-spawns the entity stored on the left shoulder at this human's
     * location and clears the tag. Returns null if the shoulder was empty.
     */
    @Override
    public Entity releaseLeftShoulderEntity() {
        CompoundTag tag = getLeftShoulderTag();
        GlowEntity shoulderEntity = null;
        if (!tag.isEmpty()) {
            shoulderEntity = EntityStorage.loadEntity(world, tag);
            shoulderEntity.setRawLocation(getLocation());
        }
        setLeftShoulderTag(null);
        return shoulderEntity;
    }

    /**
     * Re-spawns the entity stored on the right shoulder at this human's
     * location and clears the tag. Returns null if the shoulder was empty.
     */
    @Override
    public Entity releaseRightShoulderEntity() {
        CompoundTag tag = getRightShoulderTag();
        GlowEntity shoulderEntity = null;
        if (!tag.isEmpty()) {
            shoulderEntity = EntityStorage.loadEntity(world, tag);
            shoulderEntity.setRawLocation(getLocation());
        }
        setRightShoulderTag(null);
        return shoulderEntity;
    }

    // Shoulder tags live in the entity metadata; a missing tag is normalized
    // to an empty CompoundTag on read, and null on write stores an empty tag.
    public CompoundTag getLeftShoulderTag() {
        Object tag = metadata.get(MetadataIndex.PLAYER_LEFT_SHOULDER);
        return tag == null ? new CompoundTag() : (CompoundTag) tag;
    }

    public void setLeftShoulderTag(CompoundTag tag) {
        metadata.set(MetadataIndex.PLAYER_LEFT_SHOULDER, tag == null ? new CompoundTag() : tag);
    }

    public CompoundTag getRightShoulderTag() {
        Object tag = metadata.get(MetadataIndex.PLAYER_RIGHT_SHOULDER);
        return tag == null ? new CompoundTag() : (CompoundTag) tag;
    }

    public void setRightShoulderTag(CompoundTag tag) {
        metadata.set(MetadataIndex.PLAYER_RIGHT_SHOULDER, tag == null ? new CompoundTag() : tag);
    }

    @Override
    public void openSign(Sign sign) {
        throw new UnsupportedOperationException("Not implemented yet.");
    }
}
{ "content_hash": "bac1f2af0320b783b35ffc275969958a", "timestamp": "", "source": "github", "line_count": 616, "max_line_length": 100, "avg_line_length": 31.76948051948052, "alnum_prop": 0.630148185998978, "repo_name": "GlowstoneMC/GlowstonePlusPlus", "id": "97bf747983ce0c177aa71bdf0767eb25e0caf7b3", "size": "19570", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "src/main/java/net/glowstone/entity/GlowHumanEntity.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "2325722" }, { "name": "Python", "bytes": "1031" }, { "name": "Ruby", "bytes": "335" }, { "name": "Shell", "bytes": "2214" } ], "symlink_target": "" }
from osext.test import pushdtest
import unittest

# Collect every TestCase defined in the pushd test module and run the
# resulting suite with verbose, per-test output.
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(pushdtest)
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite)
{ "content_hash": "87d8b07c6b9c6211ecf301d88d83599e", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 60, "avg_line_length": 31.8, "alnum_prop": 0.8364779874213837, "repo_name": "Appdynamics/python-osext", "id": "bbe168afde05e986a6ef7bd9c9d8fb93ec99525d", "size": "159", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "osext/test/__main__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "1614" }, { "name": "Shell", "bytes": "77" } ], "symlink_target": "" }
package subchal

import (
    "os"
    "fmt"
    "time"
    "io/ioutil"
    "database/sql"

    "launchpad.net/goyaml"
)

// A transfer from one route to another, or a turnaround, as included in a Walk.
//
// The backtick tags are the YAML field names used by goyaml.
type Step struct {
    FromStation string `from_station`
    ToStation   string `to_station`
    ToRoute     string `to_route`
}

// A complete walk through the subway, touching every station at least once.
type Walk struct {
    StartStation string `start_station`
    StartRoute   string `start_route`
    // StartTime is tagged with a nonexistent variable name so that
    // goyaml.Marshal doesn't try to populate it
    StartTime time.Time `nonexistent`
    EndStation string   `end_station`
    Steps      []Step   `steps`

    // Gets converted to a time.Time and placed in StartTime
    StartTimeStr string `start_time`

    // Counts visits to stations so we can make sure we hit them all
    StationVisits map[string]int
}

// SimulationError is the error type returned for domain-level failures
// (missing stations, stops, or stoptimes) during a simulation.
type SimulationError struct {
    s string
}

func (e SimulationError) Error() string {
    return e.s
}

// Simulates an execution of the Walk through the subway system.
//
// Returns the number of seconds that the Walk would take, from the beginning
// of the first trip to the end of the last.
func (wk *Walk) RunSim(db *sql.DB) (int, error) { dur := 0 t := wk.StartTime dt, err := TimeToTransfer(db, t, wk.StartStation, wk.StartStation, wk.StartRoute, wk.Steps[0].FromStation) if err != nil { return 0, err } t.Add(time.Duration(dt) * time.Second) dur += int(t.Sub(wk.StartTime).Seconds()) return dur, nil return 0, nil } // Finds the stop_id of all stations and stops with the given name func StopIDsFromName(db *sql.DB, stopName string) ([]string, error) { stopIDs := make([]string, 0) stopRows, err := db.Query(` SELECT s.stop_id FROM stops s WHERE s.stop_name = ?; `, stopName) if err != nil { return stopIDs, err } for stopRows.Next() { var stopID string err = stopRows.Scan(&stopID) if err != nil { return stopIDs, SimulationError{"Error getting stop ID from database"} } stopIDs = append(stopIDs, stopID) } if len(stopIDs) < 1 { return stopIDs, SimulationError{fmt.Sprintf("No station with name '%s'", stopName)} } return stopIDs, nil } // Finds the stop_id the station in fromStops with service to a station in toStops // // Returns only the first result found, if there's more than one result func StopGoingToward(db *sql.DB, []string fromStops, []string toStops) ([]string, error) { stopIDs := make([]string, 0) tripRows, err := db.Query(` SELECT t.trip_id, FROM stop_times st JOIN trips t ON st.trip_id = t.trip_id WHERE st.stop_id IN ? OR st.stop_id IN ? LIMIT 1; `, fromStops, toStops) if err != nil { return stopIDs, err } for stopRows.Next() { var stopID string err = stopRows.Scan(&stopID) if err != nil { return stopIDs, SimulationError{"Error getting stop ID from database"} } stopIDs = append(stopIDs, stopID) } if len(stopIDs) < 1 { return stopIDs, SimulationError{fmt.Sprintf("No station with name '%s'", stopName)} } return stopIDs, nil } // Determines the number of seconds it will take to transfer to the // given route from the given stop at the given time. 
func TimeToTransfer(db *sql.DB, t time.Time, fromStation string, toStation string, toRoute string, towardStationName string) (int, error) {
    towardStation, err := StopIDsFromName(db, towardStationName)
    if err != nil {
        return 0, err
    }
    // NOTE(review): towardStation is only used by the commented-out call
    // below and in an error message; the lookup result is otherwise unused.
    // toStopID, err := StopGoingToward(towardStation)
    // if err != nil { return 0, err }

    // First we need to find the stops that are contained in the given
    // station.
    stopRows, err := db.Query(`
        SELECT s.stop_id
        FROM stops s
        WHERE s.parent_station = ?;
    `, toStation)
    if err != nil {
        return 0, err
    }
    toStops := make([]string, 0)
    for stopRows.Next() {
        var stopID string
        stopRows.Scan(&stopID)
        toStops = append(toStops, stopID)
    }
    // NOTE(review): err is already known nil here, so this branch is dead;
    // the intent was presumably "if len(toStops) < 1". Also, toStops is never
    // used after this point — the departure query below filters on toStation
    // directly rather than on these child stops. Confirm intent before fixing.
    if err != nil {
        return 0, SimulationError{fmt.Sprintf("No stops with parent station '%s'", towardStation)}
    }

    timeStringRows, err := db.Query(`
        SELECT st.departure_time
        FROM stop_times st
        JOIN trips t ON st.trip_id = t.trip_id
        JOIN routes r ON t.route_id = r.route_id
        WHERE st.stop_id = ?
        AND r.route_id = ?
        ORDER BY departure_time ASC;
    `, toStation, toRoute)
    if err != nil {
        return 0, err
    }
    var timeStrings []string
    for timeStringRows.Next() {
        var timeString string
        timeStringRows.Scan(&timeString)
        timeStrings = append(timeStrings, timeString)
    }

    times := make([]time.Time, 0)
    for _, timeStr := range timeStrings {
        t, err := ParseTime(timeStr)
        if err != nil {
            return 0, err
        }
        times = append(times, t)
    }
    if len(times) < 1 {
        return 0, SimulationError{fmt.Sprintf("No stoptimes found for stop %s and route %s", toStation, toRoute)}
    }

    // Find the next 2 stop times
    nearbyTimes := make([]time.Time, 0)
    for _, stoptime := range times {
        if stoptime.After(t) {
            nearbyTimes = append(nearbyTimes, stoptime)
        }
        if len(nearbyTimes) == 2 {
            break
        }
    }
    if len(nearbyTimes) < 2 {
        // We had to go past midnight.
        // NOTE(review): if no stoptime is after t AND len(times) < 2, the
        // times[1] access below panics; a bounds check is missing.
        nearbyTimes = append(nearbyTimes, times[0].Add(24 * time.Hour))
        if len(nearbyTimes) < 2 {
            nearbyTimes = append(nearbyTimes, times[1].Add(24 * time.Hour))
        }
    }

    // Headway between the next two departures.
    interval := int(nearbyTimes[1].Sub(nearbyTimes[0]).Seconds())

    // Okay, now that we have the interval between trains, we need to add the time
    // it takes to run from platform to platform
    transferTimeRow := db.QueryRow(`
        SELECT t.min_transfer_time
        FROM transfers t
        WHERE t.from_stop_id = (
            SELECT s.parent_station FROM stops s WHERE s.stop_id = ?
        ) AND t.to_stop_id = (
            SELECT s.parent_station FROM stops s WHERE s.stop_id = ?
        )
        LIMIT 1;
    `, fromStation, toStation)
    var transferTime int
    err = transferTimeRow.Scan(&transferTime)
    if err != nil {
        return 0, SimulationError{fmt.Sprintf("No transfers possible from %s to %s", fromStation, toStation)}
    }

    // NOTE(review): this averages the walking time with the headway rather
    // than adding walk time to expected wait (interval/2); confirm the
    // intended model.
    return (transferTime + interval)/2, nil
}

// Finds the number of seconds it will take to travel the given segment near the given time.
func TimeToTravel(db *sql.DB, fromStop string, toStop string, route string, t time.Time) (int, error) {
    formattedTime := t.Format("15:04:05")
    // Set to 1 when no same-day trip exists and we wrap to the next day.
    dayCycled := 0

    // Find the next trip on route `route` that leaves from `fromStop` in the direction of `toStop`
    tripIDRow := db.QueryRow(`
        SELECT t.trip_id
        FROM trips t
        JOIN stop_times st ON t.trip_id = st.trip_id
        WHERE st.stop_id = ?
        AND t.route_id = ?
        AND st.departure_time > ?
        ORDER BY st.departure_time ASC
        LIMIT 1;
    `, fromStop, route, formattedTime)
    var tripID string
    err := tripIDRow.Scan(&tripID)
    if err != nil {
        // We couldn't find any trips after `t`, so we'll cycle over to
        // the next day
        dayCycled = 1
        tripIDRow := db.QueryRow(`
            SELECT t.trip_id
            FROM trips t
            JOIN stop_times st ON t.trip_id = st.trip_id
            WHERE st.stop_id = ?
            AND t.route_id = ?
            ORDER BY st.departure_time ASC
            LIMIT 1;
        `, fromStop, route)
        err = tripIDRow.Scan(&tripID)
        if err != nil {
            return 0, SimulationError{
                fmt.Sprintf("No trips from %s to %s after %s", fromStop, toStop, formattedTime),
            }
        }
    }

    departureTime, err := TimeFromQuery(db, `
        SELECT st.departure_time
        FROM stop_times st
        WHERE st.stop_id = ?
        AND st.trip_id = ?;
    `, fromStop, tripID)
    if err != nil {
        return 0, err
    }
    if dayCycled == 1 {
        departureTime = departureTime.Add(24 * time.Hour)
    }

    arrivalTime, err := TimeFromQuery(db, `
        SELECT st.arrival_time
        FROM stop_times st
        WHERE st.stop_id = ?
        AND st.trip_id = ?;
    `, toStop, tripID)
    if err != nil {
        return 0, err
    }
    // The trip itself may cross midnight; shift the arrival forward a day
    // so the subtraction below stays positive.
    if arrivalTime.Before(departureTime) {
        arrivalTime = arrivalTime.Add(24 * time.Hour)
    }

    return int(arrivalTime.Sub(departureTime).Seconds()), nil
}

// Parses a time.Time out of a single DB row returned by the given query.
//
// Takes the same arguments as QueryRow: a query followed by zero or more
// strings to interpolate into that query.
func TimeFromQuery(db *sql.DB, query string, params ...interface{}) (time.Time, error) {
    var tStr string
    tRow := db.QueryRow(query, params...)
    err := tRow.Scan(&tStr)
    if err != nil {
        return time.Time{}, err
    }

    t, err := ParseTime(tStr)
    if err != nil {
        return time.Time{}, SimulationError{
            fmt.Sprintf("Malformatted time: %s", tStr),
        }
    }
    return t, nil
}

// Loads an initial Walk from the specified YAML
func LoadWalk(yamlPath string) (*Walk, error) {
    f, err := os.Open(yamlPath)
    if err != nil {
        return nil, err
    }
    defer f.Close()

    yamlBytes, err := ioutil.ReadAll(f)
    if err != nil {
        return nil, err
    }

    wk := new(Walk)
    // NOTE(review): the Unmarshal error is silently discarded; malformed
    // YAML yields a zero-valued Walk here.
    goyaml.Unmarshal(yamlBytes, wk)

    // StartTimeStr carries the YAML value; parse it into the real StartTime.
    wk.StartTime, err = time.Parse("15:04:05", wk.StartTimeStr)
    if err != nil {
        return nil, err
    }

    return wk, nil
}
{ "content_hash": "a506d98c299573f6c68d0d241f3a6aae", "timestamp": "", "source": "github", "line_count": 323, "max_line_length": 113, "avg_line_length": 30.71517027863777, "alnum_prop": 0.595706078016329, "repo_name": "danslimmon/subchal", "id": "99e95d8e21093b5eaea897cc5b67e4b6b1492211", "size": "9921", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/walk.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "19030" }, { "name": "Python", "bytes": "4101" } ], "symlink_target": "" }
import re import json from tastypie.serializers import Serializer class CamelCaseJSONSerializer(Serializer): """ Python/javascript casing interop. """ formats = ['json'] content_types = { 'json': 'application/json', } def to_json(self, data, options=None): # Changes underscore_separated names to camelCase names to go from # python convention to javacsript convention data = self.to_simple(data, options) def underscoreToCamel(match): return match.group()[0] + match.group()[2].upper() def camelize(data): if isinstance(data, dict): new_dict = {} for key, value in data.items(): new_key = re.sub(r"[a-z]_[a-z]", underscoreToCamel, key) new_dict[new_key] = camelize(value) return new_dict if isinstance(data, (list, tuple)): for i in range(len(data)): data[i] = camelize(data[i]) return data return data camelized_data = camelize(data) return json.dumps(camelized_data, sort_keys=True) def from_json(self, content): # Changes camelCase names to underscore_separated names to go from # javascript convention to python convention data = json.loads(content) def camelToUnderscore(match): return match.group()[0] + "_" + match.group()[1].lower() def underscorize(data): if isinstance(data, dict): new_dict = {} for key, value in data.items(): new_key = re.sub(r"[a-z][A-Z]", camelToUnderscore, key) new_dict[new_key] = underscorize(value) return new_dict if isinstance(data, (list, tuple)): for i in range(len(data)): data[i] = underscorize(data[i]) return data return data underscored_data = underscorize(data) return underscored_data
{ "content_hash": "e416eb28ded9a278b0c9dde3acbc2237", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 76, "avg_line_length": 30.544117647058822, "alnum_prop": 0.5459797785267212, "repo_name": "timrlaw/bandsoc", "id": "7b5ea25f7a35a4ed57a61b7f2dff1795536484bd", "size": "2077", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bandsoc/apps/common/serializers.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "101152" }, { "name": "HTML", "bytes": "45837" }, { "name": "JavaScript", "bytes": "259572" }, { "name": "Python", "bytes": "53170" }, { "name": "Shell", "bytes": "248" } ], "symlink_target": "" }
require 'action_mailer' require 'active_support/core_ext/hash' # for slice require 'file_mover/base' require 'file_mover/sink' require 'file_mover/utils' module FileMover # Sends files in as Mail via SMTP # # Required params: # * :from # * :to # * :subject # # The following SMTP params must be set unless injected into FileMover::Mailer : # * :address # * :port # * :domain # # If :send_file_in_body is true the file (only one allowed in this case) # will be sent in body # otherwise given files will be sent as attachments. # # If :subject and :body are strings the substring _%files_ is replaced by the attached filesnames # # If :subject and :body are procs the proc is called with argument _pathes_ # class Mail < Base include Sink Mailer.logger = nil attribute :from attribute :to attribute :cc attribute :bcc attribute :subject attribute :address attribute :port attribute :domain attribute :send_file_in_body def put_files pathes # slice required. otherwise things like password_hider will cause an error keys = [:from,:to,:subject,:address,:port,:domain,:cc,:bcc,:send_file_in_body] Mailer.my_message(pathes, attributes.symbolize_keys.slice(*keys)).deliver [] end def to_s "#{super}(#{to.inspect})" end end class MailSenderError < StandardError; end # Helper class used by Mail class Mailer < ActionMailer::Base def my_message pathes, params [:address, :port, :domain].each do |key| Mailer.smtp_settings[key] = params.delete(key) if params.has_key?(key) end params[:subject]= _parse(params.delete(:subject),pathes) if params.delete(:send_file_in_body) if pathes.size != 1 raise MailSenderError, "#{self}: cannot send multiple files #{pathes * ","} in mail body" else params[:body] = File.read(pathes.first) end else # body must not be nil to prevent lookup of templates params[:body] = _parse(params[:body],pathes) || '' pathes.each do |path| attachments[File.basename(path)] = File.read(path) end end mail params end private def _parse value, pathes case value when nil then nil when 
String then value.gsub /\%files/, Utils.basenames(pathes).join(',') when Proc then value.call pathes else raise MailSenderError, "invalid value #{value}. #{self}" end end end end
{ "content_hash": "4f36977d91ecf98b33ab796cbf17733b", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 99, "avg_line_length": 26.072164948453608, "alnum_prop": 0.6362198497429814, "repo_name": "tsonntag/file_mover", "id": "b6600064fcd8faea05b3c9a8f6a5f6f5bb00892e", "size": "2529", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/file_mover/mail.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "118812" } ], "symlink_target": "" }
package com.evolveum.midpoint.repo.api; import java.util.Collection; import java.util.List; import com.evolveum.midpoint.prism.Containerable; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.query.ObjectQuery; import com.evolveum.midpoint.schema.*; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.xml.ns._public.common.common_3.FocusType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; /** * <p>Identity Repository Interface.</p> * <p> * <ul> * <li>Status: public</li> * <li>Stability: stable</li> * </ul> * @version 3.1.1 * @author Radovan Semancik * </p><p> * This service provides repository for objects that are commonly found * in identity management deployments. It is used for storage and retrieval * of objects. It also supports modifications (relative changes), searching * and basic coordination. * </p><p> * Supported object types: * <ul> * <li>All object types from Common Schema</li> * <li>All object types from Identity Schema</li> * <li>All object types from IDM Model Schema</li> * </ul> * </p><p> * Identity repository may add some kind of basic logic in addition to a * pure storage of data. E.g. it may check referential consistency, * validate schema, etc. * </p><p> * The implementation may store the objects and properties in any suitable * way and it is not required to check any schema beyond the basic common schema * structures. However, the implementation MAY be able to check additional * schema definitions, e.g. 
to check for mandatory and allowed properties * and property types. This may be either explicit (e.g. implementation checking * against provided XML schema) or implicit, conforming to the constraints of * the underlying storage (e.g. LDAP schema enforced by underlying directory server). * One way or another, the implementation may fail to store the objects that violate * the schema. The method how the schemas are "loaded" to the implementation is not * defined by this interface. This interface even cannot "reveal" the schema to its * users (at least not now). Therefore clients of this interface must be prepared to * handle schema violation errors. * </p><p> * The implementation is not required to index the data or provide any other * optimizations. This depends on the specific implementation, its configuration * and the underlying storage system. Qualitative constraints (such as performance) * are NOT defined by this interface definition. * </p> * <h1>Naming Conventions</h1> * <p> * operations should be named as &lt;operation&gt;&lt;objectType&gt; e.g. addUser, * modifyAccount, searchObjects. The operations that returns single object * instance or works on single object should be named in singular (e.g. addUser). * The operation that return multiple instances should be named in plural (e.g. listObjects). * Operations names should be unified as well: * <ul> * <li>add, modify, delete - writing to repository, single object, need OID</li> * <li>get - retrieving single object by OID</li> * <li>list - returning all objects, no or fixed search criteria</li> * <li>search - returning subset of objects with flexible search criteria</li> * </ul> * </p> * <h1>Notes</h1> * <p> * The definition of this interface is somehow "fuzzy" at places. E.g. * allowing schema-aware implementation but not mandating it, recommending * to remove duplicates, but tolerating them, etc. 
The reason for this is * to have better fit to the underlying storage mechanisms and therefore * more efficient and simpler implementation. It may complicate the clients * if the code needs to be generic and fit each and every implementation of * this interface. However, such code will be quite rare. Most of the custom code * will be developed to work on a specific storage (e.g. Oracle DB or LDAP) * and therefore can be made slightly implementation-specific. Changing the * storage in a running IDM system is extremely unlikely. * </p> * <h1>TODO</h1> * <p> * <ul> * <li>TODO: Atomicity, consistency</li> * <li>TODO: security constraints</li> * <li>TODO: inherently thread-safe</li> * <li>TODO: note about distributed storage systems and weak/eventual consistency</li> * <li>TODO: task coordination</li> * </ul> * </p> */ public interface RepositoryService { String CLASS_NAME_WITH_DOT = RepositoryService.class.getName() + "."; String GET_OBJECT = CLASS_NAME_WITH_DOT + "getObject"; String LIST_OBJECTS = CLASS_NAME_WITH_DOT + "listObjects"; @Deprecated String LIST_ACCOUNT_SHADOW = CLASS_NAME_WITH_DOT + "listAccountShadowOwner"; String ADD_OBJECT = CLASS_NAME_WITH_DOT + "addObject"; String DELETE_OBJECT = CLASS_NAME_WITH_DOT + "deleteObject"; @Deprecated String CLAIM_TASK = CLASS_NAME_WITH_DOT + "claimTask"; @Deprecated String RELEASE_TASK = CLASS_NAME_WITH_DOT + "releaseTask"; String SEARCH_OBJECTS = CLASS_NAME_WITH_DOT + "searchObjects"; String SEARCH_CONTAINERS = CLASS_NAME_WITH_DOT + "searchContainers"; String LIST_RESOURCE_OBJECT_SHADOWS = CLASS_NAME_WITH_DOT + "listResourceObjectShadows"; String MODIFY_OBJECT = CLASS_NAME_WITH_DOT + "modifyObject"; String COUNT_OBJECTS = CLASS_NAME_WITH_DOT + "countObjects"; String GET_VERSION = CLASS_NAME_WITH_DOT + "getVersion"; String SEARCH_OBJECTS_ITERATIVE = CLASS_NAME_WITH_DOT + "searchObjectsIterative"; String CLEANUP_TASKS = CLASS_NAME_WITH_DOT + "cleanupTasks"; String SEARCH_SHADOW_OWNER = CLASS_NAME_WITH_DOT + 
"searchShadowOwner"; String ADVANCE_SEQUENCE = CLASS_NAME_WITH_DOT + "advanceSequence"; String RETURN_UNUSED_VALUES_TO_SEQUENCE = CLASS_NAME_WITH_DOT + "returnUnusedValuesToSequence"; String EXECUTE_QUERY_DIAGNOSTICS = CLASS_NAME_WITH_DOT + "executeQueryDiagnostics"; /** * Returns object for provided OID. * * Must fail if object with the OID does not exists. * * @param oid * OID of the object to get * @param parentResult * parent OperationResult (in/out) * @return Object fetched from repository * * @throws ObjectNotFoundException * requested object does not exist * @throws SchemaException * error dealing with storage schema * @throws IllegalArgumentException * wrong OID format, etc. */ <T extends ObjectType> PrismObject<T> getObject(Class<T> type, String oid, Collection<SelectorOptions<GetOperationOptions>> options, OperationResult parentResult) throws ObjectNotFoundException, SchemaException; // <T extends ObjectType> PrismObject<T> getContainerValue(Class<T> type, String oid, long id, // Collection<SelectorOptions<GetOperationOptions>> options, // OperationResult parentResult) // throws ObjectNotFoundException, SchemaException; /** * Returns object version for provided OID. * * Must fail if object with the OID does not exists. * * This is a supposed to be a very lightweight and cheap operation. It is used to support * efficient caching of expensive objects. * * @param oid * OID of the object to get * @param parentResult * parent OperationResult (in/out) * @return Object version * * @throws ObjectNotFoundException * requested object does not exist * @throws SchemaException * error dealing with storage schema * @throws IllegalArgumentException * wrong OID format, etc. */ <T extends ObjectType> String getVersion(Class<T> type,String oid, OperationResult parentResult) throws ObjectNotFoundException, SchemaException; /** * <p>Add new object.</p> * <p> * The OID provided in the input message may be empty. 
In that case the OID * will be assigned by the implementation of this method and it will be * provided as return value. * </p><p> * This operation should fail if such object already exists (if object with * the provided OID already exists). * </p><p> * The operation may fail if provided OID is in an unusable format for the * storage. Generating own OIDs and providing them to this method is not * recommended for normal operation. * </p><p> * Should be atomic. Should not allow creation of two objects with the same * OID (even if created in parallel). * </p><p> * The operation may fail if the object to be created does not conform to * the underlying schema of the storage system or the schema enforced by the * implementation. * </p><p> * Note: no need for explicit type parameter here. The object parameter contains the information. * </p> * * @param object * object to create * @param parentResult * parent OperationResult (in/out) * @return OID assigned to the created object * * @throws ObjectAlreadyExistsException * object with specified identifiers already exists, cannot add * @throws SchemaException * error dealing with storage schema, e.g. schema violation * @throws IllegalArgumentException * wrong OID format, etc. */ <T extends ObjectType> String addObject(PrismObject<T> object, RepoAddOptions options, OperationResult parentResult) throws ObjectAlreadyExistsException, SchemaException; /** * <p>Search for objects in the repository.</p> * <p>If no search criteria specified, list of all objects of specified type is returned.</p> * <p> * Searches through all object types. * Returns a list of objects that match search criteria. * </p><p> * Returns empty list if object type is correct but there are no objects of * that type. The ordering of the results is not significant and may be arbitrary * unless sorting in the paging is used. * </p><p> * Should fail if object type is wrong. Should fail if unknown property is * specified in the query. 
* </p> * * @param query * search query * @param paging * paging specification to limit operation result (optional) * @param parentResult * parent OperationResult (in/out) * @return all objects of specified type that match search criteria (subject * to paging) * * @throws IllegalArgumentException * wrong object type * @throws SchemaException * unknown property used in search query */ <T extends ObjectType> SearchResultList<PrismObject<T>> searchObjects(Class<T> type, ObjectQuery query, Collection<SelectorOptions<GetOperationOptions>> options, OperationResult parentResult) throws SchemaException; /** * Search for "sub-object" structures, i.e. containers. * Currently, only one type of search is available: certification case search. * * @param type * @param query * @param options * @param parentResult * @param <T> * @return * @throws SchemaException */ <T extends Containerable> SearchResultList<T> searchContainers(Class<T> type, ObjectQuery query, Collection<SelectorOptions<GetOperationOptions>> options, OperationResult parentResult) throws SchemaException; /** * <p>Search for objects in the repository in an iterative fashion.</p> * <p>Searches through all object types. Calls a specified handler for each object found. * If no search criteria specified, list of all objects of specified type is returned.</p> * <p> * Searches through all object types. * Returns a list of objects that match search criteria. * </p><p> * Returns empty list if object type is correct but there are no objects of * that type. The ordering of the results is not significant and may be arbitrary * unless sorting in the paging is used. * </p><p> * Should fail if object type is wrong. Should fail if unknown property is * specified in the query. 
* </p> * * @param query * search query * @param handler * result handler * @param strictlySequential * takes care not to skip any object nor to process objects more than once; * currently requires paging NOT to be used - uses its own paging * @param parentResult * parent OperationResult (in/out) * @return all objects of specified type that match search criteria (subject * to paging) * * @throws IllegalArgumentException * wrong object type * @throws SchemaException * unknown property used in search query */ <T extends ObjectType> SearchResultMetadata searchObjectsIterative(Class<T> type, ObjectQuery query, ResultHandler<T> handler, Collection<SelectorOptions<GetOperationOptions>> options, boolean strictlySequential, OperationResult parentResult) throws SchemaException; /** * <p>Returns the number of objects that match specified criteria.</p> * <p>If no search criteria specified, count of all objects of specified type is returned.</p> * <p> * Should fail if object type is wrong. Should fail if unknown property is * specified in the query. * </p> * * @param query * search query * @param paging * paging specification to limit operation result (optional) * @param parentResult * parent OperationResult (in/out) * @return count of objects of specified type that match search criteria (subject * to paging) * * @throws IllegalArgumentException * wrong object type * @throws SchemaException * unknown property used in search query */ <T extends ObjectType> int countObjects(Class<T> type, ObjectQuery query, OperationResult parentResult) throws SchemaException; boolean isAnySubordinate(String upperOrgOid, Collection<String> lowerObjectOids) throws SchemaException; /** * <p>Modifies object using relative change description.</p> * Must fail if user with * provided OID does not exists. Must fail if any of the described changes * cannot be applied. Should be atomic. * </p><p> * If two or more modify operations are executed in parallel, the operations * should be merged. 
In case that the operations are in conflict (e.g. one * operation adding a value and the other removing the same value), the * result is not deterministic. * </p><p> * The operation may fail if the modified object does not conform to the * underlying schema of the storage system or the schema enforced by the * implementation. * </p> * * TODO: optimistic locking * * @param parentResult * parent OperationResult (in/out) * * @throws ObjectNotFoundException * specified object does not exist * @throws SchemaException * resulting object would violate the schema * @throws ObjectAlreadyExistsException * if resulting object would have name which already exists in another object of the same type * @throws IllegalArgumentException * wrong OID format, described change is not applicable */ <T extends ObjectType> void modifyObject(Class<T> type, String oid, Collection<? extends ItemDelta> modifications, OperationResult parentResult) throws ObjectNotFoundException, SchemaException, ObjectAlreadyExistsException; <T extends ObjectType> void modifyObject(Class<T> type, String oid, Collection<? extends ItemDelta> modifications, RepoModifyOptions options, OperationResult parentResult) throws ObjectNotFoundException, SchemaException, ObjectAlreadyExistsException; /** * <p>Deletes object with specified OID.</p> * <p> * Must fail if object with specified OID does not exists. Should be atomic. * </p> * * @param oid * OID of object to delete * @param parentResult * parent OperationResult (in/out) * * @throws ObjectNotFoundException * specified object does not exist * @throws IllegalArgumentException * wrong OID format, described change is not applicable */ <T extends ObjectType> void deleteObject(Class<T> type, String oid, OperationResult parentResult) throws ObjectNotFoundException; /** * <p>Returns the User object representing owner of specified account (account * shadow).</p> * <p> * May return null if there is no owner specified for the account. 
* </p><p> * May only be called with OID of AccountShadow object. * </p><p> * Implements the backward "owns" association between account shadow and * user. Forward association is implemented by property "account" of user * object. * </p><p> * This is a "list" operation even though it may return at most one owner. * However the operation implies searching the repository for an owner, * which may be less efficient that following a direct association. Hence it * is called "list" to indicate that there may be non-negligible overhead. * </p> * * @param accountOid * OID of account shadow * @param parentResult * parentResult parent OperationResult (in/out) * @return User object representing owner of specified account * * @throws ObjectNotFoundException * specified object does not exist * @throws IllegalArgumentException * wrong OID format */ @Deprecated PrismObject<UserType> listAccountShadowOwner(String accountOid, OperationResult parentResult) throws ObjectNotFoundException; /** * <p>Returns the object representing owner of specified shadow.</p> * <p> * Implements the backward "owns" association between account shadow and * user. Forward association is implemented by linkRef reference in subclasses * of FocusType. * </p * <p> * Returns null if there is no owner for the shadow. * </p> * <p> * This is a "search" operation even though it may return at most one owner. * However the operation implies searching the repository for an owner, * which may be less efficient that following a direct association. Hence it * is called "search" to indicate that there may be non-negligible overhead. * </p> * <p> * This method should not die even if the specified shadow does not exist. * Even if the shadow is gone, it still may be used in some linkRefs. This * method should be able to find objects with such linkeRefs otherwise we * will not be able to do proper cleanup. 
* </p> * * @param shadowOid * OID of shadow * @param parentResult * parentResult parent OperationResult (in/out) * @return Object representing owner of specified account (subclass of FocusType) * * @throws IllegalArgumentException * wrong OID format */ <F extends FocusType> PrismObject<F> searchShadowOwner(String shadowOid, Collection<SelectorOptions<GetOperationOptions>> options, OperationResult parentResult); /** * <p>Search for resource object shadows of a specified type that belong to the * specified resource.</p> * <p> * Returns a list of such object shadows or empty list * if nothing was found. * </p><p> * Implements the backward "has" association between resource and resource * object shadows. Forward association is implemented by property "resource" * of resource object shadow. * </p><p> * May only be called with OID of Resource object. * </p> * * @param resourceOid * OID of resource definition (ResourceType) * @param parentResult * parentResult parent OperationResult (in/out) * @return resource object shadows of a specified type from specified * resource * * @throws ObjectNotFoundException * specified object does not exist * @throws SchemaException * found object is not type of {@link ShadowType} * @throws IllegalArgumentException * wrong OID format */ <T extends ShadowType> List<PrismObject<T>> listResourceObjectShadows(String resourceOid, Class<T> resourceObjectShadowType, OperationResult parentResult) throws ObjectNotFoundException, SchemaException; /** * * This operation is guaranteed to be atomic. If two threads or even two nodes request a value from * the same sequence at the same time then different values will be returned. * * @param oid sequence OID * @param parentResult * @return next unallocated counter value * @throws ObjectNotFoundException the sequence does not exist * @throws SchemaException the sequence cannot produce a value (e.g. 
maximum counter reached) */ long advanceSequence(String oid, OperationResult parentResult) throws ObjectNotFoundException, SchemaException; /** * * The sequence may ignore the values, e.g. if value re-use is disabled or when the list of * unused values is full. In such a case the values will be ignored silently and no error is indicated. * * @param oid * @param unusedValues * @param parentResult * @throws ObjectNotFoundException */ void returnUnusedValuesToSequence(String oid, Collection<Long> unusedValues, OperationResult parentResult) throws ObjectNotFoundException, SchemaException; /** * Provide repository run-time configuration and diagnostic information. */ RepositoryDiag getRepositoryDiag(); /** * Runs a short, non-descructive repository self test. * This methods should never throw a (checked) exception. All the results * should be recorded under the provided result structure (including fatal errors). * * This should implement ONLY self-tests that are IMPLEMENTATION-SPECIFIC. It must not * implement self-tests that are generic and applies to all repository implementations. * Such self-tests must be implemented in higher layers. * * If the repository has no self-tests then the method should return immediately * without changing the result structure. It must not throw an exception in this case. */ void repositorySelfTest(OperationResult parentResult); /** * Checks a closure for consistency, repairing any problems found. * This methods should never throw a (checked) exception. All the results * should be in the returned result structure (including fatal errors). * * The current implementation expects closure to be of reasonable size - so * it could be fetched into main memory as well as recomputed online * (perhaps up to ~250K entries). In future, this method will be reimplemented. * * BEWARE, this method locks out the M_ORG_CLOSURE table, so org-related operations * would wait until it completes. 
* * TODO this method is SQL service specific; it should be generalized/fixed somehow. */ void testOrgClosureConsistency(boolean repairIfNecessary, OperationResult testResult); /** * A bit of hack - execute arbitrary query, e.g. hibernate query in case of SQL repository. * Use with all the care! * * @param request * @param result * @return */ RepositoryQueryDiagResponse executeQueryDiagnostics(RepositoryQueryDiagRequest request, OperationResult result); }
{ "content_hash": "baffef83dd902456f614cfdb5d7dd207", "timestamp": "", "source": "github", "line_count": 560, "max_line_length": 172, "avg_line_length": 41.71071428571429, "alnum_prop": 0.714102234780375, "repo_name": "PetrGasparik/midpoint", "id": "15d77d778309de864b8e30ad5367c42df652b413", "size": "23958", "binary": false, "copies": "1", "ref": "refs/heads/CAS-auth", "path": "repo/repo-api/src/main/java/com/evolveum/midpoint/repo/api/RepositoryService.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "321145" }, { "name": "CSS", "bytes": "234702" }, { "name": "HTML", "bytes": "651627" }, { "name": "Java", "bytes": "24107826" }, { "name": "JavaScript", "bytes": "17224" }, { "name": "PLSQL", "bytes": "2171" }, { "name": "PLpgSQL", "bytes": "8169" }, { "name": "Shell", "bytes": "390442" } ], "symlink_target": "" }
<?php namespace Budbee\Model; use \JsonSerializable; /** * @author Nicklas Moberg */ class OrderInterval implements JsonSerializable { static $dataTypes = array( 'collection' => '\Budbee\Model\Interval', 'delivery' => '\Budbee\Model\Interval' ); /** * Collection interval * @var \Budbee\Model\Interval */ public $collection; /** * Delivery interval * @var \Budbee\Model\Interval */ public $delivery; public function __construct(\Budbee\Model\Interval $collection = null, \Budbee\Model\Interval $delivery = null) { $this->collection = $collection; $this->delivery = $delivery; } public function jsonSerialize() { return array( 'collection' => $this->collection, 'delivery' => $this->delivery ); } }
{ "content_hash": "3c12f49b6a26bbbde43935e3132a440c", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 115, "avg_line_length": 19.833333333333332, "alnum_prop": 0.602641056422569, "repo_name": "LybeAB/Lybe_Budbee", "id": "8bfa1e168ea0482c6a44438d6c723bf61a447fb0", "size": "1437", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/Budbee/vendor/budbee/api-client/src/Model/OrderInterval.php", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "16705" }, { "name": "PHP", "bytes": "31232" } ], "symlink_target": "" }
const expect = require('unexpected'); const http = require('http'); const messy = require('messy'); const errors = require('../lib/errors'); const OrderedMockStrategy = require('../lib/mockstrategies/OrderedMockStrategy'); const UnexpectedMitmMocker = require('../lib/UnexpectedMitmMocker'); function consumeResponse(response, callback) { const chunks = []; response .on('data', (chunk) => { chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)); }) .on('end', () => { callback(null, Buffer.concat(chunks)); }); } function issueGetAndConsume(url) { return new Promise((resolve, reject) => { http .get(url) .on('response', (response) => consumeResponse(response, resolve)) .on('error', reject) .end(); }); } describe('UnexpectedMitmMocker', () => { it('should throw if supplied no strategy or request descriptions', () => { expect( () => { new UnexpectedMitmMocker(); }, 'to throw', 'UnexpectedMitmMocker: missing strategy or request descriptions' ); }); it('should create an ordered mock strategy by default', () => { const mocker = new UnexpectedMitmMocker({ requestDescriptions: [] }); expect(mocker, 'to satisfy', { strategy: expect.it('to be an', OrderedMockStrategy), }); }); it('should throw if the supplied strategy does not expose request descriptions', () => { expect( () => { new UnexpectedMitmMocker({ strategy: {} }); }, 'to throw', 'UnexpectedMitmMocker: supplied strategy has no request descriptions' ); }); it('should create an mocker with the specific strategy', () => { const requestDescriptions = []; const strategy = { requestDescriptions }; const mocker = new UnexpectedMitmMocker({ strategy }); expect(mocker.requestDescriptions, 'to be', requestDescriptions); }); it('should provide a getter for requestDescriptions', () => { const strategy = { requestDescriptions: [] }; const mocker = new UnexpectedMitmMocker({ strategy }); expect(mocker, 'to satisfy', { strategy: expect.it('to be', strategy), }); }); it('should call nextDescriptionForIncomingRequest with the 
correct args', () => { let nextDescriptionForIncomingRequestArgs; const strategy = { requestDescriptions: [], firstDescriptionRemaining: () => Promise.resolve(null), nextDescriptionForIncomingRequest: (...args) => { nextDescriptionForIncomingRequestArgs = args; return Promise.resolve(null); }, }; const mocker = new UnexpectedMitmMocker({ strategy }); return mocker .mock(() => { return issueGetAndConsume('http://example.com/foo').catch((e) => {}); }) .then(({ fulfilmentValue, timeline }) => { expect( nextDescriptionForIncomingRequestArgs, 'to exhaustively satisfy', [ { request: expect.it('to be a', messy.HttpRequest), error: undefined, chunks: expect.it('to be an array'), properties: expect.it('to be an object'), spec: undefined, }, ] ); }); }); describe('when handling a request', () => { it('should reject with an unexpected requests error', () => { const strategy = { requestDescriptions: [], firstDescriptionRemaining: () => Promise.resolve(null), nextDescriptionForIncomingRequest: () => Promise.reject(new Error('fail')), }; const mocker = new UnexpectedMitmMocker({ strategy }); return mocker .mock(() => { return issueGetAndConsume('http://example.com/foo').catch((e) => {}); }) .then(({ fulfilmentValue, timeline }) => { expect(timeline, 'to satisfy', [new Error('fail')]); }); }); }); describe('when there are no remaining requests', () => { it('should reject with an unexpected requests error', () => { const strategy = { requestDescriptions: [], firstDescriptionRemaining: () => Promise.resolve(null), nextDescriptionForIncomingRequest: () => Promise.resolve(null), }; const mocker = new UnexpectedMitmMocker({ strategy }); return mocker .mock(() => { return issueGetAndConsume('http://example.com/foo').catch((e) => {}); }) .then(({ timeline }) => { expect(timeline, 'to satisfy', [ { exchange: expect.it('to be a', messy.HttpExchange), spec: null }, expect.it('to be an', errors.SawUnexpectedRequestsError), ]); }); }); }); describe('when the request does not match 
expectations', () => { it('should reject with an unexpected requests error', () => { const strategy = { requestDescriptions: [], firstDescriptionRemaining: () => Promise.resolve(null), nextDescriptionForIncomingRequest: () => Promise.reject(new errors.EarlyExitError()), }; const mocker = new UnexpectedMitmMocker({ strategy }); return mocker .mock(() => { return issueGetAndConsume('http://example.com/foo').catch(() => {}); }) .then(({ timeline }) => { expect(timeline, 'to satisfy', [ { exchange: expect.it('to be a', messy.HttpExchange), spec: expect.it('not to be null'), }, expect.it('to be an', errors.EarlyExitError), ]); }); }); }); });
{ "content_hash": "e13dd373593663a976a68df122e65372", "timestamp": "", "source": "github", "line_count": 178, "max_line_length": 90, "avg_line_length": 31.370786516853933, "alnum_prop": 0.5807664756446992, "repo_name": "unexpectedjs/unexpected-mitm", "id": "c4bba3342c58411fe7a1f54a1b1a37733fef20fc", "size": "5584", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/UnexpectedMitmMocker.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "JavaScript", "bytes": "171219" } ], "symlink_target": "" }
'use strict'; /** * @ngdoc directive * @name izzyposWebApp.directive:adminPosHeader * @description * # adminPosHeader */ app.directive('sidebarSearch', function () { return { templateUrl: 'scripts/directives/sidebar/sidebar-search/sidebar-search.html', restrict: 'E', replace: true, scope: { }, controller: function ($scope) { $scope.selectedMenu = 'home'; } } });
{ "content_hash": "c0825a30b3ff1fc18baabc3ac0192caf", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 81, "avg_line_length": 19.523809523809526, "alnum_prop": 0.6365853658536585, "repo_name": "thiMuniz/tcc_frontEnd", "id": "7a8e91124d476b7d236c96041f69a69d654c6d2b", "size": "410", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/scripts/directives/sidebar/sidebar-search/sidebar-search.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "24139" }, { "name": "CSS", "bytes": "11233" }, { "name": "HTML", "bytes": "383746" }, { "name": "JavaScript", "bytes": "155906" } ], "symlink_target": "" }
package org.wso2.carbon.governance.registry.eventing.internal; import javax.servlet.http.HttpServletRequest; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.service.RegistryService; import org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.common.eventing.NotificationService; import org.apache.axis2.context.MessageContext; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; public class Utils { private static final Log log = LogFactory.getLog(Utils.class); private static RegistryService registryService; private static NotificationService registryNotificationService; private static String defaultNotificationServiceURL; private static String remoteTopicHeaderName; private static String remoteTopicHeaderNS; private static String remoteSubscriptionStoreContext; public static void setRegistryService(RegistryService service) { registryService = service; } public static RegistryService getRegistryService() { return registryService; } public static String getDefaultNotificationServiceURL() { return defaultNotificationServiceURL; } public static void setDefaultNotificationServiceURL(String defaultNotificationServiceURL) { Utils.defaultNotificationServiceURL = defaultNotificationServiceURL; } public static String getRemoteTopicHeaderName() { return remoteTopicHeaderName; } public static void setRemoteTopicHeaderName(String remoteTopicHeaderName) { Utils.remoteTopicHeaderName = remoteTopicHeaderName; } public static String getRemoteTopicHeaderNS() { return remoteTopicHeaderNS; } public static void setRemoteTopicHeaderNS(String remoteTopicHeaderNS) { Utils.remoteTopicHeaderNS = remoteTopicHeaderNS; } public static String getRemoteSubscriptionStoreContext() { return remoteSubscriptionStoreContext; } public static void setRemoteSubscriptionStoreContext(String remoteSubscriptionStoreContext) { 
Utils.remoteSubscriptionStoreContext = remoteSubscriptionStoreContext; } public static NotificationService getRegistryNotificationService() { return registryNotificationService; } public static void setRegistryNotificationService(NotificationService registryNotificationService) { Utils.registryNotificationService = registryNotificationService; } }
{ "content_hash": "be778e45c41289a1206984aec85031f1", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 104, "avg_line_length": 33.35064935064935, "alnum_prop": 0.7858255451713395, "repo_name": "Niranjan-K/carbon-governance", "id": "a3368559bcfbb4d3673c54af5a829d96416c1a63", "size": "3258", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "components/governance/org.wso2.carbon.governance.registry.eventing/src/main/java/org/wso2/carbon/governance/registry/eventing/internal/Utils.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "41440" }, { "name": "Java", "bytes": "1849332" }, { "name": "JavaScript", "bytes": "806540" }, { "name": "Shell", "bytes": "62608" } ], "symlink_target": "" }
 #pragma once #include <aws/snowball/Snowball_EXPORTS.h> #include <aws/core/client/AWSErrorMarshaller.h> namespace Aws { namespace Client { class AWS_SNOWBALL_API SnowballErrorMarshaller : public Client::JsonErrorMarshaller { public: Client::AWSError<Client::CoreErrors> FindErrorByName(const char* exceptionName) const override; }; } // namespace Snowball } // namespace Aws
{ "content_hash": "113035d6f32d11bd2b69429a70709a2d", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 97, "avg_line_length": 19.2, "alnum_prop": 0.7760416666666666, "repo_name": "svagionitis/aws-sdk-cpp", "id": "073d028951fd938109c8945788687a23ca83083e", "size": "957", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-cpp-sdk-snowball/include/aws/snowball/SnowballErrorMarshaller.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2313" }, { "name": "C++", "bytes": "104799778" }, { "name": "CMake", "bytes": "455533" }, { "name": "HTML", "bytes": "4471" }, { "name": "Java", "bytes": "243075" }, { "name": "Python", "bytes": "72896" }, { "name": "Shell", "bytes": "2803" } ], "symlink_target": "" }
package client type ( // DaemonSetInterface has methods to work with DaemonSet resources. DaemonSetInterface interface { CreateDaemonSet(namespace string, item *DaemonSet) (*DaemonSet, error) GetDaemonSet(namespace, name string) (result *DaemonSet, err error) ListDaemonSets(namespace string, opts *ListOptions) (*DaemonSetList, error) DeleteDaemonSet(namespace, name string) error UpdateDaemonSet(namespace string, item *DaemonSet) (*DaemonSet, error) } // DaemonSet represents the configuration of a daemon set. DaemonSet struct { TypeMeta `json:",inline"` ObjectMeta `json:"metadata,omitempty"` // Specification of the desired behavior of the DaemonSet. Spec *DaemonSetSpec `json:"spec,omitempty"` // Most recently observed status of the DaemonSet. Status *DaemonSetStatus `json:"status,omitempty"` } // DaemonSetSpec is the specification of a daemon set. DaemonSetSpec struct { // Selector is a label query over pods that are managed by the daemon set. // Must match in order to be controlled. If empty, defaulted to labels on Pod template. Selector *LabelSelector `json:"selector,omitempty"` // Template is the object that describes the pod that will be created. // The DaemonSet will create exactly one copy of this pod on every node that matches the template’s // node selector (or on every node if no node selector is specified). Template PodTemplateSpec `json:"template"` } // DaemonSetStatus represents the current status of a daemon set. DaemonSetStatus struct { // CurrentNumberScheduled is the number of nodes that are running at least 1 daemon pod and are supposed to run the daemon pod. CurrentNumberScheduled int32 `json:"currentNumberScheduled"` // NumberMisscheduled is the number of nodes that are running the daemon pod, but are not supposed to run the daemon pod. NumberMisscheduled int32 `json:"numberMisscheduled"` // DesiredNumberScheduled is the total number of nodes that should be running the daemon pod (including nodes correctly running the daemon pod). 
DesiredNumberScheduled int32 `json:"desiredNumberScheduled"` // NumberReady is the number of nodes that should be running the daemon pod and have one or more of the daemon pod running and ready. NumberReady int32 `json:"numberReady"` } DaemonSetList struct { TypeMeta `json:",inline"` ListMeta `json:"metadata,omitempty"` // Items is the list of daemonsets. Items []DaemonSet `json:"items"` } ) // NewDaemonSet creates a new DaemonSet struct func NewDaemonSet(namespace, name string) *DaemonSet { return &DaemonSet{ TypeMeta: NewTypeMeta("DaemonSet", "extensions/v1beta1"), ObjectMeta: NewObjectMeta(namespace, name), Spec: &DaemonSetSpec{}, } }
{ "content_hash": "6d27871df60f014c03fb055c8e452c4b", "timestamp": "", "source": "github", "line_count": 65, "max_line_length": 146, "avg_line_length": 42.01538461538462, "alnum_prop": 0.7605272793848408, "repo_name": "pulcy/ha-redis", "id": "ac8a92480904a44e6f1974a2b50214cf5d1a4651", "size": "2733", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "vendor/github.com/YakLabs/k8s-client/daemonset.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "42038" }, { "name": "HCL", "bytes": "718" }, { "name": "Makefile", "bytes": "1791" } ], "symlink_target": "" }
package com.atlassian.svn2git import java.io.ByteArrayInputStream import org.specs2.mutable class AuthorsTest extends mutable.Specification { def parseUserXml(xml: String, expected: String*) = { val authors = Authors.parseUserXml(new ByteArrayInputStream(xml.getBytes)) authors.toSet must equalTo(expected.toSet) } "parseUserXmlSingle" >> { parseUserXml("""<log><logentry><author>a</author></logentry></log>""", "a") } "parseUserXmlMulti" >> { parseUserXml("""<log><logentry><author>a</author><author>b</author></logentry></log>""", "a", "b") } "parseUserXmlMultiDupes" >> { parseUserXml("""<log><logentry><author>a</author><author>b</author><author>a</author></logentry></log>""", "a", "b") } "onDemandBaseUrl" >> { Authors.onDemandBaseUrl("https://chocs.jira-dev.com/svn/CMN/foo") must equalTo(Some("chocs.jira-dev.com")) } "not onDemandBaseUrl" >> { Authors.onDemandBaseUrl("https://chocs.abc.com/svn/CMN/foo") must equalTo(None) } "test mapUserDetails" >> { Authors.mapUserDetails(List("b", "c", "a")) { user => if (user != "c") Some(user.toUpperCase, "%s@%s" format (user, "example.com")) else None } must equalTo(List("c", "a = A <[email protected]>", "b = B <[email protected]>")) } "test username" >> { Authors.processUsername("zaphod") must equalTo(Some("zaphod", "[email protected]")) } "test email username" >> { Authors.processUsername("[email protected]") must equalTo(Some("Arthur Dent", "[email protected]")) } "test empty username" >> { Authors.processUsername("") must beNone } "test svn command line for http:// schema" >> { val url = "http://pug.jira.com/svn" Authors.svnCommandLineOptions(url, None) must be equalTo List("svn", "log", "--trust-server-cert", "--no-auth-cache", "--xml", "--non-interactive", "-q", url) } "test svn command line for https:// schema" >> { val url = "https://pug.jira.com/svn" Authors.svnCommandLineOptions(url, None) must be equalTo List("svn", "log", "--trust-server-cert", "--no-auth-cache", "--xml", "--non-interactive", "-q", url) } "test svn 
command line for file:// schema" >> { val url = "file:///Users/stefan/dev/svn2git/repos/a" Authors.svnCommandLineOptions(url, None) must be equalTo List("svn", "log", "--xml", "--non-interactive", "-q", url) } "test svn command line for svn:// schema" >> { val url = "svn://localhost" Authors.svnCommandLineOptions(url, None) must be equalTo List("svn", "log", "--xml", "--non-interactive", "-q", url) } "test svn command line for a local directory" >> { val url = "." new java.io.File(url).isDirectory must beTrue Authors.svnCommandLineOptions(url, None) must be equalTo List("svn", "log", "--xml", "--non-interactive", "-q", "file://" + new java.io.File(url).getCanonicalPath) } import net.liftweb.json "test valid parseOnDemandJson" >> { Authors.parseOnDemandJson(json.parse( """{"displayName": "a", "emailAddress": "b"}""" )) must equalTo(Some("a", "b")) } "test invalid parseOnDemandJson" >> { Authors.parseOnDemandJson(json.parse("{}")) must equalTo(None) } }
{ "content_hash": "8bda4c6853ad0be0a5f574200bba0a75", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 167, "avg_line_length": 36.09090909090909, "alnum_prop": 0.6423173803526449, "repo_name": "cjwilburn/svn-migration-scripts", "id": "6a90c8a852396beade170d0f05a715592631f50d", "size": "3176", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/scala/AuthorsTest.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Scala", "bytes": "62839" } ], "symlink_target": "" }
'use strict'; var builder = require('./lib/builder'); var header = require('./lib/header'); var chrono = require('./lib/chrono'); var optionsChecker = require('./lib/options.checker'); function find(str,cha,num){ var x=str.indexOf(cha); for(var i=0;i<num;i++){ x=str.indexOf(cha,x+1); } return x; } function getRoute(originalUrl,RoutePath) { var re = new RegExp("/","g"); var route=RoutePath.match(re); if(route&& RoutePath!="/"){ var arr1 = originalUrl.match(re).length-route.length; return originalUrl.substr(0,find(originalUrl,'/',arr1))+RoutePath; }else{ return originalUrl; } } var expressMetricsStartTimeDate=new Date().getDate(); var clearEveryDay=false; var filterPaths=[]; function filterPathsBool(regs,url) { if(regs.length<=0) return true; else { for (var i=0;i<regs.length;i++){ var reg=new RegExp(regs[i],"g"); if(reg.test(url)){ return true; break; } } return false; } } module.exports.expressMetrics = function expressMetrics(options) { var client; options = optionsChecker.check(options); builder.init(options); client = builder.getClient(); header.init({ header: options.header }); chrono.init({ decimals: options.decimals }); clearEveryDay=options.clearEveryDay||false; filterPaths=options.filterPaths||[]; expressMetricsStartTimeDate=new Date().getDate(); return function (req, res, next) { // chrono.start(); // if(new Date(json.syncTime).getDate()!=new Date().getDate()) return {}; if(clearEveryDay&&(expressMetricsStartTimeDate!=new Date().getDate())){ builder.getServer().setMetric({}); expressMetricsStartTimeDate=new Date().getDate(); } res.startAt = process.hrtime(); res.startTime = new Date(); // decorate response#end method from express var end = res.end; res.once('finish',function() { // var responseTime = new Date() - this.startTime; var diff = process.hrtime(this.startAt); var responseTime = diff[0] * 1e3 + diff[1] * 1e-6; header.setResponseTime(res, responseTime); var routePath=getRoute(req.originalUrl,req.route.path); 
if((filterPathsBool(filterPaths,routePath))&&res.statusCode>=200&&res.statusCode<300){ client.send({ route: { path: routePath, stack: req.route.stack, methods: req.route.methods }, method: req.method, status: res.statusCode, time: responseTime }); } else{ client.send({ route: { path: '*', stack: req.route.stack, methods: req.route.methods }, method: req.method, status: res.statusCode, time: responseTime }); } }) next(); }; }; module.exports.listen = function listen(port) { if (builder.getServer()) { return builder.getMetricsServer(); } return builder.startServer(port); }; module.exports.close = function close(callback) { builder.getServer().stop(callback); }; module.exports.getMetricInsideObj = function getMetric(name) { if(clearEveryDay&&(expressMetricsStartTimeDate!=new Date().getDate())){ builder.getServer().setMetric({}); expressMetricsStartTimeDate=new Date().getDate(); return {}; }else{ var trackedMetrics=builder.getServer().metrics.getReportSummaryInside() if(arguments.length<1){ if(trackedMetrics)return trackedMetrics; else return {}; } else { if(trackedMetrics[name]) return trackedMetrics[name]; else return {}; } } }; module.exports.getMetricObj = function getMetric(name) { if(clearEveryDay&&(expressMetricsStartTimeDate!=new Date().getDate())){ builder.getServer().setMetric({}); expressMetricsStartTimeDate=new Date().getDate(); return {}; }else { var trackedMetrics=builder.getServer().metrics.getReportSummary() if(arguments.length<1){ if(trackedMetrics)return trackedMetrics; else return {}; } else { if(trackedMetrics[name]) return trackedMetrics[name]; else return {}; } } }; module.exports.getMetric = function getMetric(name) { return builder.getServer().metrics.report.trackedMetrics; }; module.exports.setMetric = function setMetric(trackedMetrics) { return builder.getServer().setMetric(trackedMetrics); };
{ "content_hash": "ad32200e6f7c2138d2a2d3ef7f2e0118", "timestamp": "", "source": "github", "line_count": 148, "max_line_length": 93, "avg_line_length": 29.06756756756757, "alnum_prop": 0.6559739655973965, "repo_name": "OSMeteor/expressjs-metrics", "id": "509c0906be647b5db4bfce6059b31ef9135d806b", "size": "4302", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "13342" } ], "symlink_target": "" }
// // This file demonstrates how to initialize EGL in a Windows Store app, using ICoreWindow. // #include "pch.h" #include <WindowsNumerics.h> using namespace Windows::ApplicationModel::Core; using namespace Platform; using namespace HologramJS; void RunLocalSimple() { // Run a basic HoloJS app auto holoJsAppSource = ref new HoloJsAppSource(ref new String(L"scripts/app.json")); CoreApplication::Run(holoJsAppSource); } void RunLocalWithOptions() { // Run with some custom options for the app // 1. Create the app and set the options // If more customizations are required, copy the HoloJsAppView class in this project and make further changes as needed auto holoJsAppView = ref new HoloJsAppView(ref new String(L"scripts/app.json")); holoJsAppView->ImageStabilizationEnabled = false; holoJsAppView->LaunchMode = HoloJsLaunchMode::AsActivated; holoJsAppView->WorldOriginRelativePosition = Windows::Foundation::Numerics::float3(0, 0, -2); // 2. Create the source from the app and run it auto holoJsAppSource = ref new HoloJsAppSource(holoJsAppView); CoreApplication::Run(holoJsAppSource); } void RunWebWithOptions() { // Run with some custom options for the app // 1. Create the app and set the options // If more customizations are required, copy the HoloJsAppView class in this project and make further changes as needed auto holoJsAppView = ref new HoloJsAppView(ref new String(L"http://holojs.azurewebsites.net/samples/photosphere/photosphere.json")); holoJsAppView->ImageStabilizationEnabled = false; holoJsAppView->WorldOriginRelativePosition = Windows::Foundation::Numerics::float3(0, 0, -2); // 2. Create the source from the app and run it auto holoJsAppSource = ref new HoloJsAppSource(holoJsAppView); CoreApplication::Run(holoJsAppSource); } // The main function creates an IFrameworkViewSource for our app, and runs the app. [Platform::MTAThread] int main(Array<Platform::String^>^) { //RunLocalSimple(); //RunWebWithOptions(); RunLocalWithOptions(); return 0; }
{ "content_hash": "c9e79f4404fe76aa064842c8d0843483", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 136, "avg_line_length": 35.39655172413793, "alnum_prop": 0.7569410618606917, "repo_name": "sjando/HoloJS", "id": "2cada9486094cd370d6f4010a74b4a163ad83c69", "size": "2055", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "HoloJS/ThreeJSApp/App.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1627" }, { "name": "C++", "bytes": "406743" }, { "name": "HTML", "bytes": "3107" }, { "name": "JavaScript", "bytes": "5569056" } ], "symlink_target": "" }
package 'kali-linux-rfid' do timeout node['kali']['apt_timeout'] action :upgrade if node['kali']['upgrade'] end
{ "content_hash": "77b6a25344e7597359171ddeb6593b0b", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 44, "avg_line_length": 29, "alnum_prop": 0.6896551724137931, "repo_name": "sliim-cookbooks/kali", "id": "365d12e71ffdbca7c048d3bcac82855db0bec3e1", "size": "695", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "recipes/rfid.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Ruby", "bytes": "21098" }, { "name": "Shell", "bytes": "3054" } ], "symlink_target": "" }
package com.clem.ipoca1.core.util.flattr; /* SimpleFlattrThing is a trivial implementation of the FlattrThing interface */ public class SimpleFlattrThing implements FlattrThing { public SimpleFlattrThing(String title, String url, FlattrStatus status) { this.title = title; this.url = url; this.status = status; } public String getTitle() { return this.title; } public String getPaymentLink() { return this.url; } public FlattrStatus getFlattrStatus() { return this.status; } private String title; private String url; private FlattrStatus status; }
{ "content_hash": "801350cfdd19412f6de2684a2d70b8d9", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 80, "avg_line_length": 22.033333333333335, "alnum_prop": 0.653555219364599, "repo_name": "narakai/DemoApp2", "id": "b2a3cfc2b2af7d31a4510984e6924457690fc0e1", "size": "661", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/main/java/com/clem/ipoca1/core/util/flattr/SimpleFlattrThing.java", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "4071" }, { "name": "Java", "bytes": "2255549" }, { "name": "Python", "bytes": "14394" }, { "name": "Shell", "bytes": "878" } ], "symlink_target": "" }
<?php namespace app\models\query; use yii\db\ActiveQuery; /** * This is the ActiveQuery class for [[\entity\UserProvider]] * * @see \entity\UserProvider */ class UserProviderQuery extends ActiveQuery { /** * Select by type of provider * * @param int $type * @param string $profileId * @return yii\db\ActiveQuery */ public function provider(int $type, string $profileId): ActiveQuery { $this->andWhere(['type' => $type, 'profile_id' => $profileId]); return $this; } }
{ "content_hash": "45f2f649613546ab0bbbd541eb049dfc", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 71, "avg_line_length": 20.615384615384617, "alnum_prop": 0.6175373134328358, "repo_name": "rkit/bootstrap-yii2", "id": "c2451761f097574f4d5f046985eace8a3447a850", "size": "536", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "models/query/UserProviderQuery.php", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1030" }, { "name": "CSS", "bytes": "6718" }, { "name": "JavaScript", "bytes": "4974" }, { "name": "PHP", "bytes": "219042" } ], "symlink_target": "" }
layout: post title: Sample post --- We will write some stuff here about our progress and what not.
{ "content_hash": "ab8561705fb853e2da599bf2e2932c41", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 62, "avg_line_length": 20, "alnum_prop": 0.75, "repo_name": "yumaikas/FGCU-SoftwareEngineeringProject.github.io", "id": "692b62a1105a1167e14a604017baeb8c71289cac", "size": "104", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_posts/2015-08-20-First-Post.md", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "6579" }, { "name": "HTML", "bytes": "22006" }, { "name": "JavaScript", "bytes": "1478" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <html lang="en"> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>Unsubscription from {{ newsletter.title }}</title> </head> <body> Dear {{ subscription.name }}, you, or someone in your name requested unsubscription from {{ newsletter.title }}. If you would like to confirm your unsubscription, please follow this activation link: http://{{ site.domain }}{{ subscription.unsubscribe_activate_url }} Kind regards, {{ newsletter.sender }} </body> </html>
{ "content_hash": "22be88254f3f5c3cb2849f61771fb220", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 85, "avg_line_length": 29.6, "alnum_prop": 0.6976351351351351, "repo_name": "w8s/alleghenypilgrims", "id": "8780eca9297283eaeea28935ac7845abb89fd6df", "size": "592", "binary": false, "copies": "12", "ref": "refs/heads/master", "path": "pilgrims/core/templates/newsletter/message/unsubscribe.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "447" }, { "name": "HTML", "bytes": "32630" }, { "name": "Python", "bytes": "23010" }, { "name": "Ruby", "bytes": "1659" }, { "name": "Shell", "bytes": "6371" } ], "symlink_target": "" }
require "helper" require "gapic/grpc/service_stub" require "google/cloud/retail/v2/product_service_pb" require "google/cloud/retail/v2/product_service_services_pb" require "google/cloud/retail/v2/product_service" class ::Google::Cloud::Retail::V2::ProductService::ClientTest < Minitest::Test class ClientStub attr_accessor :call_rpc_count, :requests def initialize response, operation, &block @response = response @operation = operation @block = block @call_rpc_count = 0 @requests = [] end def call_rpc *args, **kwargs @call_rpc_count += 1 @requests << @block&.call(*args, **kwargs) yield @response, @operation if block_given? @response end end def test_create_product # Create GRPC objects. grpc_response = ::Google::Cloud::Retail::V2::Product.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. parent = "hello world" product = {} product_id = "hello world" create_product_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :create_product, name assert_kind_of ::Google::Cloud::Retail::V2::CreateProductRequest, request assert_equal "hello world", request["parent"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Retail::V2::Product), request["product"] assert_equal "hello world", request["product_id"] refute_nil options end Gapic::ServiceStub.stub :new, create_product_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.create_product({ parent: parent, product: product, product_id: product_id }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.create_product parent: parent, product: product, product_id: product_id do |response, operation| assert_equal 
grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.create_product ::Google::Cloud::Retail::V2::CreateProductRequest.new(parent: parent, product: product, product_id: product_id) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.create_product({ parent: parent, product: product, product_id: product_id }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.create_product(::Google::Cloud::Retail::V2::CreateProductRequest.new(parent: parent, product: product, product_id: product_id), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, create_product_client_stub.call_rpc_count end end def test_get_product # Create GRPC objects. grpc_response = ::Google::Cloud::Retail::V2::Product.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
name = "hello world" get_product_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :get_product, name assert_kind_of ::Google::Cloud::Retail::V2::GetProductRequest, request assert_equal "hello world", request["name"] refute_nil options end Gapic::ServiceStub.stub :new, get_product_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.get_product({ name: name }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.get_product name: name do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.get_product ::Google::Cloud::Retail::V2::GetProductRequest.new(name: name) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.get_product({ name: name }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.get_product(::Google::Cloud::Retail::V2::GetProductRequest.new(name: name), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, get_product_client_stub.call_rpc_count end end def test_list_products # Create GRPC objects. grpc_response = ::Google::Cloud::Retail::V2::ListProductsResponse.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" page_size = 42 page_token = "hello world" filter = "hello world" read_mask = {} list_products_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :list_products, name assert_kind_of ::Google::Cloud::Retail::V2::ListProductsRequest, request assert_equal "hello world", request["parent"] assert_equal 42, request["page_size"] assert_equal "hello world", request["page_token"] assert_equal "hello world", request["filter"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["read_mask"] refute_nil options end Gapic::ServiceStub.stub :new, list_products_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.list_products({ parent: parent, page_size: page_size, page_token: page_token, filter: filter, read_mask: read_mask }) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use named arguments client.list_products parent: parent, page_size: page_size, page_token: page_token, filter: filter, read_mask: read_mask do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object client.list_products ::Google::Cloud::Retail::V2::ListProductsRequest.new(parent: parent, page_size: page_size, page_token: page_token, filter: filter, read_mask: read_mask) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use hash object with options client.list_products({ parent: parent, page_size: page_size, page_token: page_token, filter: filter, read_mask: read_mask }, grpc_options) do |response, operation| assert_kind_of 
Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Use protobuf object with options client.list_products(::Google::Cloud::Retail::V2::ListProductsRequest.new(parent: parent, page_size: page_size, page_token: page_token, filter: filter, read_mask: read_mask), grpc_options) do |response, operation| assert_kind_of Gapic::PagedEnumerable, response assert_equal grpc_response, response.response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, list_products_client_stub.call_rpc_count end end def test_update_product # Create GRPC objects. grpc_response = ::Google::Cloud::Retail::V2::Product.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. product = {} update_mask = {} allow_missing = true update_product_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :update_product, name assert_kind_of ::Google::Cloud::Retail::V2::UpdateProductRequest, request assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Retail::V2::Product), request["product"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, update_product_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.update_product({ product: product, update_mask: update_mask, allow_missing: allow_missing }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.update_product product: product, update_mask: update_mask, allow_missing: allow_missing do |response, operation| 
assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.update_product ::Google::Cloud::Retail::V2::UpdateProductRequest.new(product: product, update_mask: update_mask, allow_missing: allow_missing) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.update_product({ product: product, update_mask: update_mask, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.update_product(::Google::Cloud::Retail::V2::UpdateProductRequest.new(product: product, update_mask: update_mask, allow_missing: allow_missing), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, update_product_client_stub.call_rpc_count end end def test_delete_product # Create GRPC objects. grpc_response = ::Google::Protobuf::Empty.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
name = "hello world" delete_product_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :delete_product, name assert_kind_of ::Google::Cloud::Retail::V2::DeleteProductRequest, request assert_equal "hello world", request["name"] refute_nil options end Gapic::ServiceStub.stub :new, delete_product_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.delete_product({ name: name }) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use named arguments client.delete_product name: name do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object client.delete_product ::Google::Cloud::Retail::V2::DeleteProductRequest.new(name: name) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use hash object with options client.delete_product({ name: name }, grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Use protobuf object with options client.delete_product(::Google::Cloud::Retail::V2::DeleteProductRequest.new(name: name), grpc_options) do |response, operation| assert_equal grpc_response, response assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, delete_product_client_stub.call_rpc_count end end def test_import_products # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
parent = "hello world" request_id = "hello world" input_config = {} errors_config = {} update_mask = {} reconciliation_mode = :RECONCILIATION_MODE_UNSPECIFIED notification_pubsub_topic = "hello world" import_products_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :import_products, name assert_kind_of ::Google::Cloud::Retail::V2::ImportProductsRequest, request assert_equal "hello world", request["parent"] assert_equal "hello world", request["request_id"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Retail::V2::ProductInputConfig), request["input_config"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Retail::V2::ImportErrorsConfig), request["errors_config"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"] assert_equal :RECONCILIATION_MODE_UNSPECIFIED, request["reconciliation_mode"] assert_equal "hello world", request["notification_pubsub_topic"] refute_nil options end Gapic::ServiceStub.stub :new, import_products_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.import_products({ parent: parent, request_id: request_id, input_config: input_config, errors_config: errors_config, update_mask: update_mask, reconciliation_mode: reconciliation_mode, notification_pubsub_topic: notification_pubsub_topic }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use named arguments client.import_products parent: parent, request_id: request_id, input_config: input_config, errors_config: errors_config, update_mask: update_mask, reconciliation_mode: reconciliation_mode, notification_pubsub_topic: notification_pubsub_topic do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, 
response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.import_products ::Google::Cloud::Retail::V2::ImportProductsRequest.new(parent: parent, request_id: request_id, input_config: input_config, errors_config: errors_config, update_mask: update_mask, reconciliation_mode: reconciliation_mode, notification_pubsub_topic: notification_pubsub_topic) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash object with options client.import_products({ parent: parent, request_id: request_id, input_config: input_config, errors_config: errors_config, update_mask: update_mask, reconciliation_mode: reconciliation_mode, notification_pubsub_topic: notification_pubsub_topic }, grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.import_products(::Google::Cloud::Retail::V2::ImportProductsRequest.new(parent: parent, request_id: request_id, input_config: input_config, errors_config: errors_config, update_mask: update_mask, reconciliation_mode: reconciliation_mode, notification_pubsub_topic: notification_pubsub_topic), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, import_products_client_stub.call_rpc_count end end def test_set_inventory # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
inventory = {} set_mask = {} set_time = {} allow_missing = true set_inventory_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :set_inventory, name assert_kind_of ::Google::Cloud::Retail::V2::SetInventoryRequest, request assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Retail::V2::Product), request["inventory"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["set_mask"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["set_time"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, set_inventory_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.set_inventory({ inventory: inventory, set_mask: set_mask, set_time: set_time, allow_missing: allow_missing }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use named arguments client.set_inventory inventory: inventory, set_mask: set_mask, set_time: set_time, allow_missing: allow_missing do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.set_inventory ::Google::Cloud::Retail::V2::SetInventoryRequest.new(inventory: inventory, set_mask: set_mask, set_time: set_time, allow_missing: allow_missing) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash object with options client.set_inventory({ inventory: inventory, set_mask: set_mask, set_time: set_time, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal 
grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.set_inventory(::Google::Cloud::Retail::V2::SetInventoryRequest.new(inventory: inventory, set_mask: set_mask, set_time: set_time, allow_missing: allow_missing), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, set_inventory_client_stub.call_rpc_count end end def test_add_fulfillment_places # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. product = "hello world" type = "hello world" place_ids = ["hello world"] add_time = {} allow_missing = true add_fulfillment_places_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :add_fulfillment_places, name assert_kind_of ::Google::Cloud::Retail::V2::AddFulfillmentPlacesRequest, request assert_equal "hello world", request["product"] assert_equal "hello world", request["type"] assert_equal ["hello world"], request["place_ids"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["add_time"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, add_fulfillment_places_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.add_fulfillment_places({ product: product, type: type, place_ids: place_ids, add_time: add_time, allow_missing: allow_missing }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation 
end # Use named arguments client.add_fulfillment_places product: product, type: type, place_ids: place_ids, add_time: add_time, allow_missing: allow_missing do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.add_fulfillment_places ::Google::Cloud::Retail::V2::AddFulfillmentPlacesRequest.new(product: product, type: type, place_ids: place_ids, add_time: add_time, allow_missing: allow_missing) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash object with options client.add_fulfillment_places({ product: product, type: type, place_ids: place_ids, add_time: add_time, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.add_fulfillment_places(::Google::Cloud::Retail::V2::AddFulfillmentPlacesRequest.new(product: product, type: type, place_ids: place_ids, add_time: add_time, allow_missing: allow_missing), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, add_fulfillment_places_client_stub.call_rpc_count end end def test_remove_fulfillment_places # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
product = "hello world" type = "hello world" place_ids = ["hello world"] remove_time = {} allow_missing = true remove_fulfillment_places_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :remove_fulfillment_places, name assert_kind_of ::Google::Cloud::Retail::V2::RemoveFulfillmentPlacesRequest, request assert_equal "hello world", request["product"] assert_equal "hello world", request["type"] assert_equal ["hello world"], request["place_ids"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["remove_time"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, remove_fulfillment_places_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.remove_fulfillment_places({ product: product, type: type, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use named arguments client.remove_fulfillment_places product: product, type: type, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.remove_fulfillment_places ::Google::Cloud::Retail::V2::RemoveFulfillmentPlacesRequest.new(product: product, type: type, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash object with options client.remove_fulfillment_places({ product: product, type: type, place_ids: place_ids, 
remove_time: remove_time, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.remove_fulfillment_places(::Google::Cloud::Retail::V2::RemoveFulfillmentPlacesRequest.new(product: product, type: type, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, remove_fulfillment_places_client_stub.call_rpc_count end end def test_add_local_inventories # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
product = "hello world" local_inventories = [{}] add_mask = {} add_time = {} allow_missing = true add_local_inventories_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :add_local_inventories, name assert_kind_of ::Google::Cloud::Retail::V2::AddLocalInventoriesRequest, request assert_equal "hello world", request["product"] assert_kind_of ::Google::Cloud::Retail::V2::LocalInventory, request["local_inventories"].first assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["add_mask"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["add_time"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, add_local_inventories_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.add_local_inventories({ product: product, local_inventories: local_inventories, add_mask: add_mask, add_time: add_time, allow_missing: allow_missing }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use named arguments client.add_local_inventories product: product, local_inventories: local_inventories, add_mask: add_mask, add_time: add_time, allow_missing: allow_missing do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.add_local_inventories ::Google::Cloud::Retail::V2::AddLocalInventoriesRequest.new(product: product, local_inventories: local_inventories, add_mask: add_mask, add_time: add_time, allow_missing: allow_missing) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash 
object with options client.add_local_inventories({ product: product, local_inventories: local_inventories, add_mask: add_mask, add_time: add_time, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.add_local_inventories(::Google::Cloud::Retail::V2::AddLocalInventoriesRequest.new(product: product, local_inventories: local_inventories, add_mask: add_mask, add_time: add_time, allow_missing: allow_missing), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, add_local_inventories_client_stub.call_rpc_count end end def test_remove_local_inventories # Create GRPC objects. grpc_response = ::Google::Longrunning::Operation.new grpc_operation = GRPC::ActiveCall::Operation.new nil grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure grpc_options = {} # Create request parameters for a unary method. 
product = "hello world" place_ids = ["hello world"] remove_time = {} allow_missing = true remove_local_inventories_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:| assert_equal :remove_local_inventories, name assert_kind_of ::Google::Cloud::Retail::V2::RemoveLocalInventoriesRequest, request assert_equal "hello world", request["product"] assert_equal ["hello world"], request["place_ids"] assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::Timestamp), request["remove_time"] assert_equal true, request["allow_missing"] refute_nil options end Gapic::ServiceStub.stub :new, remove_local_inventories_client_stub do # Create client client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end # Use hash object client.remove_local_inventories({ product: product, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing }) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use named arguments client.remove_local_inventories product: product, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object client.remove_local_inventories ::Google::Cloud::Retail::V2::RemoveLocalInventoriesRequest.new(product: product, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use hash object with options client.remove_local_inventories({ product: product, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing }, grpc_options) do |response, operation| assert_kind_of 
Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Use protobuf object with options client.remove_local_inventories(::Google::Cloud::Retail::V2::RemoveLocalInventoriesRequest.new(product: product, place_ids: place_ids, remove_time: remove_time, allow_missing: allow_missing), grpc_options) do |response, operation| assert_kind_of Gapic::Operation, response assert_equal grpc_response, response.grpc_op assert_equal grpc_operation, operation end # Verify method calls assert_equal 5, remove_local_inventories_client_stub.call_rpc_count end end def test_configure grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure client = block_config = config = nil Gapic::ServiceStub.stub :new, nil do client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end end config = client.configure do |c| block_config = c end assert_same block_config, config assert_kind_of ::Google::Cloud::Retail::V2::ProductService::Client::Configuration, config end def test_operations_client grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure client = nil Gapic::ServiceStub.stub :new, nil do client = ::Google::Cloud::Retail::V2::ProductService::Client.new do |config| config.credentials = grpc_channel end end assert_kind_of ::Google::Cloud::Retail::V2::ProductService::Operations, client.operations_client end end
{ "content_hash": "c6a2d4e8bc25da321ddfe62a0b55504c", "timestamp": "", "source": "github", "line_count": 799, "max_line_length": 343, "avg_line_length": 44.51063829787234, "alnum_prop": 0.6926105050050613, "repo_name": "googleapis/google-cloud-ruby", "id": "69697c5c060f456b512f2d3d6d6dbc31196f248c", "size": "36227", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "google-cloud-retail-v2/test/google/cloud/retail/v2/product_service_test.rb", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "23930" }, { "name": "CSS", "bytes": "1422" }, { "name": "DIGITAL Command Language", "bytes": "2216" }, { "name": "Go", "bytes": "1321" }, { "name": "HTML", "bytes": "66414" }, { "name": "JavaScript", "bytes": "1862" }, { "name": "Ruby", "bytes": "103945852" }, { "name": "Shell", "bytes": "19653" } ], "symlink_target": "" }
import abc from supriya.tools.systemtools.SupriyaObject import SupriyaObject class BindingSource(SupriyaObject): ### CLASS VARIABLES ### __slots__ = () ### INITIALIZER ### @abc.abstractmethod def __init__(self, output_range=None): from supriya.tools import synthdeftools self._binding_targets = set() if output_range is not None: output_range = synthdeftools.Range(output_range) else: output_range = synthdeftools.Range(0, 1) self._output_range = output_range ### PRIVATE METHODS ### def _send_bound_event(self, event=None): for binding in self._binding_targets: binding(event) ### PUBLIC METHODS ### def unbind(self, binding=None): if binding is None: for binding in self._binding_targets: binding.unbind() elif self is binding.source: binding.unbind() ### PUBLIC PROPERTIES ### @property def is_bound(self): return bool(self._binding_targets)
{ "content_hash": "fd4b689204d87ad311e31b6ad0512283", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 65, "avg_line_length": 25.047619047619047, "alnum_prop": 0.6026615969581749, "repo_name": "andrewyoung1991/supriya", "id": "d576a8374b3a1dc57a3a9c939da1bffbcdbc6cdb", "size": "1078", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "supriya/tools/bindingtools/BindingSource.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "6712" }, { "name": "CSS", "bytes": "446" }, { "name": "HTML", "bytes": "1083" }, { "name": "JavaScript", "bytes": "6163" }, { "name": "Makefile", "bytes": "6775" }, { "name": "Python", "bytes": "2693776" } ], "symlink_target": "" }
""" Surrogate model classes for Bayesian strategy. These are separate from the strategy classes which just operate the models. """ from __future__ import print_function, absolute_import, division import numpy as np try: from GPy import kern from GPy.kern import RBF, Fixed, Bias from GPy.util.linalg import tdot from GPy.models import GPRegression from scipy.optimize import minimize from scipy.stats import norm # If the GPy modules fail we won't do this unnecessarily. from .entry_point import load_entry_point KERNEL_BASE_CLASS = kern.src.kern.Kern except ImportError: # GPy is optional, but required for gp GPRegression = kern = minimize = None pass # TODO Make all of these sklearn estimators class MaximumLikelihoodGaussianProcess(object): """ Gaussian Process model which has its own hyperparameters chosen by a maximum likelihood process """ # Can't have instantiation of model without supplying data def __init__(self, X, Y, kernel, max_feval): if not GPRegression: raise ImportError('No module named GPy') self.X = X self.Y = Y self.kernel = kernel self.model = GPRegression(X=self.X, Y=self.Y, kernel=self.kernel) self.max_feval = max_feval # TODO make this a variable. self.num_restarts = 20 def fit(self): """ Fits the model with random restarts. :return: """ self.model.optimize_restarts(num_restarts=self.num_restarts, verbose=False) def predict(self, x): return self.model.predict(Xnew=x) class GaussianProcessKernel(object): def __init__(self, kernel_params, n_dims): """ Kernels for the Gaussian Process surrogates :param kernel_params: the param list from yaml. 
""" self.kernel_params = kernel_params self.kernel = None # The final kernel self.n_dims = n_dims self._create_kernel() def _create_kernel(self): """ creates an additive kernel """ # Check kernels kernels = self.kernel_params if not isinstance(kernels, list): raise RuntimeError('Must provide enumeration of kernels') for kernel in kernels: if sorted(list(kernel.keys())) != ['name', 'options', 'params']: raise RuntimeError( 'strategy/params/kernels must contain keys: "name", "options", "params"') # Turn into entry points. # TODO use eval to allow user to specify internal variables for kernels (e.g. V) in config file. kernels = [] for kern in self.kernel_params: params = kern['params'] options = kern['options'] name = kern['name'] kernel_ep = load_entry_point(name, 'strategy/params/kernels') if issubclass(kernel_ep, KERNEL_BASE_CLASS): if options['independent']: # TODO Catch errors here? Estimator entry points don't catch instantiation errors kernel = np.sum([kernel_ep(1, active_dims=[i], **params) for i in range(self.n_dims)]) else: kernel = kernel_ep(self.n_dims, **params) if not isinstance(kernel, KERNEL_BASE_CLASS): raise RuntimeError('strategy/params/kernel must load a' 'GPy derived Kernel') kernels.append(kernel) self.kernel = np.sum(kernels)
{ "content_hash": "7a6524e1dbdfddf1e2f99b126df92b39", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 115, "avg_line_length": 34.529411764705884, "alnum_prop": 0.6115843270868825, "repo_name": "msmbuilder/osprey", "id": "8590fd9bdbe386e0be74f196569e35f283c4d609", "size": "3522", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "osprey/surrogate_models.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "51" }, { "name": "Python", "bytes": "173313" }, { "name": "Shell", "bytes": "4748" }, { "name": "TeX", "bytes": "2620" } ], "symlink_target": "" }
<?php namespace LEM_projet2\FrontBundle; use Symfony\Component\HttpKernel\Bundle\Bundle; class FrontBundle extends Bundle { }
{ "content_hash": "95ba9b896143aa5c409f3fb881f2793a", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 47, "avg_line_length": 14.333333333333334, "alnum_prop": 0.7984496124031008, "repo_name": "sandrahurteaux/Projet-PHP-Symfony", "id": "fbaa669bda817be1d3ba9b4185578d7d92e58bbc", "size": "129", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/LEM_projet2/FrontBundle/FrontBundle.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "43578" }, { "name": "JavaScript", "bytes": "1861" }, { "name": "PHP", "bytes": "193503" }, { "name": "Shell", "bytes": "182" } ], "symlink_target": "" }
from __future__ import unicode_literals import time import uuid import django.db.models.deletion import morango.models.fields.uuids from django.conf import settings from django.db import migrations from django.db import models class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ("device", "0011_devicesettings_subset_of_users_device"), ] operations = [ migrations.CreateModel( name="SyncQueue", fields=[ ( "id", morango.models.fields.uuids.UUIDField( default=uuid.uuid4, primary_key=True, serialize=False ), ), ("datetime", models.DateTimeField(auto_now_add=True)), ("updated", models.FloatField(default=time.time)), ("keep_alive", models.FloatField(default=5.0)), ( "user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, ), ), ], ), ]
{ "content_hash": "bcda826bd17c92dee6c719efc70c300e", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 77, "avg_line_length": 29.523809523809526, "alnum_prop": 0.5233870967741936, "repo_name": "indirectlylit/kolibri", "id": "2b5b580ef9dfef550d23761176eb0350bc658a74", "size": "1314", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "kolibri/core/device/migrations/0012_syncqueue.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2554964" }, { "name": "Dockerfile", "bytes": "4114" }, { "name": "Gherkin", "bytes": "365088" }, { "name": "HTML", "bytes": "24294" }, { "name": "JavaScript", "bytes": "1613945" }, { "name": "Makefile", "bytes": "11953" }, { "name": "Python", "bytes": "2860587" }, { "name": "SCSS", "bytes": "5225" }, { "name": "Shell", "bytes": "5245" }, { "name": "Vue", "bytes": "1604613" } ], "symlink_target": "" }
'use strict'; angular.module('mpk').factory('cloudService', function($http, $log, $q, $timeout, cryptoService) { return { cloudAddress: 'http://localhost:8080', settings: {notLoaded: true, encryptionKey: 'my-random-key'}, loadSettings: function() { var settings = localStorage.getItem('myPersonalKanban.cloudSettings'); if (settings == undefined){ this.settings = {notSetup: true, encryptionKey: 'my-random-key'}; return this.settings; } this.settings = angular.fromJson(settings); this.settings.notSetup = false; if (this.settings.encryptionKey == undefined){ this.settings.encryptionKey = 'my-random-key'; } if (this.settings.useLocalCloud){ this.cloudAddress = this.settings.localCloudUrl; } return this.settings; }, saveSettings: function(settings){ this.settings = settings; localStorage.setItem('myPersonalKanban.cloudSettings', angular.toJson(this.settings, false)); this.loadSettings(); return this.settings; }, downloadKanban: function(){ if (this.settings.notLoaded) { this.loadSettings(); } var params = {kanbanKey: this.settings.kanbanKey, action: 'get'}; return $http.jsonp(this.cloudAddress + '/service/kanban?callback=JSON_CALLBACK', {params: params}); }, uploadKanban: function(kanban){ if (this.settings.notLoaded) { this.loadSettings(); } var self = this; function splitSlice(str, len) { var ret = [ ]; for (var offset = 0, strLen = str.length; offset < strLen; offset += len) { ret.push(str.slice(offset, len + offset)); } return ret; }; function sendStart(numberOfFragments) { var params = {kanbanKey: self.settings.kanbanKey, action: 'put', fragments: numberOfFragments}; return $http.jsonp(self.cloudAddress + '/service/kanban?callback=JSON_CALLBACK', {params: params}); }; function sendChunk(chunk, chunkNumber){ var params = {kanbanKey: self.settings.kanbanKey, action: 'put', chunk: chunk, chunkNumber:chunkNumber}; return $http.jsonp(self.cloudAddress + '/service/kanban?callback=JSON_CALLBACK', {params: params}); }; function checkKanbanValidity(kanban){ var hash = 
cryptoService.md5Hash(kanban); var params = {kanbanKey: self.settings.kanbanKey, action: 'put', hash: hash}; return $http.jsonp(self.cloudAddress + '/service/kanban?callback=JSON_CALLBACK', {params: params}); }; var encryptetKanban = cryptoService.encrypt(kanban, this.settings.encryptionKey); var kanbanInChunks = splitSlice(encryptetKanban, 1000); var promise = sendStart(kanbanInChunks.length); angular.forEach(kanbanInChunks, function(value, index){ promise = promise.then(function(){ return sendChunk(value, index + 1); }); }); return promise.then(function(){ return checkKanbanValidity(encryptetKanban); }); }, isConfigurationValid: function(){ if (this.settings.notLoaded) { this.loadSettings(); } return this.settings.kanbanKey != undefined && this.settings.kanbanKey != ''; } }; });
{ "content_hash": "9c14084cba85e9bb3ac74b769ad6c576", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 108, "avg_line_length": 31.71875, "alnum_prop": 0.6873563218390805, "repo_name": "mabotech/maboss-admin", "id": "14963165e14de82b62f7f3a1cec7c4012f5d73e3", "size": "3045", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/kanban/scripts/services/cloudService.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "371443" }, { "name": "JavaScript", "bytes": "389691" }, { "name": "Python", "bytes": "12105" } ], "symlink_target": "" }
#ifndef TH_GENERIC_FILE #define TH_GENERIC_FILE "generic/IndexLinear.c" #else #ifdef _OPENMP #include <omp.h> #endif /* Threshold used to trigger multithreading */ #ifndef THNN_SPARSE_OMP_THRESHOLD #define THNN_SPARSE_OMP_THRESHOLD 100000 #endif /* Threshold used to trigger BLAS axpy call */ #ifndef THNN_SPARSE_OUTDIM_THRESHOLD #define THNN_SPARSE_OUTDIM_THRESHOLD 49 #endif /* sign MACRO */ #ifndef THNN_INDEXLINEAR_SIGN #define THNN_INDEXLINEAR_SIGN(a) ( ( (a) < 0 ) ? -1 : ( (a) > 0 ) ) #endif static bool THNN_(checkKeysValues)(THLongTensor* keys, THTensor* values) { return THLongTensor_size(keys, 0) == THTensor_(nElement)(values) && THTensor_(nDimension)(values) == 1 && THLongTensor_nDimension(keys) == 1; } void THNN_(IndexLinear_updateOutput)( THNNState *state, THLongTensor *keys, long keysOffset, THTensor *values, THLongTensor *sizes, THLongTensor *cumSumSizes, THTensor *output, THTensor *weight, THTensor *bias, THTensor *normalizedValues, int train) { /* Retrieve all the dimensions of the problem */ long batchSize = THLongTensor_size(sizes, 0); long keysSize = THLongTensor_size(keys, 0); long outDim = THTensor_(size)(bias, 0); long woutDim = THTensor_(size)(weight, 1); int maxNormalize = woutDim - outDim; long* sizesData = THLongTensor_data(sizes); long* cumSumSizesData = THLongTensor_data(cumSumSizes); /* Define/resize the normalized values tensor if maxNormalize is > 0 */ real* normalizedValuesData = NULL; if (maxNormalize) { THTensor_(resize1d)(normalizedValues, keysSize); normalizedValuesData = THTensor_(data)(normalizedValues); } /* Resize the output */ THTensor_(resize2d)(output, batchSize, outDim); /* Access the storage data/strides */ real* outputData = THTensor_(data)(output); real* valuesData = THTensor_(data)(values); real* weightData = THTensor_(data)(weight); long weightStride0 = weight->stride[0]; real* biasData = THTensor_(data)(bias); long* keysData = THLongTensor_data(keys); /* Make sure these inputs are contiguous to accelerate computations */ 
THArgCheck(THLongTensor_isContiguous(keys), 1, "keys vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(values), 3, "values vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(output), 6, "output vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(weight), 7, "weight matrix must be contiguous"); THArgCheck(THTensor_(isContiguous)(bias), 8, "bias vector must be contiguous"); THArgCheck(THNN_(checkKeysValues)(keys, values), 1, "Keys and values should have the same number of elements"); THArgCheck(THTensor_(isContiguous)(normalizedValues), 9, "normalizedValues vector must be contiguous"); long i,j,k; /* Separate cases: output dimension is == 1, or > 1 * This allows for some optimizations. */ if (outDim == 1) { THVector_(fill)(outputData, *biasData, batchSize); if (maxNormalize) { /* Parallelize on the batch itself */ #pragma omp parallel \ for private(i,j) \ firstprivate(outDim, keysOffset, \ weightData, keysData, \ valuesData, outputData, \ cumSumSizesData, sizesData) \ schedule(static) \ if(keysSize*outDim > THNN_SPARSE_OMP_THRESHOLD && batchSize > 1) for (j = 0; j < batchSize; j++) { real* loutputData = outputData + j; real val = 0; real absVal = 0; long offset = j == 0 ? 0 : cumSumSizesData[j - 1]; for (i = 0; i < sizesData[j]; i++) { long woffset = weightStride0*(keysData[offset] + keysOffset); absVal = fabs(valuesData[offset]); if (train) { if (absVal > weightData[woffset]) { weightData[woffset] = absVal; weightData[woffset+1] = 1/absVal; } /* * The following can be used to scale the size of the updates * depending on some rule, e.g. the frequency of a feature, ... * This is used at update time. * TODO: implement a smarter update scale. */ weightData[woffset+2] = 1; } normalizedValuesData[offset] = (absVal > weightData[woffset] ? 
THNN_INDEXLINEAR_SIGN(valuesData[offset]):valuesData[offset]*weightData[woffset+1]) + weightData[woffset+3]; val += normalizedValuesData[offset] * weightData[woffset+maxNormalize]; offset++; } *loutputData += val; } } else { /* Parallelize on the batch itself */ #pragma omp parallel \ for private(i,j) \ firstprivate(outDim, weightData, \ keysData, valuesData, \ outputData, cumSumSizesData, \ sizesData) \ schedule(static) \ if(keysSize*outDim > THNN_SPARSE_OMP_THRESHOLD && batchSize > 1) for (j = 0; j < batchSize; j++) { long offset = j == 0 ? 0 : cumSumSizesData[j - 1]; real* loutputData = outputData + j; real val = 0; for (i = 0; i < sizesData[j]; i++) { val += weightData[weightStride0*(keysData[offset] + keysOffset)] * valuesData[offset]; offset++; } *loutputData += val; } } } else { #pragma omp parallel \ for private(i,j,k) \ firstprivate(outDim, weightData, \ keysData, valuesData, \ biasData, outputData, \ cumSumSizesData, sizesData) \ schedule(static) \ if(keysSize*outDim > THNN_SPARSE_OMP_THRESHOLD && batchSize > 1) for (j = 0; j < batchSize; j++) { long offset = j == 0 ? 0 : cumSumSizesData[j - 1]; real val = 0; real* loutputData = outputData + j*outDim; real* lweightData = weightData; memcpy(loutputData, biasData, outDim*sizeof(real)); for (i = 0; i < sizesData[j]; i++) { real val; long woffset = weightStride0*(keysData[offset] + keysOffset); if (maxNormalize) { val = valuesData[offset]; real absVal = fabs(val); if (train) { if (absVal > weightData[woffset]) { weightData[woffset] = absVal; weightData[woffset+1] = 1/absVal; } /* * The following can be used to scale the size of the updates * depending on some rule, e.g. the frequency of a feature, ... 
* The commented section thereafter is just an example of what can be done: * *``` * weightData[woffset+2] = weightData[woffset+2]==0?1:(weightData[woffset+2] / (weightData[woffset+2] + 1)); * real alpha = 1; * real beta = 0.01; * real gamma = 1 - 0.000001; * real l = weightData[woffset+2]==0?1/gamma:(weightData[woffset+2] - beta) / (alpha - beta); * l = gamma*l; * weightData[woffset+2] = (alpha-beta)*l + beta; * ``` * * TODO: implement a smarter update scale. */ weightData[woffset+2] = 1; } /* Normalize + Clamp */ val = (absVal > weightData[woffset] ? THNN_INDEXLINEAR_SIGN(val):val*weightData[woffset+1]) + weightData[woffset+3]; normalizedValuesData[offset] = val; lweightData = weightData + woffset + maxNormalize; } else { val = valuesData[offset]; lweightData = weightData + woffset; } if (outDim > THNN_SPARSE_OUTDIM_THRESHOLD) { THBlas_(axpy)(outDim, val, lweightData, 1, loutputData, 1); } else { for (k=0; k < outDim; k++) { loutputData[k] += lweightData[k] * val; } } offset++; } } } return; } void THNN_(IndexLinear_updateParameters)( THNNState *state, THTensor *gradWeight, THTensor *gradBias, THTensor *weight, THTensor *bias, THLongTensor *runningKeys, THLongTensor *cumSumSizes, long keysOffset, accreal weightDecay_, accreal learningRate_) { real weightDecay = TH_CONVERT_ACCREAL_TO_REAL(weightDecay_); real learningRate = TH_CONVERT_ACCREAL_TO_REAL(learningRate_); /* Retrieve all the dimensions of the problem */ long outDim = THTensor_(size)(bias, 0); long woutDim = THTensor_(size)(weight, 1); int maxNormalize = woutDim - outDim; long keysSize = THLongTensor_size(runningKeys, 0); /* Access the storage data/strides */ real* gradWeightData = THTensor_(data)(gradWeight); real* weightData = THTensor_(data)(weight); long weightStride0 = weight->stride[0]; real* gradBiasData = THTensor_(data)(gradBias); real* biasData = THTensor_(data)(bias); long* keysData = THLongTensor_data(runningKeys); /* Make sure these inputs are contiguous to accelerate computations */ 
THArgCheck(THTensor_(isContiguous)(gradWeight), 1, "gradWeight must be contiguous"); THArgCheck(THTensor_(isContiguous)(gradBias), 2, "gradBias vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(weight), 3, "gradBias vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(bias), 4, "gradBias vector must be contiguous"); THArgCheck(THLongTensor_isContiguous(runningKeys), 5, "keys vector must be contiguous"); int j,k; long offset = 0; /* Update the bias first */ THVector_(cadd)(biasData, biasData, gradBiasData, -learningRate, outDim); /* Separate cases: output dimension is == 1, or > 1 * This allows for some optimizations. * No multithreading here as this could * corrupt the results (hogwild style) */ if (outDim == 1) { if (maxNormalize) { if (weightDecay) { for (j = 0; j < keysSize; j++) { long woffset = weightStride0*(keysData[j] + keysOffset) + maxNormalize; real lr = learningRate*weightData[woffset-2]; weightData[woffset-1] -= weightData[woffset]*gradWeightData[2*j]*lr; weightData[woffset] -= gradWeightData[2*j+1]*lr - weightDecay * weightData[woffset-2] * weightData[woffset]; } } else { for (j = 0; j < keysSize; j++) { long woffset = weightStride0*(keysData[j] + keysOffset) + maxNormalize; real lr = learningRate*weightData[woffset-2]; weightData[woffset-1] -= weightData[woffset]*gradWeightData[2*j]*lr; weightData[woffset] -= gradWeightData[2*j+1]*lr; } } } else { if (weightDecay) { for (j = 0; j < keysSize; j++) { long woffset = weightStride0*(keysData[j] + keysOffset); weightData[woffset] -= gradWeightData[j]*learningRate + weightDecay * weightData[woffset]; } } else { for (j = 0; j < keysSize; j++) { weightData[weightStride0*(keysData[j] + keysOffset)] -= gradWeightData[j]*learningRate; } } } } else { for (j = 0; j < keysSize; j++) { real lr = learningRate; real wd = weightDecay; real* lweightData; long woffset = weightStride0*(keysData[j] + keysOffset); real* lgradWeightData = gradWeightData + j*outDim; if (maxNormalize) { lgradWeightData 
+= j*outDim; /* weightData[woffset + 2] */ lweightData = weightData + woffset + maxNormalize - 2; lr = lr*lweightData[0]; wd = weightDecay*lweightData[0]; /* weightData[woffset + 3] */ lweightData++; for (k=0; k < outDim; k++) { lweightData[0] -= lgradWeightData[k]*lweightData[k+1]*lr; } lweightData++; lgradWeightData += outDim; } else { lweightData = weightData + woffset; } /* We do sparse weight decay. * We think it makes more sense. */ if (weightDecay) { for (k=0; k < outDim; k++) { lweightData[k] -= lweightData[k]*wd; } } if (outDim > THNN_SPARSE_OUTDIM_THRESHOLD) { THBlas_(axpy)(outDim, -lr, lgradWeightData, 1, lweightData, 1); } else { for (k=0; k < outDim; k++) { lweightData[k] -= lgradWeightData[k]*lr; } } } } } void THNN_(IndexLinear_accUpdateGradParameters)( THNNState *state, THLongTensor *keys, long keysOffset, THTensor *values, THLongTensor *sizes, THLongTensor *cumSumSizes, THTensor *gradOutput, THTensor *weight, THTensor *bias, accreal weightDecay_, accreal scale_) { real weightDecay = TH_CONVERT_ACCREAL_TO_REAL(weightDecay_); real scale = TH_CONVERT_ACCREAL_TO_REAL(scale_); /* Retrieve all the dimensions of the problem */ long batchSize = THLongTensor_size(sizes, 0); long keysSize = THLongTensor_size(keys, 0); long outDim = THTensor_(size)(bias, 0); long woutDim = THTensor_(size)(weight, 1); int maxNormalize = woutDim - outDim; THArgCheck(THNN_(checkKeysValues)(keys, values), 1, "Keys and values should have the same number of elements"); /* Access the storage data/strides */ real* gradOutputData = THTensor_(data)(gradOutput); real* valuesData =THTensor_(data)(values); real* weightData = THTensor_(data)(weight); real* biasData = THTensor_(data)(bias); long weightStride0 = weight->stride[0]; long biasStride = bias->stride[0]; long* keysData = THLongTensor_data(keys); long* sizesData = THLongTensor_data(sizes); /* Make sure these inputs are contiguous to accelerate computations */ THArgCheck(THLongTensor_isContiguous(keys), 1, "keys vector must be 
contiguous"); THArgCheck(THTensor_(isContiguous)(values), 3, "values vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(gradOutput), 6, "gradOutput vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(weight), 7, "weight matrix must be contiguous"); THArgCheck(THTensor_(isContiguous)(bias), 8, "bias matrix must be contiguous"); int i,j,k; /* Separate cases: output dimension is == 1, or > 1 * This allows for some optimizations. * No multithreading here as this could * corrupt the results (hogwild style) */ if (outDim == 1) { if (maxNormalize) { long offset = 0; for (j = 0; j < batchSize; j++) { real* lgradOutputData = gradOutputData + j; *biasData -= *lgradOutputData * scale; real val = *lgradOutputData * scale; real* lweightData = weightData; for (i = 0; i < sizesData[j]; i++) { long idx = weightStride0*(keysData[offset] + keysOffset) + maxNormalize; weightData[idx-1] -= weightData[idx]*val*weightData[idx-2]; weightData[idx] -= (val*valuesData[offset] - weightDecay * weightData[idx])*weightData[idx-2]; offset++; } } offset = 0; for (j = 0; j < batchSize; j++) { real* lweightData = weightData; for (i = 0; i < sizesData[j]; i++) { long idx = weightStride0*(keysData[offset] + keysOffset) + maxNormalize; weightData[idx-2] = 0; offset++; } } } else { if (weightDecay) { long offset = 0; for (j = 0; j < batchSize; j++) { real* lgradOutputData = gradOutputData + j; *biasData -= *lgradOutputData * scale; real val = *lgradOutputData * scale; real* lweightData = weightData; for (i = 0; i < sizesData[j]; i++) { long idx = weightStride0*(keysData[offset] + keysOffset); weightData[idx] -= val * valuesData[offset] + weightData[idx] * weightDecay; offset++; } } } else { long offset = 0; for (j = 0; j < batchSize; j++) { real val = gradOutputData[j] * scale; for (i = 0; i < sizesData[j]; i++) { weightData[(keysData[offset] + keysOffset)*weightStride0] -= val * valuesData[offset]; offset++; } *biasData -= val; } } } } else { long offset = 0; for (j = 0; j < 
batchSize; j++) { real val = 0; real* lgradOutputData = gradOutputData + j*outDim; real* lweightData = weightData; THVector_(cadd)(biasData, biasData, lgradOutputData, -scale, outDim); for (i = 0; i < sizesData[j]; i++) { real val = valuesData[offset] * scale; real wd = weightDecay; // Max normalize case if (maxNormalize) { lweightData = weightData + weightStride0*(keysData[offset] + keysOffset) + (maxNormalize-2); val *= lweightData[0]; wd *= lweightData[0]; for (k=0; k < outDim; k++) { lweightData[1] -= lweightData[k+2]*scale*lgradOutputData[k]*lweightData[0]; } lweightData += 2; } else { lweightData = weightData + weightStride0*(keysData[offset] + keysOffset); } /* We do sparse weight decay. * We think it makes more sense. */ if (weightDecay) { if (outDim > THNN_SPARSE_OUTDIM_THRESHOLD) { THBlas_(axpy)(outDim, -wd, lweightData, 1, lweightData, 1); } else { for (k=0; k < outDim; k++) { lweightData[k] -= wd * lweightData[k]; } } } if (outDim > THNN_SPARSE_OUTDIM_THRESHOLD) { THBlas_(axpy)(outDim, -val, lgradOutputData, 1, lweightData, 1); } else { for (k=0; k < outDim; k++) { lweightData[k] -= val * lgradOutputData[k]; } } offset++; } } /* Max Normalize case: * Reset the smart update scaling if * one does it batch-wise. * TODO: Decide what to do with that piece of code. 
* NB: If the code belowe is uncommented, so should the commented * code in IndexLinear:zeroGradParameters() */ /* if (maxNormalize) { offset = 0; for (j = 0; j < batchSize; j++) { real* lweightData = weightData; for (i = 0; i < sizesData[j]; i++) { real val = valuesData[offset] * scale; real wd = weightDecay; lweightData = weightData + weightStride0*(keysData[offset] + keysOffset) + (maxNormalize-2); lweightData[0] = 0; offset++; } } } */ } return; } void THNN_(IndexLinear_accGradParameters)( THNNState *state, THLongTensor *keys, long keysOffset, THTensor *values, THLongTensor *sizes, THLongTensor *cumSumSizes, THTensor *gradOutput, THTensor *gradWeight, THTensor *gradBias, THTensor *weight, THTensor *bias, THTensor *valuesBuffer, accreal weightDecay_, accreal scale_) { real weightDecay = TH_CONVERT_ACCREAL_TO_REAL(weightDecay_); real scale = TH_CONVERT_ACCREAL_TO_REAL(scale_); /* Retrieve all the dimensions of the problem */ long batchSize = THLongTensor_size(sizes, 0); long keysSize = THLongTensor_size(keys, 0); long outDim = THTensor_(size)(bias, 0); long woutDim = THTensor_(size)(weight, 1); long maxNormalize = (woutDim - outDim) > 0 ?1:0; THArgCheck(THNN_(checkKeysValues)(keys, values), 1, "Keys and values should have the same number of elements"); long* sizesData = THLongTensor_data(sizes); /* COmpute the cumulative sizes */ THLongTensor* cumSizes = THLongTensor_new(); THLongTensor_cumsum(cumSizes, sizes, 0); long* cumSizesData = THLongTensor_data(cumSizes); /* Resize the gradWeight buffer to keep it dense. * That speeds up updates A LOT assuming random mem access. 
*/ THTensor_(resize2d)(gradWeight, keysSize, outDim * (maxNormalize>0?2:1)); /* Access the storage data/strides */ real* gradOutputData = THTensor_(data)(gradOutput); real* valuesData =THTensor_(data)(values); real* gradWeightData = THTensor_(data)(gradWeight); real* weightData = THTensor_(data)(weight); real* gradBiasData = THTensor_(data)(gradBias); long gradWeightStride0 = gradWeight->stride[0]; long weightStride0 = weight->stride[0]; long* keysData = THLongTensor_data(keys); /* Make sure these inputs are contiguous to accelerate computations */ THArgCheck(THLongTensor_isContiguous(keys), 1, "keys vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(values), 3, "values vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(gradOutput), 6, "gradOutput vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(gradWeight), 7, "gradWeight must be contiguous"); THArgCheck(THTensor_(isContiguous)(gradBias), 8, "gradBias vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(weight), 9, "weight must be contiguous"); THArgCheck(THTensor_(isContiguous)(bias), 10, "bias vector must be contiguous"); THArgCheck(THTensor_(isContiguous)(valuesBuffer), 11, "valuesBuffer must be contiguous"); int i,j,k; /* Separate cases: output dimension is == 1, or > 1 * This allows for some optimizations. 
* No multithreading here as this could * corrupt the results (hogwild style) */ if (outDim == 1) { for (j = 0; j < batchSize; j++) { long offset = j==0?0:cumSizesData[j-1]; real val = gradOutputData[j] * scale; real* lgradWeightData = gradWeightData + offset; real* lvaluesData = valuesData + offset; long end = sizesData[j]; if (maxNormalize) { lgradWeightData += offset; i = 0; for(;i < end; i++) { lgradWeightData[2*i] = val; lgradWeightData[2*i+1] = val * lvaluesData[i]; } } else { i = 0; for(;i < end-4; i += 4) { lgradWeightData[i] = val * lvaluesData[i]; lgradWeightData[i+1] = val * lvaluesData[i+1]; lgradWeightData[i+2] = val * lvaluesData[i+2]; lgradWeightData[i+3] = val * lvaluesData[i+3]; } for(; i < end; i++) { lgradWeightData[i] = val * lvaluesData[i]; } } *gradBiasData += val; offset += end; } } else { for (j = 0; j < batchSize; j++) { long offset = j==0?0:cumSizesData[j-1]; real val = 0; real* lgradOutputData = gradOutputData + j*outDim; real* lgradWeightData = gradWeightData; real* lweightData = weightData; THVector_(cadd)(gradBiasData, gradBiasData, lgradOutputData, scale, outDim); for (i = 0; i < sizesData[j]; i++) { real val = valuesData[offset] * scale; lgradWeightData = gradWeightData + offset*outDim; if (maxNormalize) { lgradWeightData += offset*outDim; k = 0; for(;k < outDim-4; k += 4) { lgradWeightData[k] = lgradOutputData[k]*scale; lgradWeightData[k+1] = lgradOutputData[k+1]*scale; lgradWeightData[k+2] = lgradOutputData[k+2]*scale; lgradWeightData[k+3] = lgradOutputData[k+3]*scale; } for(; k < outDim; k++) { lgradWeightData[k] = lgradOutputData[k]*scale; } lgradWeightData += outDim; } k = 0; for(;k < outDim-4; k += 4) { lgradWeightData[k] = val * lgradOutputData[k]; lgradWeightData[k+1] = val * lgradOutputData[k+1]; lgradWeightData[k+2] = val * lgradOutputData[k+2]; lgradWeightData[k+3] = val * lgradOutputData[k+3]; } for(; k < outDim; k++) { lgradWeightData[k] = val * lgradOutputData[k]; } offset++; } } } THLongTensor_free(cumSizes); return; } 
#endif
{ "content_hash": "7d870a8b1634f97c6542bb84319a84d7", "timestamp": "", "source": "github", "line_count": 742, "max_line_length": 181, "avg_line_length": 33.95148247978437, "alnum_prop": 0.5600587488091457, "repo_name": "AlexeySa/rspamd", "id": "42d8368ba44238646acdd2e712c82944d4231ca4", "size": "25192", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "contrib/torch/nn/lib/THNN/generic/IndexLinear.c", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Assembly", "bytes": "297971" }, { "name": "C", "bytes": "11099110" }, { "name": "C++", "bytes": "91123" }, { "name": "CMake", "bytes": "170769" }, { "name": "CSS", "bytes": "14178" }, { "name": "HTML", "bytes": "14747" }, { "name": "Java", "bytes": "13174" }, { "name": "JavaScript", "bytes": "102041" }, { "name": "Lua", "bytes": "2234819" }, { "name": "Makefile", "bytes": "12428" }, { "name": "PHP", "bytes": "58538" }, { "name": "Pascal", "bytes": "87" }, { "name": "Perl", "bytes": "73903" }, { "name": "Python", "bytes": "7698" }, { "name": "Ragel", "bytes": "26306" }, { "name": "RobotFramework", "bytes": "74060" }, { "name": "Roff", "bytes": "22" }, { "name": "Shell", "bytes": "7684" }, { "name": "SourcePawn", "bytes": "3118" }, { "name": "Standard ML", "bytes": "128" } ], "symlink_target": "" }
using System; using GoFSamples.Patterns.Bridge.Interfaces; namespace GoFSamples.Patterns.Bridge { public class CorporateProgrammer : Programmer { public CorporateProgrammer(ILanguage lang) : base(lang) { } public override void EarnMoney() { Console.WriteLine("Получаем в конце месяца зарплату"); } } }
{ "content_hash": "91d3fd87ae4de7924262d92d461bc6ef", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 57, "avg_line_length": 17.157894736842106, "alnum_prop": 0.7361963190184049, "repo_name": "SergeyKononovich/GoF", "id": "b1cae96137f74d4139e5462828a4ea1ea9514ae8", "size": "356", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "GoFSamples/Patterns/Bridge/CorporateProgrammer.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "23861" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "e2a0dcb78071ec7f6c8408ac98d58784", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "4b2cd3101ef9e005809781b92cbfb43e05b28ac9", "size": "190", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Iridaceae/Iris/Iris aphylla/ Syn. Iris nudicaulis fieberi/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.android.zycojamie.coolweather.util; import okhttp3.OkHttpClient; import okhttp3.Request; /** * Created by zckya on 2017/4/19. */ public class HttpUtil { public static void sendOkHttpRequest(String address,okhttp3.Callback callback){ OkHttpClient client=new OkHttpClient(); Request request=new Request.Builder() .url(address) .build(); client.newCall(request).enqueue(callback); } }
{ "content_hash": "2b4ea041ca0abe82a0803f7cc1e87c0e", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 83, "avg_line_length": 24.42105263157895, "alnum_prop": 0.6702586206896551, "repo_name": "zycoJamie/coolweather", "id": "d0f5c0ce3f7759dc8cdddb8b85d9d3e87c362142", "size": "464", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/com/android/zycojamie/coolweather/util/HttpUtil.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "31824" } ], "symlink_target": "" }
<div class="mw-hide-on-request"> <div ng-if="modelCollectionIsRequesting" class="spinner-holder" mw-indefinite-loading></div> <div ng-if="!modelCollectionIsRequesting" class="content-holder" ng-transclude></div> </div>
{ "content_hash": "fcc386c1737e99c7b43b84605424cdfa", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 43, "avg_line_length": 31.25, "alnum_prop": 0.668, "repo_name": "mwaylabs/uikit", "id": "229f77d1d0a35329fad8caf9234d15d45b2a90d1", "size": "250", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/mw-ui-components/directives/templates/mw_hide_on_request.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "70715" }, { "name": "HTML", "bytes": "75196" }, { "name": "JavaScript", "bytes": "816166" }, { "name": "Shell", "bytes": "5348" } ], "symlink_target": "" }
/* */ var getNative = require('./_getNative'), root = require('./_root'); var Map = getNative(root, 'Map'); module.exports = Map;
{ "content_hash": "bad3f6f8d16a5140098be5f337cf6e78", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 40, "avg_line_length": 27, "alnum_prop": 0.6, "repo_name": "Imms/imms.github.io", "id": "19b749d21b0cc1ad528404bbe103e85fcb0da650", "size": "135", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "jspm_packages/npm/[email protected]/_Map.js", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "8077" }, { "name": "CSS", "bytes": "481567" }, { "name": "CoffeeScript", "bytes": "113858" }, { "name": "F#", "bytes": "6915" }, { "name": "HTML", "bytes": "6308849" }, { "name": "JavaScript", "bytes": "9587150" }, { "name": "LiveScript", "bytes": "7961" }, { "name": "Makefile", "bytes": "1453" }, { "name": "PHP", "bytes": "5957" }, { "name": "TypeScript", "bytes": "35481" } ], "symlink_target": "" }
@interface AboutViewController () @end @implementation AboutViewController - (void)viewDidLoad { [super viewDidLoad]; NSString *appVersion = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleShortVersionString"]; self.versionLabel.text = [@"Version " stringByAppendingString:appVersion]; } @end
{ "content_hash": "22918b855761e8e7235e1f7b7e5efa56", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 111, "avg_line_length": 20.625, "alnum_prop": 0.7454545454545455, "repo_name": "rl1987/VLC-Controller", "id": "c2fe7beb25e39a532d71b00662f161e109d610b4", "size": "456", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "VLC Controller/AboutViewController.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "178" }, { "name": "Objective-C", "bytes": "93676" }, { "name": "Ruby", "bytes": "652" } ], "symlink_target": "" }
{% extends "layout.html" %} {% block content %} {% if finished %} <div class="alert alert-info text-center" role="alert"> Congratulations, cameratrap turking is completed. Thank you! <br/> <a href="{{ url_for('turk_detection') }}?imgsetid={{ imgsetid }}">Continue by turking detections</a> </div> {% else %} <div class="row"> <div class="col-lg-2"></div> <div class="col-lg-8"> {% if imgsetid %} <div class="alert alert-info" role="alert"> Filtering for imgsetid: <a href="{{ url_for('view_imagesets') }}?imgsetid={{ imgsetid }}">{{ imgsettext }} ({{ imgsetid }})</a> <a href="{{ url_for('root') }}" class="close"><span aria-hidden="true">&times;</span></a> </div> {% endif %} {% if not REFER_DST_ENCODED %} <div class="progress"> <div class="progress-bar" role="progressbar" aria-valuenow="{{ progress }}" aria-valuemin="0" aria-valuemax="100" style="width: {{ progress }}%;"> {{ progress }}% </div> </div> {% endif %} <div style="text-align: center;"> Gid: {{ gid }} </div> <img src="{{ image_src }}" style="height: auto; max-width: 100%; max-height: 500px; margin: 20px auto; margin-bottom: 0px; display: block;"> <br/> <form method="post" action="{{ url_for('submit_cameratrap') }}?imgsetid={{ imgsetid }}&refer={{ REFER_DST_ENCODED }}"> <input type="text" name="cameratrap-gid" value="{{ gid }}" style="display: none;" readonly> <div class="row"> <div class="col-lg-4 col-md-4 col-sm-4 col-xs-4" style="text-align: left;"> {% if previous %} <a class="btn btn-default" id="turk-previous" href="{{ url_for('turk_cameratrap') }}?imgsetid={{ imgsetid }}&gid={{ previous }}">Previous {{ previous }}</a> {% endif %} </div> <div class="col-lg-4 col-md-4 col-sm-4 col-xs-4" style="text-align: center;"> <input type="checkbox" name="cameratrap-toggle" id="ia-cameratrap-toggle" data-width="100" {% if positive %}checked{% endif %}> </div> <div class="col-lg-4 col-md-4 col-sm-4 col-xs-4" style="text-align: right;"> <input type="submit" name="cameratrap-submit" id="turk-submit-accept" class="btn 
btn-primary" value="Accept"> </div> </div> </form> <br/> </div> <div class="col-lg-2"></div> </div> {% endif %} {% endblock %} {% block content_custom_css %} {% endblock %} {% block content_custom_javascript %} <script src="{{ url_for('static', filename='javascript/turk-cameratrap.js') }}"></script> <script type="text/javascript"> toggle = $('#ia-cameratrap-toggle'); toggle.bootstrapToggle({ on: 'Positive', off: 'Negative', onstyle: 'primary', }); </script> {% endblock %}
{ "content_hash": "22aed203cc8679fb55c14fedf992f141", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 172, "avg_line_length": 38.78378378378378, "alnum_prop": 0.556794425087108, "repo_name": "Erotemic/ibeis", "id": "841d3244a4e5ef120c9144fa7ec21dc61100fd0e", "size": "2870", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "ibeis/web/templates/turk/cameratrap.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CMake", "bytes": "331" }, { "name": "CSS", "bytes": "4676" }, { "name": "Dockerfile", "bytes": "13018" }, { "name": "Inno Setup", "bytes": "1585" }, { "name": "Python", "bytes": "6661573" }, { "name": "Shell", "bytes": "56171" } ], "symlink_target": "" }
class ApplicationController < ActionController::Base # Prevent CSRF attacks by raising an exception. # For APIs, you may want to use :null_session instead. add_flash_types :success, :warning, :danger, :info protect_from_forgery with: :exception before_action :authenticate_usuario! private def usuario_admin unless current_usuario.admin? raise ActionController::RoutingError.new('Not Found') end end end
{ "content_hash": "6ca4833cd8288e094741926a7085d78e", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 58, "avg_line_length": 27.125, "alnum_prop": 0.7442396313364056, "repo_name": "AndersonOdilo/Adm_facil", "id": "1622d071643dd30f6afcafc485e3d91e8c7333c2", "size": "434", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/controllers/application_controller.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1482" }, { "name": "CoffeeScript", "bytes": "9957" }, { "name": "HTML", "bytes": "116770" }, { "name": "JavaScript", "bytes": "4725" }, { "name": "Ruby", "bytes": "111329" } ], "symlink_target": "" }
using System; using System.ComponentModel.DataAnnotations; using Newtonsoft.Json; namespace Stardust.Continuum.Models { // Models used as parameters to AccountController actions. public class AddExternalLoginBindingModel { [Required] [Display(Name = "External access token")] public string ExternalAccessToken { get; set; } } public class ChangePasswordBindingModel { [Required] [DataType(DataType.Password)] [Display(Name = "Current password")] public string OldPassword { get; set; } [Required] [StringLength(100, ErrorMessage = "The {0} must be at least {2} characters long.", MinimumLength = 6)] [DataType(DataType.Password)] [Display(Name = "New password")] public string NewPassword { get; set; } [DataType(DataType.Password)] [Display(Name = "Confirm new password")] [Compare("NewPassword", ErrorMessage = "The new password and confirmation password do not match.")] public string ConfirmPassword { get; set; } } public class RegisterBindingModel { [Required] [Display(Name = "Email")] public string Email { get; set; } [Required] [StringLength(100, ErrorMessage = "The {0} must be at least {2} characters long.", MinimumLength = 6)] [DataType(DataType.Password)] [Display(Name = "Password")] public string Password { get; set; } [DataType(DataType.Password)] [Display(Name = "Confirm password")] [Compare("Password", ErrorMessage = "The password and confirmation password do not match.")] public string ConfirmPassword { get; set; } } public class RegisterExternalBindingModel { [Required] [Display(Name = "Email")] public string Email { get; set; } } public class RemoveLoginBindingModel { [Required] [Display(Name = "Login provider")] public string LoginProvider { get; set; } [Required] [Display(Name = "Provider key")] public string ProviderKey { get; set; } } public class SetPasswordBindingModel { [Required] [StringLength(100, ErrorMessage = "The {0} must be at least {2} characters long.", MinimumLength = 6)] [DataType(DataType.Password)] [Display(Name = "New password")] public 
string NewPassword { get; set; } [DataType(DataType.Password)] [Display(Name = "Confirm new password")] [Compare("NewPassword", ErrorMessage = "The new password and confirmation password do not match.")] public string ConfirmPassword { get; set; } } }
{ "content_hash": "875229ad66fabcead9e7bf5a4d39d331", "timestamp": "", "source": "github", "line_count": 84, "max_line_length": 110, "avg_line_length": 32.214285714285715, "alnum_prop": 0.6223207686622321, "repo_name": "JonasSyrstad/Stardust.Interstellar.Rest", "id": "bd362d22b2c87dede5e4ce809f704ff366bc937f", "size": "2708", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Stardust.Continuum/Models/AccountBindingModels.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP.NET", "bytes": "347" }, { "name": "Batchfile", "bytes": "14475" }, { "name": "C#", "bytes": "1548280" }, { "name": "CSS", "bytes": "12186" }, { "name": "HTML", "bytes": "2547591" }, { "name": "JavaScript", "bytes": "594922" } ], "symlink_target": "" }
var async = require('async'); var childProcess = require('child_process'); var scClient = require('socketcluster-client'); var assert = require('assert'); var domain = require('sc-domain'); var scServer = childProcess.fork(__dirname + '/server.js'); var options = { protocol: 'http', hostname: '127.0.0.1', port: 8000, autoReconnect: true, multiplex: false }; scServer.on('message', function (m) { if (m.event == 'ready') { var socket = scClient.connect(options); var pongChannel; var tasks = [ function (cb) { socket.on('first', function (data) { var err; try { assert(data == 'This is the first event', 'Received incorrect data from "first" event'); } catch (e) { err = e; } socket.removeAllListeners('first'); cb(err); }); }, function (cb) { pongChannel = socket.subscribe('pong'); pongChannel.watch(function (data) { var err; try { assert(JSON.stringify(data) == JSON.stringify({message: 'This is pong data'}), 'Received incorrect data from "pong" event'); } catch (e) { err = e; } pongChannel.unwatch(); cb(err); }); socket.emit('ping'); }, function (cb) { pongChannel.watch(function (data) { var err; try { assert(JSON.stringify(data) == JSON.stringify({message: 'published pong'}), 'Received incorrect data from published "pong" event'); } catch (e) { err = e; } cb(err); pongChannel.unwatch(); }); socket.publish('pong', {message: 'published pong'}); }, function (cb) { socket.unsubscribe('pong'); var fooChannel = socket.subscribe('foo'); fooChannel.on('subscribe', function () { var err; try { var subscriptions = socket.subscriptions(); assert(JSON.stringify(subscriptions) == JSON.stringify(['foo']), 'Expected subscriptions() array to contain one "foo" channel'); } catch (e) { err = e; } fooChannel.removeAllListeners('subscribe'); cb(err); }); }, function (cb) { socket.subscribe('foo2'); setTimeout(function () { socket.unsubscribe('foo2'); setTimeout(function () { cb(); }, 1000); }, 1000); }, function (cb) { socket.emit('killWorker'); socket.once('error', function (err) { 
console.log('Caught:', err); }); var notUnsubscribedTimeout = setTimeout(function () { cb('Did not unsubscribe from channels on disconnect'); }, 3000); socket.once('unsubscribe', function () { clearTimeout(notUnsubscribedTimeout); var err; try { var subscriptions = socket.subscriptions(); assert(JSON.stringify(subscriptions) == JSON.stringify([]), 'Did not unsubscribe from channels on disconnect'); } catch (e) { err = e; } cb(err); }); }, function (cb) { setTimeout(function () { socket.emit('new'); var err; setTimeout(function () { try { var subscriptions = socket.subscriptions(); assert(JSON.stringify(subscriptions) == JSON.stringify(['foo']), 'Did not automatically resubscribe to the correct channels which were unsubscribed due to disconnection'); } catch (e) { err = e; } cb(err); }, 2000); }, 1000); }, function (cb) { socket.subscribe('test'); setTimeout(function () { var unsubscribeEmitted = false; socket.on('unsubscribe', function (channel) { if (channel == 'test') { unsubscribeEmitted = true; } }); socket.unsubscribe('test'); var err; setTimeout(function () { try { var subscriptions = socket.subscriptions(); assert(unsubscribeEmitted, 'Socket did not emit unsubscribe event after calling socket.unsubscribe(channelName) method'); } catch (e) { err = e; } cb(err); }, 2000); }, 1000); }, function (cb) { var actionSequence = []; socket.on('subscribe', function (channel) { if (channel == 'channel1') { actionSequence.push('subscribe'); } }); socket.on('unsubscribe', function (channel) { if (channel == 'channel1') { actionSequence.push('unsubscribe'); } }); socket.subscribe('channel1'); socket.unsubscribe('channel1'); socket.subscribe('channel1'); socket.unsubscribe('channel1'); var expectedActionSequence = []; var err; setTimeout(function () { socket.off('subscribe'); socket.off('unsubscribe'); try { assert(JSON.stringify(actionSequence) == JSON.stringify(expectedActionSequence), 'Subscribing and unsubscribing to channel1 multiple times in a sequence was not handled 
in an optimal way'); } catch (e) { err = e; } cb(err); }, 1000); }, function (cb) { var actionSequence = []; socket.subscribe('channel2'); setTimeout(function () { socket.on('subscribe', function (channel) { if (channel == 'channel2') { actionSequence.push('subscribe'); } }); socket.on('unsubscribe', function (channel) { if (channel == 'channel2') { actionSequence.push('unsubscribe'); } }); socket.unsubscribe('channel2'); socket.subscribe('channel2'); socket.unsubscribe('channel2'); socket.subscribe('channel2'); socket.unsubscribe('channel2'); socket.subscribe('channel2'); var expectedActionSequence = [ 'unsubscribe', 'subscribe' ]; var err; setTimeout(function () { socket.off('subscribe'); socket.off('unsubscribe'); try { assert(JSON.stringify(actionSequence) == JSON.stringify(expectedActionSequence), 'Subscribing and unsubscribing to channel2 multiple times in a sequence was not handled in an optimal way'); } catch (e) { err = e; } cb(err); }, 1000); }, 1000); }, function (cb) { var caughtError; var socketDomain = domain.create(); socketDomain.on('error', function (error) { caughtError = error; }); socketDomain.add(socket); socket.emit('error', 'FAIL'); var err; setTimeout(function () { try { assert(caughtError == 'FAIL', 'Socket does not work with error domains'); } catch (e) { err = e; } cb(err); }, 1000); }, function (cb) { var err; socket.once('disconnect', function () { socket.once('connect', function (status) { try { assert(!status.isAuthenticated, 'Socket should not be authenticated'); } catch (e) { err = e; } cb(err); }); socket.connect(); }); socket.disconnect(); }, function (cb) { var err; socket.once('connect', function (status) { var authTokenIsSet = false; socket.on('authenticate', function () { authTokenIsSet = true; }); socket.emit('login', {username: 'john123'}); setTimeout(function () { socket.once('connect', function (status) { try { assert(!!status.isAuthenticated, 'Socket should be authenticated'); assert(authTokenIsSet, 'authenticate event 
was never emitted'); } catch (e) { err = e; } cb(err); }); socket.disconnect(); socket.connect(); }, 1000); }); socket.disconnect(); socket.connect(); } ]; var timedTasks = []; var timeoutMs = 20000; var timeoutError = function () { throw new Error('Test timed out'); }; var assertTimeout = null; var timeoutTask = function (cb) { clearTimeout(assertTimeout); assertTimeout = setTimeout(timeoutError, timeoutMs); cb(); }; for (var i in tasks) { timedTasks.push(timeoutTask); timedTasks.push(tasks[i]); } timedTasks.push(function (cb) { clearTimeout(assertTimeout); socket.disconnect(); scServer.kill(); setTimeout(function () { cb(); }, 1000); }); async.waterfall(timedTasks, function (err) { if (err) { throw err; } else { console.log('All tests passed!'); } process.exit(); }); } });
{ "content_hash": "185204b8cd7875376afcf1d08c6e0d42", "timestamp": "", "source": "github", "line_count": 354, "max_line_length": 124, "avg_line_length": 27.110169491525422, "alnum_prop": 0.49317495050536625, "repo_name": "crod93/googlePlacesEx-react-native", "id": "578315e822463844b33a26dc3fa9b78566dac8b0", "size": "9597", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "node_modules/socketcluster/test/external/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "3579" }, { "name": "JavaScript", "bytes": "40796" }, { "name": "Makefile", "bytes": "319002" }, { "name": "Objective-C", "bytes": "141617" }, { "name": "Python", "bytes": "1652" }, { "name": "Shell", "bytes": "643" } ], "symlink_target": "" }
package com.dianping.cat.report.page.dependency.graph; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; import com.dianping.cat.Constants; import com.dianping.cat.consumer.dependency.model.entity.Dependency; import com.dianping.cat.consumer.dependency.model.entity.DependencyReport; import com.dianping.cat.consumer.dependency.model.entity.Index; import com.dianping.cat.consumer.dependency.model.entity.Segment; import com.dianping.cat.consumer.dependency.model.transform.BaseVisitor; import com.dianping.cat.helper.TimeHelper; import com.dianping.cat.report.page.LineChart; public class LineGraphBuilder extends BaseVisitor { public Map<String, Map<String, Item>> m_dependencies = new HashMap<String, Map<String, Item>>(); private static final String TOTAL_COUNT = "TotalCount"; private static final String ERROR_COUNT = "ErrorCount"; private static final String AVG = "Avg"; private Set<String> m_types = new TreeSet<String>(); private static int SIZE = 60; private long m_period; private int m_currentMinute; private long m_sysMinute; private Date m_start; public LineGraphBuilder() { long current = System.currentTimeMillis(); current -= current % Constants.HOUR; m_period = current; m_sysMinute = (System.currentTimeMillis()) / 1000 / 60 % 60; } private boolean isCurrentPeriod() { return m_period == m_start.getTime(); } private String appendStr(String... 
arg) { int length = arg.length; StringBuilder sb = new StringBuilder(); for (int i = 0; i < length; i++) { sb.append(arg[i]).append(GraphConstrant.DELIMITER); } return sb.toString().substring(0, sb.length() - 1); } private LineChart buildLineChart(String title, Map<String, Item> items) { LineChart result = new LineChart(); result.setSize(SIZE); result.setStep(TimeHelper.ONE_MINUTE); result.setTitle(title); result.setStart(m_start); if (items != null) { for (Entry<String, Item> entry : items.entrySet()) { String subTitle = entry.getKey(); Item item = entry.getValue(); result.add(subTitle, item.getValue()); } } return result; } private Item generateItem() { Item result = new Item(); long size = (int) m_sysMinute + 1; if (!isCurrentPeriod()) { size = SIZE; } for (int i = 0; i < size; i++) { result.setValue(i, 0.0); } return result; } public Item findOrCreateItem(String type, String id) { Map<String, Item> items = m_dependencies.get(type); if (items == null) { items = new HashMap<String, Item>(); m_dependencies.put(type, items); } Item result = items.get(id); if (result == null) { result = generateItem(); items.put(id, result); } return result; } public Map<String, List<LineChart>> queryDependencyGraph() { Map<String, List<LineChart>> allCharts = new HashMap<String, List<LineChart>>(); for (String type : m_types) { List<LineChart> charts = new ArrayList<LineChart>(); Map<String, Item> totalItems = m_dependencies.get(appendStr(type, TOTAL_COUNT)); Map<String, Item> errorItems = m_dependencies.get(appendStr(type, ERROR_COUNT)); Map<String, Item> avgItems = m_dependencies.get(appendStr(type, AVG)); charts.add(buildLineChart(TOTAL_COUNT, totalItems)); charts.add(buildLineChart(ERROR_COUNT, errorItems)); charts.add(buildLineChart(AVG, avgItems)); allCharts.put(type, charts); } return allCharts; } public List<LineChart> queryIndex() { List<LineChart> charts = new ArrayList<LineChart>(); charts.add(buildLineChart(TOTAL_COUNT, m_dependencies.get(TOTAL_COUNT))); 
charts.add(buildLineChart(ERROR_COUNT, m_dependencies.get(ERROR_COUNT))); charts.add(buildLineChart(AVG, m_dependencies.get(AVG))); return charts; } @Override public void visitDependency(Dependency dependency) { String type = dependency.getType(); String target = dependency.getTarget(); long count = dependency.getTotalCount(); long error = dependency.getErrorCount(); double avg = dependency.getAvg(); m_types.add(type); findOrCreateItem(appendStr(type, TOTAL_COUNT), target).setValue(m_currentMinute, count); findOrCreateItem(appendStr(type, ERROR_COUNT), target).setValue(m_currentMinute, error); findOrCreateItem(appendStr(type, AVG), target).setValue(m_currentMinute, avg); super.visitDependency(dependency); } @Override public void visitDependencyReport(DependencyReport dependencyReport) { m_start = dependencyReport.getStartTime(); super.visitDependencyReport(dependencyReport); } @Override public void visitIndex(Index index) { String id = index.getName(); long count = index.getTotalCount(); long error = index.getErrorCount(); double avg = index.getAvg(); findOrCreateItem(TOTAL_COUNT, id).setValue(m_currentMinute, count); findOrCreateItem(ERROR_COUNT, id).setValue(m_currentMinute, error); findOrCreateItem(AVG, id).setValue(m_currentMinute, avg); super.visitIndex(index); } @Override public void visitSegment(Segment segment) { m_currentMinute = segment.getId(); super.visitSegment(segment); } public class Item { private Double[] m_values = new Double[60]; public Double[] getValue() { return m_values; } private Item setValue(int minute, double value) { m_values[minute] = value; return this; } } }
{ "content_hash": "42da84998dda2f49becbee2b536098a3", "timestamp": "", "source": "github", "line_count": 193, "max_line_length": 97, "avg_line_length": 27.99481865284974, "alnum_prop": 0.7201554691837868, "repo_name": "ServerStarted/cat", "id": "148cd0a137117a49fdce121f65fca4a328099ff6", "size": "5403", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "cat-home/src/main/java/com/dianping/cat/report/page/dependency/graph/LineGraphBuilder.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "11263" }, { "name": "CSS", "bytes": "141432" }, { "name": "Cucumber", "bytes": "544" }, { "name": "FreeMarker", "bytes": "65856" }, { "name": "HTML", "bytes": "11792" }, { "name": "Java", "bytes": "3883689" }, { "name": "JavaScript", "bytes": "164991" }, { "name": "Python", "bytes": "11726" }, { "name": "Ruby", "bytes": "265" }, { "name": "Shell", "bytes": "2244" } ], "symlink_target": "" }
using System; using System.Windows; using System.Windows.Controls; using Microsoft.Kinect; using Microsoft.Kinect.Wpf.Controls; using Microsoft.Kinect.Toolkit.Input; using System.Windows.Media; using System.Windows.Shapes; namespace KinectV2 { /// <summary> /// MainWindow.xaml の相互作用ロジック /// </summary> public partial class MainWindow : Window { public MainWindow() { InitializeComponent(); } void InitializeKinectControls() { KinectRegion.SetKinectRegion( this, kinectRegion ); this.kinectRegion.KinectSensor = KinectSensor.GetDefault(); this.kinectRegion.KinectSensor.Open(); } private void Window_Loaded( object sender, RoutedEventArgs e ) { InitializeKinectControls(); } private void Button_Click( object sender, RoutedEventArgs e ) { Button button = sender as Button; button.Content = "Pressed!"; } private void Window_Closing( object sender, System.ComponentModel.CancelEventArgs e ) { if(kinectRegion!=null) { if(kinectRegion.KinectSensor!=null) { kinectRegion.KinectSensor.Close(); kinectRegion.KinectSensor = null; } kinectRegion = null; } } } }
{ "content_hash": "757e51e85ac47b985d819081f73e1d08", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 93, "avg_line_length": 27.529411764705884, "alnum_prop": 0.5876068376068376, "repo_name": "K4W2-Book/K4W2-Book", "id": "4e322d8396eaf97167dce1446168a62927bf428a", "size": "1424", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "C#(Managed)/10_Interaction/KinectV2/KinectV2/MainWindow.xaml.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "1528847" }, { "name": "C++", "bytes": "266270" }, { "name": "ShaderLab", "bytes": "9649" } ], "symlink_target": "" }
title: "ArgData API: DriverNumberWriter Class" --- [API Reference](/argdata/api/) &gt; [0.21](/argdata/api/0.21/) &gt; DriverNumberWriter # DriverNumberWriter Class Writes the driver numbers, or inactives a driver. **Namespace:** ArgData ## Constructors <table class="table table-bordered table-striped "> <thead> <tr> <th>Name</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td>DriverNumberWriter(<a href="/argdata/api/0.21/gpexefile/">GpExeFile</a> <em>exeFile</em>)</td> <td>Creates a DriverNumberWriter for the specified GP.EXE file.<br /><em>exeFile</em>: GpExeFile to read from.<br /></td> </tr> </tbody> </table> ## Methods <table class="table table-bordered table-striped "> <thead> <tr> <th>Name</th> <th>Description</th> </tr> </thead> <tbody> <tr> <td>For(<a href="/argdata/api/0.21/gpexefile/">GpExeFile</a> <em>exeFile</em>)</td> <td>Creates a DriverNumberWriter for the specified GP.EXE file.<br /><em>exeFile</em>: GpExeFile to read from.<br /></td> </tr> <tr> <td>WriteDriverNumbers(<a href="/argdata/api/0.21/drivernumberlist/">DriverNumberList</a> <em>driverNumbers</em>)</td> <td>Writes driver numbers. If a driver number is set to 0, the driver is inactivated.<br /><em>driverNumbers</em>: DriverNumberList of driver numbers.<br /></td> </tr> </tbody> </table>
{ "content_hash": "60304d499544965e0f0fde0e1fca32bc", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 165, "avg_line_length": 26.647058823529413, "alnum_prop": 0.6644591611479028, "repo_name": "codemeyer/ArgData", "id": "539215c685c06895386fe505ea6c0e3a2ff894a3", "size": "1363", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Docs/site/content/api/0.21/drivernumberwriter.md", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "591173" }, { "name": "PowerShell", "bytes": "424" } ], "symlink_target": "" }
<?php namespace ZF\ContentNegotiation; use Zend\View\Model\JsonModel as BaseJsonModel; class JsonModel extends BaseJsonModel { /** * Mark view model as terminal by default (intended for use with APIs) * * @var bool */ protected $terminate = true; }
{ "content_hash": "71345b0bb51a142ae2cfd7523800dab7", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 74, "avg_line_length": 17.5625, "alnum_prop": 0.6725978647686833, "repo_name": "weierophinney/zf-content-negotiation", "id": "13579085e5b819743597bf34ae133e00e704d1d9", "size": "441", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ZF/ContentNegotiation/JsonModel.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "PHP", "bytes": "41028" } ], "symlink_target": "" }
require 'rails/generators' class BlacklightMoreLikeThisGenerator < Rails::Generators::Base argument :model_name, :type => :string, :default => "SolrDocument" argument :controller_name, :type => :string, :default => "CatalogController" def inject_solr_document_extension file_path = "app/models/#{model_name.underscore}.rb" if File.exists? file_path inject_into_file file_path, :after => "include Blacklight::Solr::Document" do "\n include BlacklightMoreLikeThis::SolrDocumentExtension\n" end end end def inject_catalog_controller_extension file_path = "app/controllers/#{controller_name.underscore}.rb" if File.exists? file_path inject_into_file file_path, :after => "include Blacklight::Catalog" do "\n include BlacklightMoreLikeThis::ControllerExtension\n" end end end end
{ "content_hash": "5fa515101e9e554cf767e43c54fa28c4", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 83, "avg_line_length": 31.814814814814813, "alnum_prop": 0.7031431897555297, "repo_name": "cbeer/blacklight_mlt", "id": "e6618168307c9529470dd531c71ef435e1b2f27a", "size": "859", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/generators/blacklight_more_like_this/blacklight_more_like_this_generator.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "5404" } ], "symlink_target": "" }
FROM php:apache MAINTAINER ukatama [email protected] RUN apt-get update -yq # Install git RUN apt-get install -yq git ## Install PHP zip extension RUN apt-get install -yq zlib1g-dev && docker-php-ext-install -j$(nproc) zip ## Install composer RUN curl -sS https://getcomposer.org/installer | php -- --install-dir=/usr/local/bin --filename=composer ## Install dependics WORKDIR /var/www/html COPY composer.json /var/www/html RUN composer install ## Add soruce codes COPY . /var/www/html COPY config/wiki.default.php /var/www/html/config/wiki.php ## Run unittests RUN ./vendor/bin/phpunit
{ "content_hash": "525fdd950ea5b297dbb806128b322e96", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 104, "avg_line_length": 23.92, "alnum_prop": 0.7491638795986622, "repo_name": "ukatama/mew", "id": "b194a142bf3f6a85da023182699b017069a4bd1d", "size": "598", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dockerfile", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "685" }, { "name": "JavaScript", "bytes": "1405" }, { "name": "PHP", "bytes": "57294" } ], "symlink_target": "" }
/* * * PatientMonitorMobile * */ 'use strict'; import React from 'react'; import {connect} from 'react-redux'; import Helmet from 'react-helmet'; import styles from './styles.css'; import ReactGrid, {WidthProvider} from 'react-grid-layout'; const ReactGridLayout = WidthProvider(ReactGrid); import {grey900, grey800, grey700, red500, grey50} from 'material-ui/styles/colors'; import {Grid, Row, Col} from 'react-bootstrap'; import {Card} from 'material-ui/Card'; import ECG from 'components/Ecg'; import FontIcon from 'material-ui/FontIcon'; import FloatingActionButton from 'material-ui/FloatingActionButton'; import MenuItem from 'material-ui/MenuItem'; import Divider from 'material-ui/Divider'; import {List, ListItem} from 'material-ui/List'; import Subheader from 'material-ui/Subheader'; import Slider from 'material-ui/Slider'; import SelectField from 'material-ui/SelectField'; import Drawer from 'material-ui/Drawer'; import VitalSign from 'components/VitalSign'; import {createStructuredSelector} from 'reselect'; import IconButton from 'material-ui/IconButton'; import { changeLayout1, resetLayout1, addItem1, removeItem1, changeLayout2, resetLayout2, addItem2, removeItem2, handleLeftDrawerToggle, handleLeftDrawerClose, handleWaveformChange, handleColorChange, handleScaleChange, handleSpeedChange, handleRightDrawerToggle, handleRightDrawerClose, handleVitalSignChange, handleVitalSignColorChange, handlePowerButtonToggle } from './actions'; import { selectLayout1, selectItems1, selectLayout2, selectItems2, selectLeftDrawer, selectRightDrawer, selectPowerOn, selectSocket } from './selectors'; import { selectIP, selectPort, selectProtocol, selectPatientMonitor } from 'containers/Settings/selectors'; let color = { 'green': '#00bd00', 'purple': '#CC00FF', 'yellow': '#FFFF00', 'white': '#FFFFFF', 'red': '#FC0203', 'blue': '#03FDFB', }; class PatientMonitorMobile extends React.Component { // eslint-disable-line react/prefer-stateless-function componentDidMount() { 
window.addEventListener("keyup", this.props.onPowerOnModeKeyUp); } componentWillUnmount() { window.removeEventListener("keyup", this.props.onPowerOnModeKeyUp); } // waveform createCustomElement1 = (el) => { let {handleLeftDrawerToggle, onRemoveItem1, items1} = this.props; let removeStyle = { position: 'absolute', top: 0, right: '0px', cursor: 'pointer' }; let i = el.get('i'); let item1 = items1.get(i); let waveform = item1.get('waveform'); let strokeStyle = item1.get('strokeStyle'); let lineWidth = item1.get('lineWidth'); let scale = item1.get('scale'); let speed = item1.get('speed'); if (el.get('y') === null) { el = el.set('y', Infinity); } return ( <div key={i} data-grid={el.toObject()}> <div style={{height: '15%'}}> <span style={{ fontSize: '2em', color: color[strokeStyle], position: 'absolute', left: '0px' }}> {waveform} </span> <FontIcon className="material-icons" style={{position: 'absolute', top: 0, right: '30px', cursor: 'pointer'}} onTouchTap={handleLeftDrawerToggle.bind(this, i)}> build </FontIcon> <FontIcon style={removeStyle} className="material-icons" onClick={onRemoveItem1.bind(this, i)}> close </FontIcon> </div> <Card containerStyle={{height: '100%', width: '100%'}} style={{height: '85%', width: '100%'}}> <ECG socket={this.props.socket} i={i} waveform={waveform} strokeStyle={strokeStyle} lineWidth={lineWidth} scale={scale} speed={speed} showBuffer={true}/> </Card> </div> ) }; createPlayElement1 = (el) => { let {items1} = this.props; let i = el.get('i'); let item1 = items1.get(i); let waveform = item1.get('waveform'); let strokeStyle = item1.get('strokeStyle'); let lineWidth = item1.get('lineWidth'); let scale = item1.get('scale'); let speed = item1.get('speed'); if (el.get('y') === null) { el = el.set('y', Infinity); } return ( <div key={i} data-grid={el.toObject()}> <div style={{height: '15%'}}> <span style={{ fontSize: '2em', color: color[strokeStyle], position: 'absolute', left: '0px' }} > {waveform} </span> </div> <div style={{height: '85%', 
width: '100%'}}> <ECG socket={this.props.socket} i={i} waveform={waveform} strokeStyle={strokeStyle} lineWidth={lineWidth} scale={scale} speed={speed} showBuffer={false}/> </div> </div> ) }; // vital sign createCustomElement2 = (el) => { let {onRemoveItem2, handleRightDrawerToggle, items2} = this.props; let removeStyle = { position: 'absolute', right: '2px', top: 0, cursor: 'pointer' }; let i = el.get('i'); let item2 = items2.get(i); let vitalSign = item2.get('vitalSign'); let strokeStyle = item2.get('strokeStyle'); let w = el.get('w'); return ( <div key={i} data-grid={el.toObject()}> <div style={{height: '15%'}}> <FontIcon className="material-icons" style={{position: 'absolute', top: 0, right: '30px', cursor: 'pointer'}} onTouchTap={handleRightDrawerToggle.bind(this, i)}> build </FontIcon> <FontIcon className="material-icons" style={removeStyle} onClick={onRemoveItem2.bind(this, i)}> close </FontIcon> </div> <Card containerStyle={{width: '100%', height: '100%'}} style={{width: '100%', height: '85%'}}> <VitalSign socket={this.props.socket} i={i} vitalSign={vitalSign} strokeStyle={strokeStyle} w={w}/> </Card> </div> ) }; createPlayElement2 = (el) => { let {items2} = this.props; let i = el.get('i'); let item2 = items2.get(i); let vitalSign = item2.get('vitalSign'); let strokeStyle = item2.get('strokeStyle'); let w = el.get('w'); return ( <div key={i} data-grid={el.toObject()}> <div style={{height: '15%'}}> </div> <div style={{width: '100%', height: '85%'}}> <VitalSign socket={this.props.socket} i={i} vitalSign={vitalSign} strokeStyle={strokeStyle} w={w}/> </div> </div> ) }; render() { let { onLayoutChange1, onAddItem1, onResetLayout1, handleWaveformChange, handleColorChange, handleScaleChange, handleSpeedChange, handleLeftDrawerClose, onLayoutChange2, onResetLayout2, onAddItem2, handleRightDrawerClose, handleVitalSignChange, handleVitalSignColorChange, handlePowerButtonToggle } = this.props; let { layout1, items1, layout2, items2, leftDrawer, rightDrawer, powerOn } = 
this.props; let i1 = leftDrawer.get('i'); let open1 = leftDrawer.get('open'); let item1 = items1.get(i1); let waveformValue1 = item1 ? item1.get('waveform') : "ECG - II"; let colorValue1 = item1 ? item1.get('strokeStyle') : "green"; let scaleValue1 = item1 ? item1.get('scale') : 0.8; let speedValue1 = item1 ? item1.get('speed') : 3; let i2 = rightDrawer.get('i'); let open2 = rightDrawer.get('open'); let item2 = items2.get(i2); let vitalSign2 = item2 ? item2.get('vitalSign') : "HR"; let colorValue2 = item2 ? item2.get('strokeStyle') : "green"; let customMode = ( <Grid fluid={true}> <Row> <Col lg={9} style={{height: '95vh', background: grey900, overflow: 'auto'}} className={styles.patientMonitorMobile}> <ReactGridLayout cols={12} rowHeight={200} onLayoutChange={onLayoutChange1} onResizeStop={() => this.forceUpdate()}> {layout1.map(this.createCustomElement1)} </ReactGridLayout> <div style={{ display: 'flex', flexFlow: 'row wrap', justifyContent: 'flex-end' }} > <FloatingActionButton style={{marginLeft: '20px'}} onClick={onResetLayout1}> <FontIcon className="material-icons"> restore </FontIcon> </FloatingActionButton> <FloatingActionButton style={{marginLeft: '20px'}} onClick={onAddItem1}> <FontIcon className="material-icons"> add </FontIcon> </FloatingActionButton> </div> </Col > <Col lg={3} style={{height: '95vh', background: grey800, overflow: 'auto'}} className={styles.patientMonitorMobile}> <ReactGridLayout cols={12} rowHeight={200} onLayoutChange={onLayoutChange2} onResizeStop={() => this.forceUpdate()}> {layout2.map(this.createCustomElement2)} </ReactGridLayout> <div style={{ display: 'flex', flexFlow: 'row wrap', justifyContent: 'flex-end' }} > <FloatingActionButton style={{marginLeft: '20px'}} onClick={onResetLayout2}> <FontIcon className="material-icons"> restore </FontIcon> </FloatingActionButton> <FloatingActionButton style={{marginLeft: '20px'}} onClick={onAddItem2}> <FontIcon className="material-icons"> add </FontIcon> </FloatingActionButton> </div> 
</Col> </Row> <Row> <Col lg={12} style={{height: '5vh', minHeight: 48, background: grey700}}> <IconButton style={{ float: 'right' }} onClick={handlePowerButtonToggle} tooltip="Power" tooltipPosition="top-center" touch={true} > <FontIcon className="material-icons" color={powerOn ? red500 : grey50}> power_settings_new </FontIcon> </IconButton> </Col> </Row> <Drawer width={300} open={open1} openSecondary={true} > <List> <Subheader>WaveForm Type and Color</Subheader> <ListItem> <div>WaveForm</div> <SelectField floatingLabelText="WaveForm Type" value={waveformValue1} onChange={handleWaveformChange} > <MenuItem value="ECG - II" primaryText="ECG - II"/> <MenuItem value="PPG" primaryText="PPG"/> <MenuItem value="RBBB" primaryText="RBBB"/> <MenuItem value="Bigeminy" primaryText="Bigeminy"/> </SelectField> </ListItem> <ListItem> <div>Color</div> <SelectField floatingLabelText="Color Display" value={colorValue1} onChange={handleColorChange} > <MenuItem value="green" primaryText="Green"/> <MenuItem value="red" primaryText="Red"/> <MenuItem value="yellow" primaryText="Yellow"/> <MenuItem value="blue" primaryText="Blue"/> <MenuItem value="white" primaryText="White"/> <MenuItem value="purple" primaryText="Purple"/> </SelectField> </ListItem> </List> <Divider /> <List> <Subheader>Scale and Speed</Subheader> <div> <ListItem> <div>Scale</div> <Slider min={0} max={2} step={0.05} defaultValue={scaleValue1} value={scaleValue1} onChange={handleScaleChange} /> <div style={{'textAlign': 'center'}}> {scaleValue1} </div> </ListItem> <ListItem> <div>Speed</div> <Slider min={0} max={10} step={0.5} defaultValue={speedValue1} value={speedValue1} onChange={handleSpeedChange} /> <div style={{'textAlign': 'center'}}> {speedValue1} </div> </ListItem> </div> </List> <Divider /> <MenuItem onTouchTap={handleLeftDrawerClose}>Save</MenuItem> </Drawer> <Drawer width={300} open={open2} > <List> <Subheader>Vital Sign Type and Color</Subheader> <ListItem> <div>Vital Sign</div> <SelectField 
floatingLabelText="Vital Sign Type" value={vitalSign2} onChange={handleVitalSignChange} > <MenuItem value="HR" primaryText="HR"/> <MenuItem value="ABP" primaryText="ABP"/> <MenuItem value="PAP" primaryText="PAP"/> <MenuItem value="SpO2" primaryText="SpO2"/> <MenuItem value="RP" primaryText="RP"/> <MenuItem value="NBP" primaryText="NBP"/> </SelectField> </ListItem> <ListItem> <div>Color</div> <SelectField floatingLabelText="Color Display" value={colorValue2} onChange={handleVitalSignColorChange} > <MenuItem value="green" primaryText="Green"/> <MenuItem value="red" primaryText="Red"/> <MenuItem value="yellow" primaryText="Yellow"/> <MenuItem value="blue" primaryText="Blue"/> <MenuItem value="white" primaryText="White"/> <MenuItem value="purple" primaryText="Purple"/> </SelectField> </ListItem> </List> <MenuItem onTouchTap={handleRightDrawerClose}>Save</MenuItem> </Drawer> </Grid> ); let powerOnMode = ( <Grid fluid={true}> <Row> <Col lg={9} style={{height: '95vh', background: grey900, overflow: 'auto'}} className={styles.patientMonitorMobile}> <ReactGridLayout cols={12} rowHeight={200} isDraggable={false} isResizable={false}> {layout1.map(this.createPlayElement1)} </ReactGridLayout> </Col > <Col lg={3} style={{height: '95vh', background: grey900, overflow: 'auto'}} className={styles.patientMonitorMobile}> <ReactGridLayout cols={12} rowHeight={200} isDraggable={false} isResizable={false}> {layout2.map(this.createPlayElement2)} </ReactGridLayout> </Col> </Row> <Row> <Col lg={12} style={{height: '5vh', minHeight: 48, background: grey700}}> <IconButton style={{ float: 'right' }} onClick={handlePowerButtonToggle} tooltip="Power" tooltipPosition="top-center" touch={true} > <FontIcon className="material-icons" color={powerOn ? red500 : grey50}> power_settings_new </FontIcon> </IconButton> </Col> </Row> </Grid> ); return powerOn ? 
powerOnMode : customMode; } } const mapStateToProps = createStructuredSelector({ layout1: selectLayout1(), items1: selectItems1(), layout2: selectLayout2(), items2: selectItems2(), leftDrawer: selectLeftDrawer(), rightDrawer: selectRightDrawer(), powerOn: selectPowerOn(), socket: selectSocket(), ip: selectIP(), port: selectPort(), protocol: selectProtocol(), patientMonitor: selectPatientMonitor() }); function mapDispatchToProps(dispatch) { return { onLayoutChange1: (layout1) => dispatch(changeLayout1(layout1)), onResetLayout1: () => dispatch(resetLayout1()), onAddItem1: () => dispatch(addItem1()), onRemoveItem1: (i) => dispatch(removeItem1(i)), onLayoutChange2: (layout2) => dispatch(changeLayout2(layout2)), onResetLayout2: () => dispatch(resetLayout2()), onAddItem2: () => dispatch(addItem2()), onRemoveItem2: (i) => dispatch(removeItem2(i)), onPowerOnModeKeyUp: (e) => e.keyCode === 27 ? dispatch(handlePowerButtonToggle()) : null, handleLeftDrawerToggle: (i) => dispatch(handleLeftDrawerToggle(i)), handleLeftDrawerClose: () => dispatch(handleLeftDrawerClose()), handleWaveformChange: (event, index, value) => dispatch(handleWaveformChange(value)), handleColorChange: (event, index, value) => dispatch(handleColorChange(value)), handleScaleChange: (event, value) => dispatch(handleScaleChange(value)), handleSpeedChange: (event, value) => dispatch(handleSpeedChange(value)), handleRightDrawerToggle: (i) => dispatch(handleRightDrawerToggle(i)), handleRightDrawerClose: () => dispatch(handleRightDrawerClose()), handleVitalSignChange: (event, index, value) => dispatch(handleVitalSignChange(value)), handleVitalSignColorChange: (event, index, value) => dispatch(handleVitalSignColorChange(value)), handlePowerButtonToggle: () => dispatch(handlePowerButtonToggle()) }; } export default connect(mapStateToProps, mapDispatchToProps)(PatientMonitorMobile);
{ "content_hash": "cb36a391798b76297b30d7ce76622843", "timestamp": "", "source": "github", "line_count": 634, "max_line_length": 119, "avg_line_length": 29.353312302839118, "alnum_prop": 0.5377216550241806, "repo_name": "johnsiu2016/FYP", "id": "fc6bd603f3653f34631aa70689c9fd1fcd281d9b", "size": "18610", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/containers/PatientMonitorMobile/index.js", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "1526" }, { "name": "CSS", "bytes": "1709" }, { "name": "HTML", "bytes": "9422" }, { "name": "JavaScript", "bytes": "171134" } ], "symlink_target": "" }
package com.LSH.server; import com.LSH.client.DataType.ReturnLinkData; import com.LSH.server.Config.Config; import com.LSH.client.DataType.GetLinkData; import com.LSH.client.DataType.PutLinkData; import com.LSH.server.Log.Log; import com.LSH.server.Log.LogEvent; import static com.LSH.server.LSHService.errorCode; import java.sql.*; import java.util.*; /** * Created by @author AlNat on 16.09.2016. * Licensed by Apache License, Version 2.0 * * Класс соединения приложения с БД */ class DBConnect { static final DBConnect instance = new DBConnect(); // Реализация паттерна Singleton private Connection connection; // Соединение private DBConnect () { // Конструктор String url = "jdbc:postgresql:" + Config.instance.getURL(); String login = Config.instance.getLogin(); String password = Config.instance.getPassword(); Properties props; try { // Попытались установить соединение с БД Class.forName("org.postgresql.Driver"); props = new Properties(); props.setProperty("user", login); props.setProperty("password", password); props.setProperty("ssl", "false"); connection = DriverManager.getConnection(url, props); } catch (SQLException e) { // Ловим ошибку e.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect"); l.setType("Connection"); l.setMessage("Cannot connect to DB"); Log.instance.WriteEvent(l); } catch (ClassNotFoundException e) { System.out.println("Please, check Tomcat lib folder for JDBC jar archive"); e.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect"); l.setType("Connection"); l.setMessage("Cannot find jdbc driver"); Log.instance.WriteEvent(l); } } /** * Функция проверки - занят ли этот код * @param code мнемноничесая ссылки * @return -2 если занят. -1 при ошибке кода. 
id от кода в другом случае */ private Long CheckAvailability (String code) { Long id = Shortner.GetID(code); // Получили id по коду if (id == -1) { // Если код ошибочный то вернули ошибку -1 return -1L; } try { // Создаем запрос и выполняем его PreparedStatement st = connection.prepareStatement("SELECT valid FROM status WHERE user_id = ?", ResultSet.TYPE_SCROLL_INSENSITIVE); st.setLong(1, id); ResultSet rs = st.executeQuery(); // Получаем ответ rs.next(); boolean answer = rs.getBoolean("valid"); // Закрываем rs.close(); st.close(); // Парсим ответ if (answer) { // Если занят то возращаем код занятости return -2L; } else { // Иначе сам id return id; } } catch (SQLException e) { // Ловим ошибки if (e.getSQLState().equals("24000")) { // Пустой ответ - нет такого кода - еще не использовался return id; } System.out.println("Connection Failed! Check output console"); e.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect.CheckAvailability"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); return -1L; // И говорим про ошибку } } /** * Метод, который кладет в БД новую ссылки и * @param in сообщение с данными ссылки * @return Код ошибки или короткую ссылку */ String Put(PutLinkData in) { long id; // id Ссылки String code; // Короткий код Statement statement; PreparedStatement preparedStatement; ResultSet resultSet; if (!in.getShortLink().equals("NULL")) { // Если есть желаемый короткий код code = in.getShortLink(); // Получаем его Long answer = CheckAvailability(code); // И проверяем его наличие в БД if (answer == -1) { // Если код невалидный то отвечаем ошибкой // Пишем лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); l.setType("InvalidCode"); l.setMessage("answer = -1"); Log.instance.WriteEvent(l); return errorCode + "<br>Invalid code!"; } else if (answer == -2) { // Если занят тоже // Пишем лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); 
l.setType("CodeError"); l.setMessage("MemoUnavailable"); Log.instance.WriteEvent(l); return errorCode + "<br>Unfortunately, your memo is not available"; } else { // Иначе берем его как новый id id = answer; } } else { try { // Получаем новый id из базы // Создали соедниение и вызвали функцию генерации нового id statement = connection.createStatement(); resultSet = statement.executeQuery("SELECT get_next_id()"); // Получили его resultSet.next(); id = resultSet.getInt(1); // И закрыли соединение resultSet.close(); statement.close(); } catch (SQLException e) { // Отловили ошибки e.printStackTrace(); WritePutLog(in, e);// Пишем лог return errorCode + "<br>SQL Error!"; } } // Логгирование пользователей String userLogin = in.getUserLogin(); // Логин Integer userID = 0; // id этого логина if (userLogin != null) { // если есть логин try { // Пошли в БД за id PreparedStatement st = connection.prepareStatement("SELECT id FROM users WHERE login = ?", ResultSet.TYPE_SCROLL_INSENSITIVE); st.setString(1, userLogin); ResultSet rs = st.executeQuery(); // / Получаем ответ rs.next(); userID = rs.getInt("id"); } catch (SQLException e) { if (e.getSQLState().equals("24000")) { // Пустой ответ - нет такого логина - вернули ошибку LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); l.setType("Login not found"); l.setMessage("Login = " + userLogin); Log.instance.WriteEvent(l); return errorCode + "<br>Incorrect user!"; } // Иначе это просто ошибка sql System.out.println("Connection Failed! 
Check output console"); e.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); return errorCode + "<br>SQL Error!"; } } code = Shortner.GetShort(id); // Сокращаем id в код // Приводим дату к виду Timestamp String t = in.getTtl(); Timestamp date; switch (t) { case "1 hour": date = new Timestamp(System.currentTimeMillis() + 3600000L ); // ТК 1 час - это 3600000 миллисекунд break; case "12 hour": date = new Timestamp(System.currentTimeMillis() + 43200000L ); // Аналогично break; case "1 day": date = new Timestamp(System.currentTimeMillis() + 86400000L ); break; case "1 week": date = new Timestamp(System.currentTimeMillis() + 604800000L ); break; case "1 month": date = new Timestamp(System.currentTimeMillis() + 2678400000L ); // Это, конечно, не месяц, а 31 день. Но нам огромная точность не нужна break; case "Unlimited": date = new Timestamp(System.currentTimeMillis() + 3155760000000L); // Это 100 лет. Куда уж больше? // LONG_MAX не влезает //date = new Timestamp(Timestamp.parse("infinity")); // Есть такой вариант, но какая разница? 
break; default: date = new Timestamp(System.currentTimeMillis()); // По дефолту будем делать ссылку не валидной } try { // Пишем в базу // Создали соединение preparedStatement = connection.prepareStatement( "INSERT INTO short(user_id, link, expired_date, max_count, current_count, ip, user_agent, password, owner) VALUES (?, ?, ?, ?, ?, ?::cidr, ?, ?, ?)" ); preparedStatement.setLong(1, id); preparedStatement.setString(2, in.getOriginalLink()); preparedStatement.setTimestamp(3, date); preparedStatement.setInt(4, in.getMaxVisits()); preparedStatement.setInt(5, 0); preparedStatement.setString(6, in.getIp()); preparedStatement.setString(7, in.getBrowser()); preparedStatement.setString(8, in.getPassword()); preparedStatement.setInt(9, userID); // Выолнили вставку и закрыли соединение preparedStatement.execute(); preparedStatement.close(); } catch (SQLException e) { // Отловили ошибки e.printStackTrace(); WritePutLog(in, e);// Пишем лог return errorCode + "<br>SQL Error!"; } // Пишем лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); l.setType("Success"); l.setMessage("Return code:" + code); Log.instance.WriteEvent(l); return code; // И возращаем саму ссылки } /** * Метод, который возращает оригинальную ссылку по коду * @param in данные об переходе * @return оригинальная ссылка или сообщение об ошибке */ ReturnLinkData Get (GetLinkData in) { String code = in.getCode(); // Получили код long id = Shortner.GetID(code); // Попытались код преобразовать к id if (id == -1 || code.equals("ERROR")) { // Если ошибка то вернули WriteGetLog(in); // Пишем лог return new ReturnLinkData("<br>Error code!"); } ResultSet resultSet; PreparedStatement preparedStatement; try { // Проверили, что этот id вообще есть // Инвалидируем ссылки preparedStatement = connection.prepareStatement("SELECT invalidate()"); resultSet = preparedStatement.executeQuery(); resultSet.close(); // Создали и выполнили запрос preparedStatement = connection.prepareStatement("SELECT valid FROM 
status WHERE user_id = ?"); preparedStatement.setLong(1, id); resultSet = preparedStatement.executeQuery(); // Получили ответ resultSet.next(); boolean t = resultSet.getBoolean(1); if (!t) { // Если этот id false = свободен, то выдаем ошибку // Пишем в лог WriteGetLog(in); return new ReturnLinkData("<br>Invalid code!"); } // Закрыли соединение resultSet.close(); preparedStatement.close(); } catch (SQLException e) { // Вывели ошибки if (e.getSQLState().equals("24000")) { // Пустой ответ - нет такого кода - еще не использовался // Пишем в лог WriteGetLog(in); return new ReturnLinkData("<br>Invalid code!"); } e.printStackTrace(); // Пишем в лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Get"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); return new ReturnLinkData("<br>SQL Error!"); } Integer tableID; // id в таблице для foreign key в аналитике Integer curCount; // Текущее кол-во переходов String link; // Сам линк String password; // Хэш от пароля try { // Получили оригинальную ссылку //connection.setAutoCommit(false); // Запретили автоматический коммит preparedStatement = connection.prepareStatement("SELECT id, link, password, current_count FROM short WHERE user_id = ? ORDER BY user_id DESC LIMIT 1"); preparedStatement.setLong(1, id); resultSet = preparedStatement.executeQuery(); resultSet.next(); link = resultSet.getString("link"); tableID = resultSet.getInt("id"); curCount = resultSet.getInt("current_count"); password = resultSet.getString("password"); resultSet.close(); preparedStatement.close(); } catch (SQLException e) { e.printStackTrace(); WriteGetELog(in, e);// Пишем в лог return new ReturnLinkData("<br>SQL Error!"); } try { // Обновили ко-во переходов preparedStatement = connection.prepareStatement("UPDATE short SET current_count = ? 
WHERE id = ?"); preparedStatement.setInt(1, curCount + 1); preparedStatement.setInt(2, tableID); preparedStatement.execute(); preparedStatement.close(); //connection.commit(); // Закомитили изменений } catch (SQLException e) { e.printStackTrace(); WriteGetELog(in, e);// Пишем в лог return new ReturnLinkData("<br>SQL Error!"); } try { // Запись аналитики // Вставили данные в таблицу аналитики preparedStatement = connection.prepareStatement("INSERT INTO analitics (short_id, visit_time, ip, user_agent) VALUES (?, ?, ?::cidr, ?)"); preparedStatement.setInt(1, tableID); preparedStatement.setTimestamp(2, new Timestamp( System.currentTimeMillis() ) ); preparedStatement.setString(3, in.getIp()); preparedStatement.setString(4, in.getBrowser()); preparedStatement.execute(); preparedStatement.close(); // Закрыли соединение } catch (SQLException e) { // Вывели ошибки e.printStackTrace(); WriteGetELog(in, e);// Пишем в лог return new ReturnLinkData("<br>SQL Error!"); } // Пишем лог LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Get"); l.setType("Success"); l.setMessage("Return link"); Log.instance.WriteEvent(l); return new ReturnLinkData(link, password); // Вернули оригинальную ссылку для редиректа } /** * Функция входа пользователя * @param userLogin логин * @param userPassword пароль * @return true если вошел, false если ошибка */ String Login(String userLogin, String userPassword) { // Идем в БД и проверяем на соответствие пользователя try { // Создаем запрос и выполняем его PreparedStatement st = connection.prepareStatement("SELECT id, password FROM users WHERE login = ?", ResultSet.TYPE_SCROLL_INSENSITIVE); st.setString(1, userLogin); ResultSet rs = st.executeQuery(); // Получаем ответ rs.next(); String pass = rs.getString("password"); // Закрываем rs.close(); st.close(); if (pass.equals(userPassword)) { // Если пользователь ввел верный логин и пароль, то запоминаем его id return "OK"; } else { // Если пароль не подошел, то возращаем ошибку // Пишем лог 
LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect.Login"); l.setType("Login"); l.setMessage("Login = " + userLogin + "; Password = " + userPassword); Log.instance.WriteEvent(l); return errorCode + "<br>Incorrect user or password!"; } } catch (SQLException e) { // Ловим ошибки if (e.getSQLState().equals("24000")) { // Пустой ответ - нет такого логина - создаем нового пользователя try { // Создаем пользователя PreparedStatement st = connection.prepareStatement("INSERT INTO users(login, password) VALUES (?, ?)", ResultSet.TYPE_SCROLL_INSENSITIVE); st.setString(1, userLogin); st.setString(2, userPassword); st.execute(); st.close(); return "OK"; } catch (SQLException ee) { // Ловим ошибки System.out.println("Connection Failed! Check output console"); ee.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect.Login.CreateNew"); l.setType("SQLException"); l.setMessage(ee.getMessage()); Log.instance.WriteEvent(l); return errorCode + "<br>SQL Error!"; } } else { // Иначе это обычная ошибка System.out.println("Connection Failed! Check output console"); e.printStackTrace(); // Пишем лог LogEvent l = new LogEvent(); l.setClassName("LSH.DBConnect.Login"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); return errorCode + "<br>SQL Error!"; } } } // Функции для записи в лог - вынес, тк надоела подсветка в IDEA о дублировании кода /** * Функция, пишушья код ошибки в лог. * Вынес, тк используеться больше 2 раз * @param in данные * @param e exception */ private void WritePutLog (PutLinkData in, SQLException e) { LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Put"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); } /** * Функция, пишушья код ошибки в лог. 
* Вынес, тк используеться больше 2 раз * @param in данные */ private void WriteGetLog (GetLinkData in) { LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Get"); l.setType("CodeError"); l.setMessage("Invalid code!"); Log.instance.WriteEvent(l); } /** * Функция, пишушая код ошибки в лог. * Вынес, тк используеться больше 3 раз * @param in данные * @param e exception */ private void WriteGetELog (GetLinkData in, SQLException e) { LogEvent l = new LogEvent(in); l.setClassName("LSH.DBConnect.Get"); l.setType("SQLException"); l.setMessage(e.getMessage()); Log.instance.WriteEvent(l); } }
{ "content_hash": "d176ebdfcede82831d47414d5fea9295", "timestamp": "", "source": "github", "line_count": 574, "max_line_length": 168, "avg_line_length": 34.16202090592334, "alnum_prop": 0.5507675047172217, "repo_name": "AlNat/LSH", "id": "fdb4fbb3cc5309f60d64525eba08d77ce6996f6f", "size": "22156", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "LSH/src/com/LSH/server/DBConnect.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "7229" }, { "name": "HTML", "bytes": "7011" }, { "name": "Java", "bytes": "142183" }, { "name": "PLpgSQL", "bytes": "7342" }, { "name": "Shell", "bytes": "755" } ], "symlink_target": "" }
package org.optaplanner.core.impl.domain.solution.cloner; import java.lang.reflect.Field; import java.util.function.Consumer; final class CharFieldCloner implements FieldCloner { static final FieldCloner INSTANCE = new CharFieldCloner(); @Override public <C> void clone(DeepCloningUtils deepCloningUtils, Field field, Class<? extends C> instanceClass, C original, C clone, Consumer<Object> deferredValueConsumer) { char originalValue = getFieldValue(original, field); setFieldValue(clone, field, originalValue); } private static char getFieldValue(Object bean, Field field) { try { return field.getChar(bean); } catch (IllegalAccessException e) { throw FieldCloner.createExceptionOnRead(bean, field, e); } } private static void setFieldValue(Object bean, Field field, char value) { try { field.setChar(bean, value); } catch (IllegalAccessException e) { throw FieldCloner.createExceptionOnWrite(bean, field, value, e); } } private CharFieldCloner() { } }
{ "content_hash": "50a713ab88105c031b8be983bbfcb197", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 107, "avg_line_length": 30.45945945945946, "alnum_prop": 0.6708074534161491, "repo_name": "tkobayas/optaplanner", "id": "7af875f0c3755c2d8b26a918154831fa3c0e1d6e", "size": "1127", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "core/optaplanner-core-impl/src/main/java/org/optaplanner/core/impl/domain/solution/cloner/CharFieldCloner.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "2540" }, { "name": "CSS", "bytes": "32771" }, { "name": "FreeMarker", "bytes": "116587" }, { "name": "Groovy", "bytes": "20273" }, { "name": "HTML", "bytes": "3966" }, { "name": "Java", "bytes": "11961620" }, { "name": "JavaScript", "bytes": "304742" }, { "name": "Shell", "bytes": "5984" }, { "name": "XSLT", "bytes": "775" } ], "symlink_target": "" }
Service discovery that serves a static list of addresses.
{ "content_hash": "c792f36606e2da89624fdb106d4d2a93", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 57, "avg_line_length": 58, "alnum_prop": 0.8275862068965517, "repo_name": "bigeasy/mingle", "id": "2341bac8782bd1f225dbe8ab19335566bbca1a17", "size": "58", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mingle.static/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "18959" }, { "name": "Makefile", "bytes": "763" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name Sordaria insignis E.C. Hansen, 1876 ### Remarks null
{ "content_hash": "de56ec0a4d6383a8a3e4d9722ccb0bbb", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 12.615384615384615, "alnum_prop": 0.7073170731707317, "repo_name": "mdoering/backbone", "id": "6dd817d024ecb6133987a919e1578e2e1dcc5b1c", "size": "232", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Sordariomycetes/Sordariales/Lasiosphaeriaceae/Strattonia/Strattonia insignis/ Syn. Hypocopra insignis/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package org.wso2.maven.p2.generate.feature; import org.apache.maven.plugin.MojoExecutionException; public class ImportBundle extends Bundle{ /** * Version Compatibility of the Bundle * * @parameter default-value="false" */ private boolean exclude; /** * OSGI Symbolic name * * @parameter */ private String bundleSymbolicName; /** * OSGI Version * * @parameter */ private String bundleVersion; public void setExclude(boolean exclude) { this.exclude = exclude; } public boolean isExclude() { return exclude; } public static ImportBundle getBundle(String bundleDefinition) throws MojoExecutionException{ return (ImportBundle) Bundle.getBundle(bundleDefinition, new ImportBundle()); } public void setBundleSymbolicName(String bundleSymbolicName) { this.bundleSymbolicName = bundleSymbolicName; } public String getBundleSymbolicName() { return bundleSymbolicName; } public void setBundleVersion(String bundleVersion) { this.bundleVersion = bundleVersion; } public String getBundleVersion() { return bundleVersion; } }
{ "content_hash": "353d953a7b7c771c07d3ad34816a4ff5", "timestamp": "", "source": "github", "line_count": 58, "max_line_length": 93, "avg_line_length": 19.43103448275862, "alnum_prop": 0.7169476486246673, "repo_name": "wso2/maven-tools", "id": "ab582bebd7011d2afddbbd9cd39d4a45ed567c2e", "size": "1794", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "carbon-p2-plugin/src/main/java/org/wso2/maven/p2/generate/feature/ImportBundle.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "669398" } ], "symlink_target": "" }
body { padding-top: 70px; } /* * The code below adds some padding to the top of the current anchor target so * that, when navigating to it, the header isn't hidden by the navbar at the * top. This is especially complicated because we want to *remove* the padding * after navigation so that hovering over the header shows the permalink icon * correctly. Thus, we create a CSS animation to remove the extra padding after * a second. We have two animations so that navigating to an anchor within the * page always restarts the animation. * * See <https://github.com/mkdocs/mkdocs/issues/843> for more details. */ :target::before { content: ""; display: block; margin-top: -75px; height: 75px; pointer-events: none; animation: 0s 1s forwards collapse-anchor-padding-1; } body.clicky :target::before { animation-name: collapse-anchor-padding-2; } @keyframes collapse-anchor-padding-1 { to { margin-top: 0; height: 0; } } @keyframes collapse-anchor-padding-2 { to { margin-top: 0; height: 0; } } ul.nav li.main { font-weight: bold; } div.col-md-3 { padding-left: 0; } div.col-md-9 { padding-bottom: 100px; } div.source-links { float: right; } div.col-md-9 img { max-width: 100%; } code { padding: 1px 3px; background: #ecf0f1; border: solid 1px #ccc; color: #7b8a8b; } pre code { background: transparent; border: none; } a > code { color: #18bc9c; } /* * Side navigation * * Scrollspy and affixed enhanced navigation to highlight sections and secondary * sections of docs content. 
*/ /* By default it's not affixed in mobile views, so undo that */ .bs-sidebar.affix { position: static; } .bs-sidebar.well { padding: 0; } /* First level of nav */ .bs-sidenav { margin-top: 30px; margin-bottom: 30px; padding-top: 10px; padding-bottom: 10px; border-radius: 5px; } /* All levels of nav */ .bs-sidebar .nav > li > a { display: block; padding: 5px 20px; z-index: 1; } .bs-sidebar .nav > li > a:hover, .bs-sidebar .nav > li > a:focus { text-decoration: none; border-right: 1px solid; } .bs-sidebar .nav > .active > a, .bs-sidebar .nav > .active:hover > a, .bs-sidebar .nav > .active:focus > a { font-weight: bold; background-color: transparent; border-right: 1px solid; } /* Nav: second level (shown on .active) */ .bs-sidebar .nav .nav { display: none; /* Hide by default, but at >768px, show it */ margin-bottom: 8px; } .bs-sidebar .nav .nav > li > a { padding-top: 3px; padding-bottom: 3px; padding-left: 30px; font-size: 90%; } /* Show and affix the side nav when space allows it */ @media (min-width: 992px) { .bs-sidebar .nav > .active > ul { display: block; } /* Widen the fixed sidebar */ .bs-sidebar.affix, .bs-sidebar.affix-bottom { width: 213px; } .bs-sidebar.affix { position: fixed; /* Undo the static from mobile first approach */ top: 80px; } .bs-sidebar.affix-bottom { position: absolute; /* Undo the static from mobile first approach */ } .bs-sidebar.affix-bottom .bs-sidenav, .bs-sidebar.affix .bs-sidenav { margin-top: 0; margin-bottom: 0; } } @media (min-width: 1200px) { /* Widen the fixed sidebar again */ .bs-sidebar.affix-bottom, .bs-sidebar.affix { width: 263px; } } .headerlink { display: none; padding-left: .5em; } h1:hover .headerlink, h2:hover .headerlink, h3:hover .headerlink, h4:hover .headerlink, h5:hover .headerlink, h6:hover .headerlink{ display:inline-block; } /* display submenu relative to parent*/ .dropdown-submenu { position: relative; } /* sub menu stlye */ .dropdown-submenu>.dropdown-menu { top: 0; left: 100%; margin-top: 0px; margin-left: 
-1px; -webkit-border-radius: 0 4px 4px 4px; -moz-border-radius: 0 4px 4px; border-radius: 0 4px 4px 4px; } /* display sub menu on hover*/ .dropdown-submenu:hover>.dropdown-menu { display: block; } /* little arrow */ .dropdown-submenu>a:after { display: block; content: " "; float: right; width: 0; height: 0; border-color: transparent; border-style: solid; border-width: 5px 0 5px 5px; border-left-color: #ccc; margin-top: 5px; margin-right: -10px; } /* little arrow of parent menu */ .dropdown-submenu:hover>a:after { border-left-color: #404040; }
{ "content_hash": "2d55ffa979ec9b0d7148aa6f575c5619", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 131, "avg_line_length": 21.07943925233645, "alnum_prop": 0.628463755264908, "repo_name": "lcomstive/CoffeeEngine", "id": "62b343754c0cec87de35fdf7fbda8c7d620b3292", "size": "4511", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "src/Docs/site/css/base.css", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1927" }, { "name": "C++", "bytes": "14982" }, { "name": "CMake", "bytes": "3263" }, { "name": "CSS", "bytes": "27851" }, { "name": "HTML", "bytes": "37776" }, { "name": "JavaScript", "bytes": "19405" }, { "name": "Shell", "bytes": "1216" } ], "symlink_target": "" }
require 'bigdecimal' module Fedex::WebServices module Service class Rate < Base def get_rates(service_type, rate_request_type, from, to, weight, &process_contents) request = GetRates.new(self, service_type, rate_request_type, from, to, weight ) response = issue_request(request, &process_contents) return [ Rate.rate_for(response, rate_request_type), response ] end def service_id :crs end def self.rate_for(response, rate_request_type) details = response.rateReplyDetails.first.ratedShipmentDetails.select do |detail| detail.shipmentRateDetail.rateType == "PAYOR_#{rate_request_type}_PACKAGE" end details.inject(0) do |acc, detail| acc + BigDecimal.new(detail.shipmentRateDetail.totalNetCharge.amount) end end protected def port RatePortType.new(service_url) end end end end
{ "content_hash": "9ab4f4470022a0e5617b6ed60b750738", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 89, "avg_line_length": 25.1, "alnum_prop": 0.6105577689243028, "repo_name": "parndt/fedex-web-services", "id": "a5c6564a83c0fbd5d65845f8a4be385d5c04eef4", "size": "1004", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/fedex/web_services/service/rate.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "16715" } ], "symlink_target": "" }
phonegap-fun ============ Phonegap spiking ground
{ "content_hash": "f26071405f73878d73f60e65a40d68da", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 23, "avg_line_length": 12.75, "alnum_prop": 0.6274509803921569, "repo_name": "levexis/phonegap-fun", "id": "ece914991ba0383081cfa71b16f47d6e6d45771d", "size": "51", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "3670" }, { "name": "C++", "bytes": "3056" }, { "name": "CSS", "bytes": "43288" }, { "name": "D", "bytes": "33539" }, { "name": "Java", "bytes": "751487" }, { "name": "JavaScript", "bytes": "104637" }, { "name": "Objective-C", "bytes": "4063" }, { "name": "Shell", "bytes": "11293" } ], "symlink_target": "" }
<?php namespace Sensio\Bundle\TodoBundle; use Symfony\Component\HttpKernel\Bundle\Bundle; class SensioTodoBundle extends Bundle { }
{ "content_hash": "c31e4edc77e873e130bca1b4cf9e3dbf", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 47, "avg_line_length": 15, "alnum_prop": 0.8074074074074075, "repo_name": "foxreymann/getting-started-with-symfony2", "id": "e7e29c2096e69a08160767e4ed3f53b53fa362a7", "size": "135", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Sensio/Bundle/TodoBundle/SensioTodoBundle.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "61423" }, { "name": "Ruby", "bytes": "552" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no" /> <title>Jeremy’s Letter (WEB)</title> <link href="../../../build/mobile.css" rel="stylesheet" /> <script src="../../../build/mobile.js"></script> </head> <body dir="ltr" class="division-index"> <div class="header"><div class="nav"> <span class="name">Jeremy’s Letter</span> <a class="home" href="index.html">&#9776;</a> </div></div> <ul class="section-list"> <li><a href="LJ6.html">Jeremy’s Letter 6</a></li> </ul> </body> </html>
{ "content_hash": "76f844608000203b39a10d5a58b707db", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 90, "avg_line_length": 30.263157894736842, "alnum_prop": 0.6434782608695652, "repo_name": "khangpng/VietnameseBible", "id": "38540cb448f06e1c05e45dd050823f4b0f0f9fde", "size": "581", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/content/texts/eng_web/LJ.html", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "5625" }, { "name": "CSS", "bytes": "77416" }, { "name": "HTML", "bytes": "175720807" }, { "name": "JavaScript", "bytes": "853195" }, { "name": "PHP", "bytes": "25275" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in Mém. Soc. nat. hist. Nat. Math. Cherbourg 37: 136 (1909) #### Original name Lecidea cinereofusca var. ammiospila Ach. ### Remarks null
{ "content_hash": "4dd3ca91c08ff7f9e2402ea2d423cdb7", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 56, "avg_line_length": 17.153846153846153, "alnum_prop": 0.7085201793721974, "repo_name": "mdoering/backbone", "id": "b60200e996243983dd22c07b7ad5ead2725493cb", "size": "290", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Lecanoromycetes/Teloschistales/Teloschistaceae/Caloplaca/Caloplaca ammiospila/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
var express = require("express"), app = express(), http = require("http"), url = require("url"), path = require("path"), fs = require("fs"), port = process.argv[2] || 8888, upload = require('loadimage.js'), managepresentation = require('managepresentation.js'), bodyParser = require('body-parser'); app.use(express.static(__dirname + '/public/')); app.use(bodyParser.urlencoded({ limit: '100mb', extended: true })); app.use(bodyParser.json({ limit: '100mb' })); app.get("/", function(request, response) { var uri = url.parse(request.url).pathname , filename = path.join(process.cwd(), uri); var contentTypesByExtension = { '.html': "text/html", '.css': "text/css", '.js': "text/javascript" }; fs.exists(filename, function(exists) { if (!exists) { response.writeHead(404, {"Content-Type": "text/plain"}); response.write("404 Not Found\n"); response.end(); return; } if (fs.statSync(filename).isDirectory()) filename += '/public/index.html'; fs.readFile(filename, "binary", function(err, file) { if (err) { response.writeHead(500, {"Content-Type": "text/plain"}); response.write(err + "\n"); response.end(); return; } var headers = {}; var contentType = contentTypesByExtension[path.extname(filename)]; if (contentType) headers["Content-Type"] = contentType; response.writeHead(200, headers); response.write(file, "binary"); response.end(); }); }); }); var multipart = require('connect-multiparty'); var multipartMiddleware = multipart(); app.post('/uploadimage', multipartMiddleware, upload.uploadImage); app.post('/upimagefromurl', upload.uploadUrlImage); app.post('/toPDF', managepresentation.toPDF); app.post('/exportPres', managepresentation.exportPresentation); app.post('/getPres', managepresentation.getPresentation); app.post('/downloadPres', managepresentation.downloadPresentation); app.listen(parseInt(port, 10));
{ "content_hash": "fd7a14a6683e62c72340dd3aa76b9224", "timestamp": "", "source": "github", "line_count": 73, "max_line_length": 78, "avg_line_length": 30.82191780821918, "alnum_prop": 0.5804444444444444, "repo_name": "EduAwaSeka/fullslider", "id": "81aab0b458681f0f8e8fdb81b6bc3adbbaba6147", "size": "2250", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "public_html/init.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "874" }, { "name": "CSS", "bytes": "618456" }, { "name": "HTML", "bytes": "806481" }, { "name": "Java", "bytes": "7182" }, { "name": "JavaScript", "bytes": "1967765" }, { "name": "Smarty", "bytes": "392" } ], "symlink_target": "" }
"""Community relations manager. The manager provides the API to add, remove and iterate over communities associated with a record. """ from invenio_db import db from invenio_records.api import Record from invenio_communities.communities.records.api import Community class CommunitiesRelationManager: """Manager for a record's community relations.""" def __init__(self, m2m_model_cls, record_id, data): """Constructor.""" self._m2m_model_cls = m2m_model_cls self._record_id = record_id self._default_id = None self._communities_ids = set() self._communities_cache = {} self.from_dict(data) # # Helpers # def _to_id(self, val): """Get the community id.""" if isinstance(val, str): return val elif isinstance(val, Record): return str(val.id) return None def _lookup_community(self, community_id): """Retrieve a community by id. Caches the community. """ if community_id not in self._communities_cache: c = Community.get_record(community_id) self._communities_cache[str(c.id)] = c return self._communities_cache[community_id] # # API # def add(self, community_or_id, request=None, default=False): """Add a record to a community. If a record was already added to a community an IntegrityError will be raised. """ community_id = self._to_id(community_or_id) # Create M2M object obj = self._m2m_model_cls( record_id=self._record_id, community_id=community_id, request_id=self._to_id(request), ) db.session.add(obj) # Add to internal set self._communities_ids.add(community_id) # Set default if default: self._default_id = community_id # Cache community only if provided if isinstance(community_or_id, Community): self._communities_cache[community_id] = community_or_id def remove(self, community_or_id): """Remove a record from a community.""" community_id = self._to_id(community_or_id) # Delete M2M row. 
res = self._m2m_model_cls.query.filter_by( community_id=community_id, record_id=self._record_id ).delete() if res != 1: raise ValueError("The record has not been added to the community.") # Remove from internal set self._communities_ids.remove(community_id) # Unset default if needed if self._default_id == community_id: self._default_id = None def clear(self): """Clear all communities from the record.""" # Remove all associations res = self._m2m_model_cls.query.filter_by(record_id=self._record_id).delete() self._communities_ids = set() self._default_id = None self._communities_cache = {} def refresh(self): """Refresh from the database M2M table.""" # Retrieve from M2M table ids = ( db.session.query(self._m2m_model_cls.community_id) .filter(self._m2m_model_cls.record_id == self._record_id) .all() ) # Set internal list self._communities_ids = set([str(x[0]) for x in ids]) # Unset default if no longer available if self._default_id and self._default_id not in self._communities_ids: self._default_id = None def __len__(self): """Get number of communities.""" return len(self._communities_ids) def __contains__(self, community_or_id): """Check record is in community.""" id_ = self._to_id(community_or_id) return id_ in self._communities_ids def __iter__(self): """Iterate over a communities.""" # Determine community ids not already cached. 
nocache_ids = self._communities_ids - set(self._communities_cache.keys()) # Fetch and cache missing community records if nocache_ids: communities = Community.get_records(nocache_ids) for c in communities: self._communities_cache[str(c.id)] = c # Iterate (sort by identifier to ensure consistent results) return (self._communities_cache[c] for c in sorted(self._communities_ids)) @property def ids(self): """Get communities ids.""" return sorted(self._communities_ids) @property def default(self): """Get the default community.""" if self._default_id is not None: return self._lookup_community(self._default_id) return None @default.setter def default(self, community_or_id): """Set the default community. Note, the community must already have been added to the community. If not, then use ``.add(community, default=True)`` instead. """ id_ = self._to_id(community_or_id) if id_ not in self._communities_ids: raise AttributeError( "Cannot set community as the default. " "The record has not been added to the community." ) self._default_id = id_ @default.deleter def default(self): self._default_id = None # Persist relationships in record (denormalize the M2M table). # This enables 1) tracking community membership via the record versioning # and 2) faster indexing by not having to query the database for # relationships. def to_dict(self): """Get the dictionary which will be stored in the record.""" data = {} if self._default_id is not None: data["default"] = self._default_id ids = list(self.ids) if ids: data["ids"] = ids return data def from_dict(self, data): """Build manager from the record dict.""" data = data or {} self._default_id = data.get("default", None) self._communities_ids = set(data.get("ids", [])) return self
{ "content_hash": "3c608939914a32144eb1dfade1958e48", "timestamp": "", "source": "github", "line_count": 191, "max_line_length": 85, "avg_line_length": 31.905759162303664, "alnum_prop": 0.5899245159172957, "repo_name": "inveniosoftware/invenio-communities", "id": "75740a838563bcc34575f133308ecbd4957199e1", "size": "6306", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "invenio_communities/records/records/systemfields/communities/manager.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "21753" }, { "name": "JavaScript", "bytes": "226672" }, { "name": "Python", "bytes": "361750" }, { "name": "Shell", "bytes": "1881" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "b1c5e410981c5196552d6dbcca603b6f", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "a89c8356fd45be199108ea631fbb1a398e247c59", "size": "181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Gentianales/Rubiaceae/Remijia/Remijia trianae/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
/* Dropdown Button */ .dropbtn { padding: 16px; font-size: 16px; width: 50px; height: 50px; border: none; cursor: pointer; } /* Dropdown button on hover & focus */ .dropbtn:hover, .dropbstn:focus { background-color: yellow; } /* The container <div> - needed to position the dropdown content */ .dropdown { position: relative; display: inline-block; } /* Dropdown Content (Hidden by Default) */ .dropdown-content { display: none; position: absolute; background-color: #f9f9f9; min-width: 160px; box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2); } /* Links inside the dropdown */ .dropdown-content a { color: black; padding: 12px 16px; text-decoration: none; display: block; } /* Change color of dropdown links on hover */ .dropdown-content a:hover {background-color: #f1f1f1} /* Show the dropdown menu (use JS to add this class to the .dropdown-content container when the user clicks on the dropdown button) */ .show { display:block; }
{ "content_hash": "cbf2d0739d56c149b80be822aad8ecdf", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 134, "avg_line_length": 20.632653061224488, "alnum_prop": 0.6656775469831849, "repo_name": "cwcllama/Clean-simple-Portfolio-Website", "id": "58b1010f9be8f0c64a3bbe6edba53170e8192491", "size": "1011", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "menu.css", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1667" }, { "name": "HTML", "bytes": "4209" }, { "name": "JavaScript", "bytes": "1589" } ], "symlink_target": "" }
<?php class NewsletterSubscriberController extends AdminController { /** * @return array action filters */ public function filters() { return array( 'accessControl', // perform access control for CRUD operations 'postOnly + delete', // we only allow deletion via POST request ); } /** * Specifies the access control rules. * This method is used by the 'accessControl' filter. * @return array access control rules */ public function accessRules() { return array( array('allow', // allow admin user to perform 'admin' and 'delete' actions 'actions' => array('index', 'view', 'delete', 'create', 'update'), 'roles' => array('admin'), ), array('deny', // deny all users 'users' => array('*'), ), ); } /** * Displays a particular model. * @param integer $id the ID of the model to be displayed */ public function actionView($id) { $this->render('view', array( 'model' => $this->loadModel($id), )); } /** * Creates a new model. * If creation is successful, the browser will be redirected to the 'view' page. */ public function actionCreate() { $model = new NewsletterSubscriber; // Uncomment the following line if AJAX validation is needed // $this->performAjaxValidation($model); if (isset($_POST['NewsletterSubscriber'])) { $model->attributes = $_POST['NewsletterSubscriber']; if ($model->save()) { Yii::app()->user->setFlash('success_msg', "Newsletter Subscriber is created successfully"); //$this->redirect(array('update','id'=>$model->id)); $this->redirect(array('Index')); } } $this->render('create', array( 'model' => $model, )); } /** * Updates a particular model. * If update is successful, the browser will be redirected to the 'view' page. 
* @param integer $id the ID of the model to be updated */ public function actionUpdate($id) { $model = $this->loadModel($id); // Uncomment the following line if AJAX validation is needed // $this->performAjaxValidation($model); if (isset($_POST['NewsletterSubscriber'])) { $model->attributes = $_POST['NewsletterSubscriber']; if ($model->save()) { Yii::app()->user->setFlash('success_msg', "Newsletter Subscriber is updated successfully"); $this->redirect(array('update', 'id' => $model->id)); } } $this->render('update', array( 'model' => $model, )); } /** * Deletes a particular model. * If deletion is successful, the browser will be redirected to the 'admin' page. * @param integer $id the ID of the model to be deleted */ public function actionDelete($id) { $this->loadModel($id)->delete(); // if AJAX request (triggered by deletion via admin grid view), we should not redirect the browser if (!isset($_GET['ajax'])) $this->redirect(isset($_POST['returnUrl']) ? $_POST['returnUrl'] : array('admin')); } /** * Manages all models. */ public function actionIndex() { $model = new NewsletterSubscriber('search'); $model->unsetAttributes(); // clear any default values if (isset($_GET['NewsletterSubscriber'])) $model->attributes = $_GET['NewsletterSubscriber']; $this->render('list', array( 'model' => $model, )); } /** * Returns the data model based on the primary key given in the GET variable. * If the data model is not found, an HTTP exception will be raised. * @param integer $id the ID of the model to be loaded * @return NewsletterSubscriber the loaded model * @throws CHttpException */ public function loadModel($id) { $model = NewsletterSubscriber::model()->findByPk($id); if ($model === null) throw new CHttpException(404, 'The requested page does not exist.'); return $model; } /** * Performs the AJAX validation. 
* @param NewsletterSubscriber $model the model to be validated */ protected function performAjaxValidation($model) { if (isset($_POST['ajax']) && $_POST['ajax'] === 'newsletter-subscriber-form') { echo CActiveForm::validate($model); Yii::app()->end(); } } }
{ "content_hash": "820dfca31ccb4bf271078ffec5346da1", "timestamp": "", "source": "github", "line_count": 142, "max_line_length": 107, "avg_line_length": 32.62676056338028, "alnum_prop": 0.5631340384200302, "repo_name": "atnamus/eropagnil", "id": "2934f007a82cbb3690de69acaf98a9f8c4096aa7", "size": "4633", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "protected/modules/admin/controllers/NewsletterSubscriberController.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "168827" }, { "name": "JavaScript", "bytes": "2456195" }, { "name": "PHP", "bytes": "938028" }, { "name": "Shell", "bytes": "380" } ], "symlink_target": "" }
<?php require 'autoload.php'; use JM\MailReader\MailReader; try { $reader = new MailReader(); /** * Note: port is an additional option to set. * This option is not required. */ $reader->connect([ 'server' => 'foobar.com', 'username' => '[email protected]', 'password' => 'bar' ]); $mailbox = 'Never seen mailbox'; $targetEmailAddress = '[email protected]'; // Create the mailbox (folder) if it does not already exist. if ($reader->mailboxExists($mailbox) === false) { $reader->createMailbox($mailbox); $reader->subscribeMailbox($mailbox); } $messages = $reader->filterUnReadMessagesTo($targetEmailAddress); if (is_array($messages) && count($messages) > 0) { foreach ($messages as $message) { $reader->moveMessage($message['index'], $mailbox); } } // Switch to mailbox (folder) $mailbox $reader->setMailbox($mailbox); $messages = $reader->readMailbox(); // List emails in mailbox (folder) $mailbox if (is_array($messages) && count($messages) > 0) { foreach ($messages as $email) { $header = $email['header']; // Assuming CLI here print $header->Subject."\n"; } } } catch (\Exception $e) { }
{ "content_hash": "5f5f894e3f6ea9cedeb158de7eaca0c6", "timestamp": "", "source": "github", "line_count": 53, "max_line_length": 69, "avg_line_length": 24.641509433962263, "alnum_prop": 0.5627871362940275, "repo_name": "johnnymast/mailreader", "id": "47f0409c56e1079e4aec6e1ec554e74d116e8fa4", "size": "1306", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "demos/movemail.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "14837" } ], "symlink_target": "" }
package nom.bdezonia.zorbage.algorithm; import nom.bdezonia.zorbage.predicate.Equal; import nom.bdezonia.zorbage.tuple.Tuple2; import nom.bdezonia.zorbage.algebra.Algebra; import nom.bdezonia.zorbage.datasource.IndexedDataSource; import nom.bdezonia.zorbage.function.Function1; /** * * @author Barry DeZonia * */ public class SearchN { // do not instantiate private SearchN() { } /** * * @param <T> * @param <U> * @param algebra * @param count * @param value * @param a * @return */ public static <T extends Algebra<T,U>, U> long compute(T algebra, long count, U value, IndexedDataSource<U> a) { return compute(algebra, new Equal<T,U>(algebra), count, value, a); } /** * * @param <T> * @param <U> * @param algebra * @param count * @param cond * @param value * @param a * @return */ public static <T extends Algebra<T,U>, U> long compute(T algebra, Function1<Boolean,Tuple2<U,U>> cond, long count, U value, IndexedDataSource<U> a) { U tmpA = algebra.construct(); Tuple2<U,U> tuple = new Tuple2<U, U>(tmpA, value); long first = 0; long last = a.size(); if (count <= 0) { return first; } for(; first != last; first++) { a.get(first, tmpA); if (!cond.call(tuple)) { continue; } long candidate = first; long cur_count = 0; while (true) { cur_count++; if (cur_count >= count) { // success return candidate; } first++; if (first == last) { // exhausted the list return last; } a.get(first, tmpA); if (!cond.call(tuple)) { // too few in a row break; } } } return last; } }
{ "content_hash": "c7f90e44dd687e1d2218dc183412493d", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 107, "avg_line_length": 18.977011494252874, "alnum_prop": 0.6038764385221078, "repo_name": "bdezonia/zorbage", "id": "d7897718dc10373f2ad2e00c88902e0112b5e39c", "size": "3272", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/nom/bdezonia/zorbage/algorithm/SearchN.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "15927379" }, { "name": "Shell", "bytes": "169" } ], "symlink_target": "" }
package com.amazonaws.codegen.model.intermediate; import com.amazonaws.codegen.model.intermediate.customization.ShapeCustomizationInfo; import com.amazonaws.util.StringUtils; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import static com.amazonaws.codegen.internal.Constants.REQUEST_CLASS_SUFFIX; import static com.amazonaws.codegen.internal.Constants.RESPONSE_CLASS_SUFFIX; import static com.amazonaws.codegen.internal.DocumentationUtils.removeFromEnd; public class ShapeModel extends DocumentationModel { private final String c2jName; // shapeName might be later modified by the customization. private String shapeName; // the local variable name inside marshaller/unmarshaller implementation private String fullyQualifiedName; private String packageName; private boolean deprecated; private String type; private List<String> required; private boolean hasPayloadMember; private boolean hasHeaderMember; private boolean hasStatusCodeMember; private boolean hasStreamingMember; private boolean hasRequiresLengthMember; private boolean wrapper; // For APIG generated requests private String requestSignerClassFqcn; // For AWS service requests private String signerType; private List<String> endpointDiscoveryMembers; private List<MemberModel> members; // Any constructor in addition to the default no-arg private List<ConstructorModel> additionalConstructors; private List<EnumModel> enums; private VariableModel variable; private ShapeMarshaller marshaller; private ShapeUnmarshaller unmarshaller; private String errorCode; private ShapeCustomizationInfo customization = new ShapeCustomizationInfo(); public ShapeModel(@JsonProperty("c2jName") String c2jName) { this.c2jName = c2jName; } public String getShapeName() { return shapeName; } public void setShapeName(String shapeName) { this.shapeName = shapeName; } public String 
getFullyQualifiedName() { return fullyQualifiedName; } public void setFullyQualifiedName(String fullyQualifiedName) { this.fullyQualifiedName = fullyQualifiedName; } public String getPackageName() { return packageName; } public void setPackageName(String packageName) { this.packageName = packageName; } public boolean isDeprecated() { return deprecated; } public void setDeprecated(boolean deprecated) { this.deprecated = deprecated; } public String getC2jName() { return c2jName; } public String getType() { return type; } @JsonIgnore public ShapeType getShapeType() { return ShapeType.fromValue(type); } @JsonIgnore public void setType(ShapeType shapeType) { setType(shapeType.getValue()); } public void setType(String type) { this.type = type; } public ShapeModel withType(String type) { this.type = type; return this; } // Returns the list of C2j member names that are required for this shape. public List<String> getRequired() { return required; } public void setRequired(List<String> required) { this.required = required; } public boolean isHasPayloadMember() { return hasPayloadMember; } public void setHasPayloadMember(boolean hasPayloadMember) { this.hasPayloadMember = hasPayloadMember; } public ShapeModel withHasPayloadMember(boolean hasPayloadMember) { setHasPayloadMember(hasPayloadMember); return this; } /** * @return The member explicitly designated as the payload member */ @JsonIgnore public MemberModel getPayloadMember() { MemberModel payloadMember = null; for (MemberModel member : members) { if (member.getHttp().getIsPayload()) { if (payloadMember == null) { payloadMember = member; } else { throw new IllegalStateException( String.format( "Only one payload member can be explicitly set on %s. This is likely an error in the C2J model", c2jName)); } } } return payloadMember; } /** * @return The list of members whose location is not specified. 
If no payload member is * explicitly set then these members will appear in the payload */ @JsonIgnore public List<MemberModel> getUnboundMembers() { List<MemberModel> unboundMembers = new ArrayList<MemberModel>(); if (members != null) { for (MemberModel member : members) { if (member.getHttp().getLocation() == null) { if (hasPayloadMember) { throw new IllegalStateException(String.format( "C2J Shape %s has both an explicit payload member and unbound (no explicit location) members. " + "This is undefined behavior, verify the correctness of the C2J model", c2jName)); } unboundMembers.add(member); } } } return unboundMembers; } /** * @return True if the shape has an explicit payload member or implicit payload member(s). */ public boolean hasPayloadMembers() { return hasPayloadMember || getUnboundMembers().size() > 0; } public boolean isHasStreamingMember() { return hasStreamingMember; } public void setHasStreamingMember(boolean hasStreamingMember) { this.hasStreamingMember = hasStreamingMember; } public ShapeModel withHasStreamingMember(boolean hasStreamingMember) { setHasStreamingMember(hasStreamingMember); return this; } public boolean isHasRequiresLengthMember() { return hasRequiresLengthMember; } public void setHasRequiresLengthMember(boolean hasRequiresLengthMember) { this.hasRequiresLengthMember = hasRequiresLengthMember; } public ShapeModel withHasRequiresLengthMember(boolean hasRequiresLengthMember) { setHasRequiresLengthMember(hasRequiresLengthMember); return this; } public boolean isHasHeaderMember() { return hasHeaderMember; } public void setHasHeaderMember(boolean hasHeaderMember) { this.hasHeaderMember = hasHeaderMember; } public ShapeModel withHasHeaderMember(boolean hasHeaderMember) { setHasHeaderMember(hasHeaderMember); return this; } public boolean isHasStatusCodeMember() { return hasStatusCodeMember; } public void setHasStatusCodeMember(boolean hasStatusCodeMember) { this.hasStatusCodeMember = hasStatusCodeMember; } public boolean isWrapper() { return 
wrapper; } public void setWrapper(boolean wrapper) { this.wrapper = wrapper; } public ShapeModel withHasStatusCodeMember(boolean hasStatusCodeMember) { setHasStatusCodeMember(hasStatusCodeMember); return this; } public MemberModel getMemberByVariableName(String memberVariableName) { for (MemberModel memberModel: members) { if (memberModel.getVariable().getVariableName().equals(memberVariableName)) return memberModel; } throw new IllegalArgumentException("Unknown member variable name: " + memberVariableName); } public MemberModel getMemberByName(String memberName) { for (MemberModel memberModel : members) { if (memberModel.getName().equals(memberName)) { return memberModel; } } return null; } public MemberModel getMemberByC2jName(String memberName) { for (MemberModel memberModel : members) { if (memberModel.getC2jName().equals(memberName)) { return memberModel; } } return null; } public List<String> getEndpointDiscoveryMembers() { return endpointDiscoveryMembers; } public void setEndpointDiscoveryMembers(List<String> endpointDiscoveryMembers) { this.endpointDiscoveryMembers = endpointDiscoveryMembers; } public void addEndpointDiscoveryMember(String endpointDiscoveryMember) { if (this.endpointDiscoveryMembers == null) { this.endpointDiscoveryMembers = new ArrayList<String>(); } endpointDiscoveryMembers.add(endpointDiscoveryMember); } public List<MemberModel> getMembers() { return members; } public void setMembers(List<MemberModel> members) { this.members = members; } public void addMember(MemberModel member) { if (this.members == null) { this.members = new ArrayList<MemberModel>(); } members.add(member); } /** * Returns true if the shape is empty (ie, there are no members in the shape) */ public boolean hasNoMembers() { return members == null || members.isEmpty(); } @JsonIgnore public List<ConstructorModel> getAdditionalConstructors() { return additionalConstructors; } public void setAdditionalConstructors(List<ConstructorModel> additionalConstructors) { 
this.additionalConstructors = additionalConstructors; } public void addConstructor(ConstructorModel constructor) { if (this.additionalConstructors == null) { this.additionalConstructors = new ArrayList<ConstructorModel>(); } this.additionalConstructors.add(constructor); } public List<EnumModel> getEnums() { return enums; } public void setEnums(List<EnumModel> enums) { this.enums = enums; } public void addEnum(EnumModel enumModel) { if (this.enums == null) { this.enums = new ArrayList<EnumModel>(); } this.enums.add(enumModel); } public VariableModel getVariable() { return variable; } public void setVariable(VariableModel variable) { this.variable = variable; } public ShapeMarshaller getMarshaller() { return marshaller; } public void setMarshaller(ShapeMarshaller marshaller) { this.marshaller = marshaller; } public ShapeUnmarshaller getUnmarshaller() { return unmarshaller; } public void setUnmarshaller(ShapeUnmarshaller unmarshaller) { this.unmarshaller = unmarshaller; } public ShapeCustomizationInfo getCustomization() { return customization; } public void setCustomization(ShapeCustomizationInfo customization) { this.customization = customization; } public Map<String, MemberModel> getMembersAsMap() { final Map<String, MemberModel> shapeMembers = new HashMap<String, MemberModel>(); // Creating a map of shape's members. This map is used below when // fetching the details of a memeber. final List<MemberModel> memberModels = getMembers(); if (memberModels != null) { for (MemberModel model : memberModels) { shapeMembers.put(model.getName(), model); } } return shapeMembers; } /** * Tries to find the member model associated with the given c2j member name from this shape * model. Returns the member model if present else returns null. */ private MemberModel tryFindMemberModelByC2jName(String memberC2jName, boolean ignoreCase) { final List<MemberModel> memberModels = getMembers(); final String expectedName = ignoreCase ? 
StringUtils.lowerCase(memberC2jName) : memberC2jName; if (memberModels != null) { for (MemberModel member : memberModels) { String actualName = ignoreCase ? StringUtils.lowerCase(member.getC2jName()) : member.getC2jName(); if (expectedName.equals(actualName)) { return member; } } } return null; } /** * Returns the member model associated with the given c2j member name from this shape model. */ public MemberModel findMemberModelByC2jName(String memberC2jName) { MemberModel model = tryFindMemberModelByC2jName(memberC2jName, false); if (model == null) { throw new IllegalArgumentException(memberC2jName + " member (c2j name) does not exist in the shape."); } return model; } /** * Takes in the c2j member name as input and removes if the shape contains a member with the * given name. Return false otherwise. */ public boolean removeMemberByC2jName(String memberC2jName, boolean ignoreCase) { // Implicitly depending on the default equals and hashcode // implementation of the class MemberModel MemberModel model = tryFindMemberModelByC2jName(memberC2jName, ignoreCase); return model == null ? false : members.remove(model); } /** * Returns the enum model for the given enum value. * Returns null if no such enum value exists. 
*/ public EnumModel findEnumModelByValue(String enumValue) { if (enums != null) { for (EnumModel enumModel : enums) { if (enumValue.equals(enumModel.getValue())) { return enumModel; } } } return null; } @JsonIgnore public String getDocumentationShapeName() { switch (getShapeType()) { case Request: return removeFromEnd(shapeName, REQUEST_CLASS_SUFFIX); case Response: return removeFromEnd(shapeName, RESPONSE_CLASS_SUFFIX); default: return c2jName; } } @Override public String toString() { return shapeName; } public String getErrorCode() { return errorCode; } public void setErrorCode(String errorCode) { this.errorCode = errorCode; } public boolean isRequestSignerAware() { return requestSignerClassFqcn != null; } public String getRequestSignerClassFqcn() { return requestSignerClassFqcn; } public boolean isSignerAware() { return getSignerType() != null; } public String getSignerType() { return signerType; } public void setSignerType(String signerType) { this.signerType = signerType; } public void setRequestSignerClassFqcn(String authorizerClass) { this.requestSignerClassFqcn = authorizerClass; } }
{ "content_hash": "834afd86c8bfadcb2fd7231c04bdf0e7", "timestamp": "", "source": "github", "line_count": 506, "max_line_length": 132, "avg_line_length": 29.782608695652176, "alnum_prop": 0.6448573324485734, "repo_name": "aws/aws-sdk-java", "id": "d2876da3c5a038c44358c7aa46e5d0716c814aa2", "size": "15653", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "aws-java-sdk-code-generator/src/main/java/com/amazonaws/codegen/model/intermediate/ShapeModel.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
Calc::Calc(QObject* parent) : QObject(parent) { ; } void Calc::readOut() { cout << "@ readOut()" << endl; QString all; all.append(process->readAllStandardOutput()); emit message(all); } void Calc::readErr() { cout << "@ readErr()" << endl; QString all; all.append(process->readAllStandardError()); emit message(all); } void Calc::doCalc() { QProcess* process = new QProcess(); this->process = process; process->setWorkingDirectory("res"); //process->start("./OutputTest.bash"); process->start("./CatkinTest.bash", QStringList() << "--color"); process->waitForStarted(); QObject::connect(process, SIGNAL(readyReadStandardOutput()), this, SLOT(readOut())); QObject::connect(process, SIGNAL(readyReadStandardError()), this, SLOT(readErr())); emit message("finished!"); } Calc::~Calc() { ; }
{ "content_hash": "3547e86f0e278a4be5cf912abe2ff817", "timestamp": "", "source": "github", "line_count": 46, "max_line_length": 88, "avg_line_length": 19.108695652173914, "alnum_prop": 0.6222980659840728, "repo_name": "DeepBlue14/rqt_ide", "id": "0c219e0a9fa7864f94111486842138c0d5b3052d", "size": "899", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/thread_tests/src/Calc.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "13672" }, { "name": "C++", "bytes": "1557042" }, { "name": "CMake", "bytes": "11201" }, { "name": "CSS", "bytes": "6669" }, { "name": "Makefile", "bytes": "2645477" }, { "name": "Python", "bytes": "1379" }, { "name": "QMake", "bytes": "51941" }, { "name": "Shell", "bytes": "88679" } ], "symlink_target": "" }
package fr.janalyse.ssh import java.io.File trait AllOperations extends ShellOperations with TransfertOperations { /** * Recursively get a remote directory to a local destination * @param remote remote path, file or directory. * @param dest local destination directory, it it doesn't exist then it is created */ def rreceive(remote:String, dest:File): Unit = { def worker(curremote: String, curdest: File):Unit = { if (isDirectory(curremote)) { for { found <- ls(curremote) newremote = curremote + "/" + found newdest = new File(curdest, found) } { curdest.mkdirs worker(curremote=newremote, curdest=newdest) } } else receive(curremote, curdest) } worker(curremote=remote, curdest=dest) } /** * Recursively send a local directory to a remote destination * @param src local path, file or directory * @param remote remote destination directory, if it doesn't exist then it is created */ def rsend(src:File, remote:String): Unit = { def worker(cursrc: File, curremote: String): Unit = { if (cursrc.isDirectory) { for { found <- cursrc.listFiles newsrc = new File(cursrc, found.getName) newremote = curremote + "/" + found.getName } { mkdir(curremote) worker(cursrc=newsrc, curremote=newremote) } } else send(cursrc, curremote) } worker(cursrc=src, curremote=remote) } }
{ "content_hash": "5e0d472b86d996abd7488cb6cfd1ee55", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 89, "avg_line_length": 32.30612244897959, "alnum_prop": 0.5975994946304485, "repo_name": "dacr/jassh", "id": "668acc00fc191317837b9ee2b75456ca29d3450b", "size": "1583", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/scala/fr/janalyse/ssh/AllOperations.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Scala", "bytes": "162437" }, { "name": "Shell", "bytes": "1038" } ], "symlink_target": "" }
package org.oscm.vo; import org.oscm.types.enumtypes.OperationParameterType; /** * Represents a parameter of an operation defined for a technical service. * */ public class VOServiceOperationParameter extends BaseVO { private static final long serialVersionUID = -4857777143451365879L; /** * The identifier of the parameter. */ private String parameterId; /** * The name of the parameter. */ private String parameterName; /** * Indicates if the parameter is mandatory. */ private boolean mandatory; /** * The type of the parameter values. */ private OperationParameterType type; /** * The value of the parameter. */ private String parameterValue; /** * Retrieves the identifier of the parameter. * * @return the parameter ID */ public String getParameterId() { return parameterId; } /** * Sets the identifier of the parameter. * * @param parameterId * the parameter ID */ public void setParameterId(String parameterId) { this.parameterId = parameterId; } /** * Retrieves the name of the parameter. * * @return the parameter name */ public String getParameterName() { return parameterName; } /** * Sets the name of the parameter. * * @param parameterName * the parameter name */ public void setParameterName(String parameterName) { this.parameterName = parameterName; } /** * Retrieves the current value set for the parameter. * * @return the parameter value */ public String getParameterValue() { return parameterValue; } /** * Sets the value of the parameter. * * @param parameterValue * the parameter value */ public void setParameterValue(String parameterValue) { this.parameterValue = parameterValue; } /** * Returns whether the parameter must be set for the service operation. * * @return <code>true</code> if the parameter is mandatory, * <code>false</code> otherwise */ public boolean isMandatory() { return mandatory; } /** * Specifies whether the parameter must be set for the service operation. 
* * @param mandatory * <code>true</code> if the parameter is mandatory, * <code>false</code> otherwise */ public void setMandatory(boolean mandatory) { this.mandatory = mandatory; } /** * Retrieves the type of the parameter values. * * @return the type */ public OperationParameterType getType() { return type; } /** * Sets the type of the parameter values. * * @param type * the type */ public void setType(OperationParameterType type) { this.type = type; } }
{ "content_hash": "506d266dee42177395b3140f5727c474", "timestamp": "", "source": "github", "line_count": 137, "max_line_length": 77, "avg_line_length": 21.927007299270073, "alnum_prop": 0.5865512649800266, "repo_name": "opetrovski/development", "id": "f0ea68415869617ac6997f32638b7ad308d90fb8", "size": "3457", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "oscm-extsvc/javasrc/org/oscm/vo/VOServiceOperationParameter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "5304" }, { "name": "Batchfile", "bytes": "273" }, { "name": "CSS", "bytes": "389539" }, { "name": "HTML", "bytes": "1884410" }, { "name": "Java", "bytes": "41884121" }, { "name": "JavaScript", "bytes": "259479" }, { "name": "PHP", "bytes": "620531" }, { "name": "PLSQL", "bytes": "4929" }, { "name": "SQLPL", "bytes": "25278" }, { "name": "Shell", "bytes": "3250" } ], "symlink_target": "" }
from ._disks_operations import DisksOperations from ._snapshots_operations import SnapshotsOperations from ._disk_encryption_sets_operations import DiskEncryptionSetsOperations from ._disk_accesses_operations import DiskAccessesOperations from ._disk_restore_point_operations import DiskRestorePointOperations from ._operations import Operations from ._availability_sets_operations import AvailabilitySetsOperations from ._proximity_placement_groups_operations import ProximityPlacementGroupsOperations from ._dedicated_host_groups_operations import DedicatedHostGroupsOperations from ._dedicated_hosts_operations import DedicatedHostsOperations from ._ssh_public_keys_operations import SshPublicKeysOperations from ._virtual_machine_extension_images_operations import VirtualMachineExtensionImagesOperations from ._virtual_machine_extensions_operations import VirtualMachineExtensionsOperations from ._virtual_machine_images_operations import VirtualMachineImagesOperations from ._virtual_machine_images_edge_zone_operations import VirtualMachineImagesEdgeZoneOperations from ._usage_operations import UsageOperations from ._virtual_machines_operations import VirtualMachinesOperations from ._virtual_machine_scale_sets_operations import VirtualMachineScaleSetsOperations from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations from ._images_operations import ImagesOperations from ._virtual_machine_scale_set_extensions_operations import VirtualMachineScaleSetExtensionsOperations from ._virtual_machine_scale_set_rolling_upgrades_operations import VirtualMachineScaleSetRollingUpgradesOperations from ._virtual_machine_scale_set_vm_extensions_operations import VirtualMachineScaleSetVMExtensionsOperations from ._virtual_machine_scale_set_vms_operations import VirtualMachineScaleSetVMsOperations from ._log_analytics_operations import LogAnalyticsOperations from ._virtual_machine_run_commands_operations import VirtualMachineRunCommandsOperations from 
._virtual_machine_scale_set_vm_run_commands_operations import VirtualMachineScaleSetVMRunCommandsOperations from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ "DisksOperations", "SnapshotsOperations", "DiskEncryptionSetsOperations", "DiskAccessesOperations", "DiskRestorePointOperations", "Operations", "AvailabilitySetsOperations", "ProximityPlacementGroupsOperations", "DedicatedHostGroupsOperations", "DedicatedHostsOperations", "SshPublicKeysOperations", "VirtualMachineExtensionImagesOperations", "VirtualMachineExtensionsOperations", "VirtualMachineImagesOperations", "VirtualMachineImagesEdgeZoneOperations", "UsageOperations", "VirtualMachinesOperations", "VirtualMachineScaleSetsOperations", "VirtualMachineSizesOperations", "ImagesOperations", "VirtualMachineScaleSetExtensionsOperations", "VirtualMachineScaleSetRollingUpgradesOperations", "VirtualMachineScaleSetVMExtensionsOperations", "VirtualMachineScaleSetVMsOperations", "LogAnalyticsOperations", "VirtualMachineRunCommandsOperations", "VirtualMachineScaleSetVMRunCommandsOperations", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk()
{ "content_hash": "f7b3a5228039aaff4f3daca2533d1aa0", "timestamp": "", "source": "github", "line_count": 63, "max_line_length": 115, "avg_line_length": 53.03174603174603, "alnum_prop": 0.83507931756959, "repo_name": "Azure/azure-sdk-for-python", "id": "5d0d019d7de4917af3d21c1d778d285e2a266c3a", "size": "3809", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
require "spec_helper" require "gds_api/test_helpers/worldwide" RSpec.describe Healthchecks::RegistriesCache do include GdsApi::TestHelpers::Worldwide include TaxonomySpecHelper include GdsApi::TestHelpers::ContentStore include GovukContentSchemaExamples include RegistrySpecHelper subject(:check) { described_class.new } before :each do Rails.cache.clear end after { Rails.cache.clear } context "All Registries have cached data" do before do stub_worldwide_api_has_locations %w[hogwarts privet-drive diagon-alley] topic_taxonomy_has_taxons stub_people_registry_request stub_roles_registry_request stub_manuals_registry_request stub_organisations_registry_request stub_topical_events_registry_request Registries::BaseRegistries.new.refresh_cache end it "has an OK status" do expect(check.status).to eq :ok expect(check.message).to eq "OK" end end context "Registries caches are empty" do it "has a critical status" do expect(check.status).to eq :critical expect(check.message).to eq "The following registry caches are empty: world_locations, all_part_of_taxonomy_tree, part_of_taxonomy_tree, people, roles, organisations, manual, full_topic_taxonomy, topical_events." end end end
{ "content_hash": "83764ba472a49ef552196b5aa3106b83", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 218, "avg_line_length": 29.84090909090909, "alnum_prop": 0.7319116527037319, "repo_name": "alphagov/finder-frontend", "id": "c58b286214e9f998623042a296695ffc02fa0128", "size": "1313", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "spec/lib/healthchecks/registries_cache_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Dockerfile", "bytes": "664" }, { "name": "Gherkin", "bytes": "13670" }, { "name": "HTML", "bytes": "34851" }, { "name": "JavaScript", "bytes": "138487" }, { "name": "Procfile", "bytes": "65" }, { "name": "Ruby", "bytes": "517317" }, { "name": "SCSS", "bytes": "18505" }, { "name": "Shell", "bytes": "730" } ], "symlink_target": "" }
`td-chart-series[td-wordCloud]` element generates a graph series echarts visualization inside a `td-chart`. Its the equivalent of creating a JS series object `type="wordCloud"` in echarts. ## API Summary #### Inputs - config?: any - Sets the JS config object if you choose to not use the property inputs. - Note: [config] input properties will override input values There are also lots of property inputs like: - id?: string - It can be used to refer the component in option or API. - name: string - Series name used for displaying in tooltip and filtering with legend. - value: number - Series value used for varying the size of the labels. - color?: string | () => string - Global color for the series. It can be generated by a function - gridSize?: number - Size of the grid in pixels for marking the availability of the canvas the larger the grid size, the bigger the gap between words. And so many more.. for more info [click here](https://github.com/ecomfe/echarts-wordcloud) ## Setup Import the [CovalentWordcloudEchartsModule] in your NgModule: ```typescript import { CovalentBaseEchartsModule } from '@covalent/echarts/base'; import { CovalentWordcloudEchartsModule } from '@covalent/echarts/wordcloud'; @NgModule({ imports: [ CovalentBaseEchartsModule, CovalentWordcloudEchartsModule, ... ], ... }) export class MyModule {} ``` ## Usage Basic Example: ```html <td-chart [style.height.px]="500"> <td-chart-series td-wordCloud [shape]="circle" [sizeRange]="[12, 60]" [rotationRange]="[-90, 90]" [rotationStep]="45" [gridSize]="8" [drawOutOfBound]="false" [textStyle]="{ normal: { fontFamily: 'sans-serif', fontWeight: 'bold', color: '#ccc' }, emphasis: { shadowBlur: 10, shadowColor: '#333' } }" [data]="[{ name: 'Benjamin Abraham', value: 366, normal: { fontFamily: 'arial', fontWeight: '100', color: 'red' }, }, { name: 'Mary Stuart', value: 366 }, { name: 'John Doe', value: 350 }]" > </td-chart-series> </td-chart> ```
{ "content_hash": "108e5ca3e8abeca4cb86c8fdf8404148", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 188, "avg_line_length": 28.79310344827586, "alnum_prop": 0.555688622754491, "repo_name": "Teradata/covalent", "id": "f5b8ec44bbbbe88cecd9cb710517787a3019a984", "size": "2538", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "libs/angular-echarts/wordcloud/README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "25936" }, { "name": "HTML", "bytes": "400615" }, { "name": "JavaScript", "bytes": "142209" }, { "name": "SCSS", "bytes": "375607" }, { "name": "Shell", "bytes": "1377" }, { "name": "TypeScript", "bytes": "1653078" } ], "symlink_target": "" }
layout: default title: CAS - Configuration Extensions --- # Extending CAS Configuration Being a [Spring Boot](https://github.com/spring-projects/spring-boot) application at its core, designing and extending CAS configuration components very much comes down to [the following guide](https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-developing-auto-configuration.html) some aspects of which are briefly highlighted in this document. ## Configuration Components This is the recommended approach to create additional Spring beans, override existing ones and simply inject your own custom behavior into the CAS application runtime. Given CAS’ adoption of Spring Boot, most if not all of the old XML configuration is transformed into `@Configuration` components. These are classes declared by each relevant module that are automatically picked up at runtime whose job is to declare and configure beans and register them into the application context. Another way of thinking about it is, components that are decorated with `@Configuration` are loose equivalents of old XML configuration files that are highly organized where `<bean>` tags are translated to java methods tagged with `@Bean` and configured dynamically. ### Design To design your own configuration class, take inspiration from the following sample: ```java package org.apereo.cas.custom.config; @Configuration("SomethingConfiguration") @EnableConfigurationProperties(CasConfigurationProperties.class) public class SomethingConfiguration { @Autowired private CasConfigurationProperties casProperties; @Autowired @Qualifier("someOtherBeanId") private SomeBean someOtherBeanId; @RefreshScope @Bean public MyBean myBean() { return new MyBean(); } } ``` - The `@Bean` definitions can also be tagged with `@RefreshScope` to become auto-reloadable when the CAS context is refreshed as a result of an external property change. 
- `@Configuration` classes can be assigned an order with `@Order(1984)` which would place them in an ordered queue waiting to be loaded in that sequence. - To be more explicit, `@Configuration` classes can also be loaded exactly before/after another `@Configuration` component with `@AutoConfigureBefore` or `@AutoConfigureAfter` annotations. ### Register How are `@Configuration` components picked up? Each CAS module declares its set of configuration components as such, per guidelines [laid out by Spring Boot](https://docs.spring.io/spring-boot/docs/current/reference/html/boot-features-developing-auto-configuration.html): - Create a `src/main/resources/META-INF/spring.factories` file - Add the following into the file: ```properties org.springframework.boot.autoconfigure.EnableAutoConfiguration=org.apereo.cas.custom.config.SomethingConfiguration ``` ### Overrides What if you needed to override the definition of a CAS-provided bean and replace it entirely with your own? This is where `@Conditional` components come to aid. Most component/bean definitions in CAS are registered with some form of `@Conditional` tag that indicates to the bootstrapping process to ignore their creation, if *a bean definition with the same id* is already defined. This means you can create your own configuration class, register it and the design a `@Bean` definition only to have the context utilize yours rather than what ships with CAS by default. ## CAS Properties The [collection of CAS-provided settings](Configuration-Properties.html) are all encapsulated inside a `CasConfigurationProperties` component. This is a parent class that brings all elements of the entire CAS platform together and binds values to the relevant fields inside in a very type-safe manner. The [configuration binding](Configuration-Server-Management.html) is typically done via `@EnableConfigurationProperties(CasConfigurationProperties.class)` on the actual configuration class. 
<div class="alert alert-info"><strong>Prefix Notation</strong><p>Note that all CAS-provided settings exclusively begin with the prefix <code>cas</code>. Other frameworks and packages upon which CAS depends may present their own configuration naming scheme. Note the difference.</p></div> If you wish to design your own and extend the CAS configuration file, you can surely follow the same approach with the `@EnableConfigurationProperties` annotation or use the good ol' `@Value`.
{ "content_hash": "f8fd617e023427f59415abb8a988404d", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 583, "avg_line_length": 64.80882352941177, "alnum_prop": 0.7978216473791695, "repo_name": "dodok1/cas", "id": "c135e1de1d117b175e51169f0861c19ac8d0acaf", "size": "4413", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "docs/cas-server-documentation/installation/Configuration-Management-Extensions.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "124789" }, { "name": "Groovy", "bytes": "14591" }, { "name": "HTML", "bytes": "145115" }, { "name": "Java", "bytes": "7704384" }, { "name": "JavaScript", "bytes": "169109" }, { "name": "Shell", "bytes": "19481" } ], "symlink_target": "" }
module Axlsx
  # Serializes the pivot cache definition part (pivotCacheDefinition) that
  # backs a pivot table.
  # @note Worksheet#add_pivot_table is the recommended way to create pivot tables for your worksheets.
  # @see README for examples
  class PivotTableCacheDefinition
    include Axlsx::OptionsParser

    # Creates a new PivotTableCacheDefinition object
    # @param [PivotTable] pivot_table The pivot table this cache definition belongs to
    def initialize(pivot_table)
      @pivot_table = pivot_table
    end

    # The pivot table this cache definition serializes data for
    # @return [PivotTable]
    attr_reader :pivot_table

    # The index of the owning pivot table in the workbook's pivot tables collection
    # @return [Integer]
    def index
      pivot_table.sheet.workbook.pivot_tables.index(pivot_table)
    end

    # The part name for this cache definition
    # @return [String]
    def pn
      # No need to wrap the format result in another interpolation -
      # String#% already returns a String.
      PIVOT_TABLE_CACHE_DEFINITION_PN % (index + 1)
    end

    # The 1-based cache id used to pair the pivot table with this cache
    # @return [Integer]
    def cache_id
      index + 1
    end

    # The relationship reference id for this cache definition
    # @return [String]
    def rId
      "rId#{index + 1}"
    end

    # Serializes the object
    # @param [String] str the string to append the XML to
    # @return [String] the XML for the pivotCacheDefinition part
    def to_xml_string(str = '')
      str << '<?xml version="1.0" encoding="UTF-8"?>'
      str << '<pivotCacheDefinition xmlns="' << XML_NS << '" xmlns:r="' << XML_NS_R << '" invalid="1" refreshOnLoad="1" recordCount="0">'
      str << '<cacheSource type="worksheet">'
      # NOTE(review): the source sheet name is hard-coded to "Data Sheet" -
      # verify this matches the worksheet the pivot table range refers to.
      str << '<worksheetSource ref="' << pivot_table.range << '" sheet="Data Sheet"/>'
      str << '</cacheSource>'
      str << '<cacheFields count="' << pivot_table.header_cells_count.to_s << '">'
      pivot_table.header_cells.each do |cell|
        str << '<cacheField name="' << cell.value << '" numFmtId="0">'
        str << '<sharedItems count="0">'
        str << '</sharedItems>'
        str << '</cacheField>'
      end
      str << '</cacheFields>'
      str << '</pivotCacheDefinition>'
    end
  end
end
{ "content_hash": "fccab33f0137857a1688596e4dff1c3a", "timestamp": "", "source": "github", "line_count": 62, "max_line_length": 137, "avg_line_length": 30.161290322580644, "alnum_prop": 0.5957219251336898, "repo_name": "nilbus/axlsx", "id": "5a6c74423f47ce5cebd791deb2360f165e674d20", "size": "1888", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/axlsx/workbook/worksheet/pivot_table_cache_definition.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "750660" } ], "symlink_target": "" }
namespace device { PowerMonitorMessageBroadcaster::PowerMonitorMessageBroadcaster() { base::PowerMonitor* power_monitor = base::PowerMonitor::Get(); if (power_monitor) power_monitor->AddObserver(this); } PowerMonitorMessageBroadcaster::~PowerMonitorMessageBroadcaster() { base::PowerMonitor* power_monitor = base::PowerMonitor::Get(); if (power_monitor) power_monitor->RemoveObserver(this); } // static void PowerMonitorMessageBroadcaster::Create( device::mojom::PowerMonitorRequest request) { mojo::MakeStrongBinding(base::MakeUnique<PowerMonitorMessageBroadcaster>(), std::move(request)); } void PowerMonitorMessageBroadcaster::SetClient( device::mojom::PowerMonitorClientPtr power_monitor_client) { power_monitor_client_ = std::move(power_monitor_client); base::PowerMonitor* power_monitor = base::PowerMonitor::Get(); // Unit tests does not initialize the PowerMonitor. if (power_monitor) OnPowerStateChange(power_monitor->IsOnBatteryPower()); } void PowerMonitorMessageBroadcaster::OnPowerStateChange(bool on_battery_power) { if (power_monitor_client_) { power_monitor_client_->PowerStateChange(on_battery_power); } } void PowerMonitorMessageBroadcaster::OnSuspend() { if (power_monitor_client_) { power_monitor_client_->Suspend(); } } void PowerMonitorMessageBroadcaster::OnResume() { if (power_monitor_client_) { power_monitor_client_->Resume(); } } } // namespace device
{ "content_hash": "855873a80d7474dd4acf00b6f1e7f8b0", "timestamp": "", "source": "github", "line_count": 49, "max_line_length": 80, "avg_line_length": 30.122448979591837, "alnum_prop": 0.736449864498645, "repo_name": "google-ar/WebARonARCore", "id": "e8f1b1b0dbbfea97da77c9d1ea6440b8c3663098", "size": "1845", "binary": false, "copies": "2", "ref": "refs/heads/webarcore_57.0.2987.5", "path": "device/power_monitor/power_monitor_message_broadcaster.cc", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
// Returns the window style for the specified window
//
static DWORD getWindowStyle(const _GLFWwindow* window)
{
    DWORD style = WS_CLIPSIBLINGS | WS_CLIPCHILDREN;

    if (window->monitor)
        style |= WS_POPUP;
    else
    {
        style |= WS_SYSMENU | WS_MINIMIZEBOX;

        if (window->decorated)
        {
            style |= WS_CAPTION;

            if (window->resizable)
                style |= WS_MAXIMIZEBOX | WS_THICKFRAME;
        }
        else
            style |= WS_POPUP;
    }

    return style;
}

// Returns the extended window style for the specified window
//
static DWORD getWindowExStyle(const _GLFWwindow* window)
{
    DWORD style = WS_EX_APPWINDOW;

    if (window->monitor || window->floating)
        style |= WS_EX_TOPMOST;

    return style;
}

// Returns the image whose area most closely matches the desired one
//
static const GLFWimage* chooseImage(int count, const GLFWimage* images,
                                    int width, int height)
{
    int i, leastDiff = INT_MAX;
    const GLFWimage* closest = NULL;

    for (i = 0;  i < count;  i++)
    {
        const int currDiff = abs(images[i].width * images[i].height -
                                 width * height);
        if (currDiff < leastDiff)
        {
            closest = images + i;
            leastDiff = currDiff;
        }
    }

    return closest;
}

// Creates an RGBA icon or cursor
//
static HICON createIcon(const GLFWimage* image, int xhot, int yhot, GLFWbool icon)
{
    int i;
    HDC dc;
    HICON handle;
    HBITMAP color, mask;
    BITMAPV5HEADER bi;
    ICONINFO ii;
    unsigned char* target = NULL;
    unsigned char* source = image->pixels;

    ZeroMemory(&bi, sizeof(bi));
    bi.bV5Size        = sizeof(bi);
    bi.bV5Width       = image->width;
    // Negative height selects a top-down DIB so rows match the source order
    bi.bV5Height      = -image->height;
    bi.bV5Planes      = 1;
    bi.bV5BitCount    = 32;
    bi.bV5Compression = BI_BITFIELDS;
    bi.bV5RedMask     = 0x00ff0000;
    bi.bV5GreenMask   = 0x0000ff00;
    bi.bV5BlueMask    = 0x000000ff;
    bi.bV5AlphaMask   = 0xff000000;

    dc = GetDC(NULL);
    color = CreateDIBSection(dc,
                             (BITMAPINFO*) &bi,
                             DIB_RGB_COLORS,
                             (void**) &target,
                             NULL,
                             (DWORD) 0);
    ReleaseDC(NULL, dc);

    if (!color)
    {
        _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                             "Win32: Failed to create RGBA bitmap");
        return NULL;
    }

    mask = CreateBitmap(image->width, image->height, 1, 1, NULL);
    if (!mask)
    {
        _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                             "Win32: Failed to create mask bitmap");
        DeleteObject(color);
        return NULL;
    }

    // Copy pixels, swapping the red and blue channels (RGBA -> BGRA)
    for (i = 0;  i < image->width * image->height;  i++)
    {
        target[0] = source[2];
        target[1] = source[1];
        target[2] = source[0];
        target[3] = source[3];
        target += 4;
        source += 4;
    }

    ZeroMemory(&ii, sizeof(ii));
    ii.fIcon    = icon;
    ii.xHotspot = xhot;
    ii.yHotspot = yhot;
    ii.hbmMask  = mask;
    ii.hbmColor = color;

    handle = CreateIconIndirect(&ii);

    DeleteObject(color);
    DeleteObject(mask);

    if (!handle)
    {
        if (icon)
        {
            _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                                 "Win32: Failed to create icon");
        }
        else
        {
            _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                                 "Win32: Failed to create cursor");
        }
    }

    return handle;
}

// Translate content area size to full window size according to styles and DPI
//
static void getFullWindowSize(DWORD style, DWORD exStyle,
                              int contentWidth, int contentHeight,
                              int* fullWidth, int* fullHeight,
                              UINT dpi)
{
    RECT rect = { 0, 0, contentWidth, contentHeight };

    if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32())
        AdjustWindowRectExForDpi(&rect, style, FALSE, exStyle, dpi);
    else
        AdjustWindowRectEx(&rect, style, FALSE, exStyle);

    *fullWidth = rect.right - rect.left;
    *fullHeight = rect.bottom - rect.top;
}

// Enforce the content area aspect ratio based on which edge is being dragged
//
static void applyAspectRatio(_GLFWwindow* window, int edge, RECT* area)
{
    int xoff, yoff;
    UINT dpi = USER_DEFAULT_SCREEN_DPI;
    const float ratio = (float) window->numer / (float) window->denom;

    if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32())
        dpi = GetDpiForWindow(window->win32.handle);

    // Frame size is needed to convert between full and content area sizes
    getFullWindowSize(getWindowStyle(window), getWindowExStyle(window),
                      0, 0, &xoff, &yoff, dpi);

    if (edge == WMSZ_LEFT  || edge == WMSZ_BOTTOMLEFT ||
        edge == WMSZ_RIGHT || edge == WMSZ_BOTTOMRIGHT)
    {
        area->bottom = area->top + yoff +
            (int) ((area->right - area->left - xoff) / ratio);
    }
    else if (edge == WMSZ_TOPLEFT || edge == WMSZ_TOPRIGHT)
    {
        area->top = area->bottom - yoff -
            (int) ((area->right - area->left - xoff) / ratio);
    }
    else if (edge == WMSZ_TOP || edge == WMSZ_BOTTOM)
    {
        area->right = area->left + xoff +
            (int) ((area->bottom - area->top - yoff) * ratio);
    }
}

// Updates the cursor image according to its cursor mode
//
static void updateCursorImage(_GLFWwindow* window)
{
    if (window->cursorMode == GLFW_CURSOR_NORMAL)
    {
        if (window->cursor)
            SetCursor(window->cursor->win32.handle);
        else
            SetCursor(LoadCursorW(NULL, IDC_ARROW));
    }
    else
        SetCursor(NULL);
}

// Updates the cursor clip rect
//
static void updateClipRect(_GLFWwindow* window)
{
    if (window)
    {
        RECT clipRect;
        GetClientRect(window->win32.handle, &clipRect);
        ClientToScreen(window->win32.handle, (POINT*) &clipRect.left);
        ClientToScreen(window->win32.handle, (POINT*) &clipRect.right);
        ClipCursor(&clipRect);
    }
    else
        ClipCursor(NULL);
}

// Apply disabled cursor mode to a focused window
//
static void disableCursor(_GLFWwindow* window)
{
    // Usage page 0x01 / usage 0x02 is the HID mouse; raw input is used for
    // cursor motion while the cursor is disabled
    const RAWINPUTDEVICE rid = { 0x01, 0x02, 0, window->win32.handle };

    _glfw.win32.disabledCursorWindow = window;
    _glfwPlatformGetCursorPos(window,
                              &_glfw.win32.restoreCursorPosX,
                              &_glfw.win32.restoreCursorPosY);
    updateCursorImage(window);
    _glfwCenterCursorInContentArea(window);
    updateClipRect(window);

    if (!RegisterRawInputDevices(&rid, 1, sizeof(rid)))
    {
        _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                             "Win32: Failed to register raw input device");
    }
}

// Exit disabled cursor mode for the specified window
//
static void enableCursor(_GLFWwindow* window)
{
    const RAWINPUTDEVICE rid = { 0x01, 0x02, RIDEV_REMOVE, NULL };

    _glfw.win32.disabledCursorWindow = NULL;
    updateClipRect(NULL);
    // Restore the cursor position saved when the cursor was disabled
    _glfwPlatformSetCursorPos(window,
                              _glfw.win32.restoreCursorPosX,
                              _glfw.win32.restoreCursorPosY);
    updateCursorImage(window);

    if (!RegisterRawInputDevices(&rid, 1, sizeof(rid)))
    {
        _glfwInputErrorWin32(GLFW_PLATFORM_ERROR,
                             "Win32: Failed to remove raw input device");
    }
}

// Returns whether the cursor is in the content area of the specified window
//
static GLFWbool cursorInContentArea(_GLFWwindow* window)
{
    RECT area;
    POINT pos;

    if (!GetCursorPos(&pos))
        return GLFW_FALSE;

    if (WindowFromPoint(pos) != window->win32.handle)
        return GLFW_FALSE;

    GetClientRect(window->win32.handle, &area);
    ClientToScreen(window->win32.handle, (POINT*) &area.left);
    ClientToScreen(window->win32.handle, (POINT*) &area.right);

    return PtInRect(&area, pos);
}

// Update native window styles to match attributes
//
static void updateWindowStyles(const _GLFWwindow* window)
{
    RECT rect;
    DWORD style = GetWindowLongW(window->win32.handle, GWL_STYLE);
    style &= ~(WS_OVERLAPPEDWINDOW | WS_POPUP);
    style |= getWindowStyle(window);

    GetClientRect(window->win32.handle, &rect);

    if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32())
    {
        AdjustWindowRectExForDpi(&rect, style, FALSE,
                                 getWindowExStyle(window),
                                 GetDpiForWindow(window->win32.handle));
    }
    else
        AdjustWindowRectEx(&rect, style, FALSE, getWindowExStyle(window));

    ClientToScreen(window->win32.handle, (POINT*) &rect.left);
    ClientToScreen(window->win32.handle, (POINT*) &rect.right);
    SetWindowPos(window->win32.handle, HWND_TOP,
                 rect.left, rect.top,
                 rect.right - rect.left, rect.bottom - rect.top,
                 SWP_FRAMECHANGED | SWP_NOACTIVATE | SWP_NOZORDER);
}

// Update window framebuffer transparency
//
static void updateFramebufferTransparency(const _GLFWwindow* window)
{
    BOOL enabled;

    if (!IsWindowsVistaOrGreater())
        return;

    if (SUCCEEDED(DwmIsCompositionEnabled(&enabled)) && enabled)
    {
        HRGN region = CreateRectRgn(0, 0, -1, -1);
        DWM_BLURBEHIND bb = {0};
        bb.dwFlags = DWM_BB_ENABLE | DWM_BB_BLURREGION;
        bb.hRgnBlur = region;
        bb.fEnable = TRUE;

        if (SUCCEEDED(DwmEnableBlurBehindWindow(window->win32.handle, &bb)))
        {
            // Decorated windows don't repaint the transparent background
            // leaving a trail behind animations
            // HACK: Making the window layered with a transparency color key
            //       seems to fix this.  Normally, when specifying
            //       a transparency color key to be used when composing the
            //       layered window, all pixels painted by the window in this
            //       color will be transparent.  That doesn't seem to be the
            //       case anymore, at least when used with blur behind window
            //       plus negative region.
            LONG exStyle = GetWindowLongW(window->win32.handle, GWL_EXSTYLE);
            exStyle |= WS_EX_LAYERED;
            SetWindowLongW(window->win32.handle, GWL_EXSTYLE, exStyle);

            // Using a color key not equal to black to fix the trailing
            // issue.  When set to black, something is making the hit test
            // not resize with the window frame.
            SetLayeredWindowAttributes(window->win32.handle,
                                       RGB(0, 193, 48), 255, LWA_COLORKEY);
        }

        DeleteObject(region);
    }
    else
    {
        LONG exStyle = GetWindowLongW(window->win32.handle, GWL_EXSTYLE);
        exStyle &= ~WS_EX_LAYERED;
        SetWindowLongW(window->win32.handle, GWL_EXSTYLE, exStyle);
        RedrawWindow(window->win32.handle, NULL, NULL,
                     RDW_ERASE | RDW_INVALIDATE | RDW_FRAME);
    }
}

// Retrieves and translates modifier keys
//
static int getKeyMods(void)
{
    int mods = 0;

    if (GetKeyState(VK_SHIFT) & 0x8000)
        mods |= GLFW_MOD_SHIFT;
    if (GetKeyState(VK_CONTROL) & 0x8000)
        mods |= GLFW_MOD_CONTROL;
    if (GetKeyState(VK_MENU) & 0x8000)
        mods |= GLFW_MOD_ALT;
    if ((GetKeyState(VK_LWIN) | GetKeyState(VK_RWIN)) & 0x8000)
        mods |= GLFW_MOD_SUPER;
    if (GetKeyState(VK_CAPITAL) & 1)
        mods |= GLFW_MOD_CAPS_LOCK;
    if (GetKeyState(VK_NUMLOCK) & 1)
        mods |= GLFW_MOD_NUM_LOCK;

    return mods;
}

// Retrieves and translates modifier keys
// NOTE(review): duplicates getKeyMods except that it queries the
//               asynchronous key state (GetAsyncKeyState) instead of the
//               message-queue key state (GetKeyState)
//
static int getAsyncKeyMods(void)
{
    int mods = 0;

    if (GetAsyncKeyState(VK_SHIFT) & 0x8000)
        mods |= GLFW_MOD_SHIFT;
    if (GetAsyncKeyState(VK_CONTROL) & 0x8000)
        mods |= GLFW_MOD_CONTROL;
    if (GetAsyncKeyState(VK_MENU) & 0x8000)
        mods |= GLFW_MOD_ALT;
    if ((GetAsyncKeyState(VK_LWIN) | GetAsyncKeyState(VK_RWIN)) & 0x8000)
        mods |= GLFW_MOD_SUPER;
    if (GetAsyncKeyState(VK_CAPITAL) & 1)
        mods |= GLFW_MOD_CAPS_LOCK;
    if (GetAsyncKeyState(VK_NUMLOCK) & 1)
        mods |= GLFW_MOD_NUM_LOCK;

    return mods;
}

// Translates a Windows key to the corresponding GLFW key
//
static int translateKey(WPARAM wParam, LPARAM lParam)
{
    // The Ctrl keys require special handling
    if (wParam == VK_CONTROL)
    {
        MSG next;
        DWORD time;

        // Right side keys have the extended key bit set
        if (lParam & 0x01000000)
            return GLFW_KEY_RIGHT_CONTROL;

        // HACK: Alt Gr sends Left Ctrl and then Right Alt in close sequence
        //       We only want the Right Alt message, so if the next message is
        //       Right Alt we ignore this (synthetic) Left Ctrl message
        time = GetMessageTime();

        if (PeekMessageW(&next, NULL, 0, 0, PM_NOREMOVE))
        {
            if (next.message == WM_KEYDOWN ||
                next.message == WM_SYSKEYDOWN ||
                next.message == WM_KEYUP ||
                next.message == WM_SYSKEYUP)
            {
                if (next.wParam == VK_MENU &&
                    (next.lParam & 0x01000000) &&
                    next.time == time)
                {
                    // Next message is Right Alt down so discard this
                    return _GLFW_KEY_INVALID;
                }
            }
        }

        return GLFW_KEY_LEFT_CONTROL;
    }

    if (wParam == VK_PROCESSKEY)
    {
        // IME notifies that keys have been filtered by setting the virtual
        // key-code to VK_PROCESSKEY
        return _GLFW_KEY_INVALID;
    }

    return _glfw.win32.keycodes[HIWORD(lParam) & 0x1FF];
}

// Resize the window to cover the full area of its monitor
//
static void fitToMonitor(_GLFWwindow* window)
{
    MONITORINFO mi = { sizeof(mi) };
    GetMonitorInfo(window->monitor->win32.handle, &mi);
    SetWindowPos(window->win32.handle, HWND_TOPMOST,
                 mi.rcMonitor.left,
                 mi.rcMonitor.top,
                 mi.rcMonitor.right - mi.rcMonitor.left,
                 mi.rcMonitor.bottom - mi.rcMonitor.top,
                 SWP_NOZORDER | SWP_NOACTIVATE | SWP_NOCOPYBITS);
}

// Make the specified window and its video mode active on its monitor
//
static void acquireMonitor(_GLFWwindow* window)
{
    if (!_glfw.win32.acquiredMonitorCount)
    {
        // Keep the display on while any monitor is acquired
        SetThreadExecutionState(ES_CONTINUOUS | ES_DISPLAY_REQUIRED);

        // HACK: When mouse trails are enabled the cursor becomes invisible when
        //       the OpenGL ICD switches to page flipping
        if (IsWindowsXPOrGreater())
        {
            SystemParametersInfo(SPI_GETMOUSETRAILS, 0, &_glfw.win32.mouseTrailSize, 0);
            SystemParametersInfo(SPI_SETMOUSETRAILS, 0, 0, 0);
        }
    }

    if (!window->monitor->window)
        _glfw.win32.acquiredMonitorCount++;

    _glfwSetVideoModeWin32(window->monitor, &window->videoMode);
    _glfwInputMonitorWindow(window->monitor, window);
}

// Remove the window and restore the
original video mode // static void releaseMonitor(_GLFWwindow* window) { if (window->monitor->window != window) return; _glfw.win32.acquiredMonitorCount--; if (!_glfw.win32.acquiredMonitorCount) { SetThreadExecutionState(ES_CONTINUOUS); // HACK: Restore mouse trail length saved in acquireMonitor if (IsWindowsXPOrGreater()) SystemParametersInfo(SPI_SETMOUSETRAILS, _glfw.win32.mouseTrailSize, 0, 0); } _glfwInputMonitorWindow(window->monitor, NULL); _glfwRestoreVideoModeWin32(window->monitor); } // Window callback function (handles window messages) // static LRESULT CALLBACK windowProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam) { _GLFWwindow* window = GetPropW(hWnd, L"GLFW"); if (!window) { // This is the message handling for the hidden helper window // and for a regular window during its initial creation switch (uMsg) { case WM_NCCREATE: { if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) EnableNonClientDpiScaling(hWnd); break; } case WM_DISPLAYCHANGE: _glfwPollMonitorsWin32(); break; case WM_DEVICECHANGE: { if (wParam == DBT_DEVICEARRIVAL) { DEV_BROADCAST_HDR* dbh = (DEV_BROADCAST_HDR*) lParam; if (dbh && dbh->dbch_devicetype == DBT_DEVTYP_DEVICEINTERFACE) _glfwDetectJoystickConnectionWin32(); } else if (wParam == DBT_DEVICEREMOVECOMPLETE) { DEV_BROADCAST_HDR* dbh = (DEV_BROADCAST_HDR*) lParam; if (dbh && dbh->dbch_devicetype == DBT_DEVTYP_DEVICEINTERFACE) _glfwDetectJoystickDisconnectionWin32(); } break; } } return DefWindowProcW(hWnd, uMsg, wParam, lParam); } switch (uMsg) { case WM_MOUSEACTIVATE: { // HACK: Postpone cursor disabling when the window was activated by // clicking a caption button if (HIWORD(lParam) == WM_LBUTTONDOWN) { if (LOWORD(lParam) == HTCLOSE || LOWORD(lParam) == HTMINBUTTON || LOWORD(lParam) == HTMAXBUTTON) { window->win32.frameAction = GLFW_TRUE; } } break; } case WM_CAPTURECHANGED: { // HACK: Disable the cursor once the caption button action has been // completed or cancelled if (lParam == 0 && window->win32.frameAction) { if 
(window->cursorMode == GLFW_CURSOR_DISABLED) disableCursor(window); window->win32.frameAction = GLFW_FALSE; } break; } case WM_SETFOCUS: { _glfwInputWindowFocus(window, GLFW_TRUE); // HACK: Do not disable cursor while the user is interacting with // a caption button if (window->win32.frameAction) break; if (window->cursorMode == GLFW_CURSOR_DISABLED) disableCursor(window); return 0; } case WM_KILLFOCUS: { if (window->cursorMode == GLFW_CURSOR_DISABLED) enableCursor(window); if (window->monitor && window->autoIconify) _glfwPlatformIconifyWindow(window); _glfwInputWindowFocus(window, GLFW_FALSE); return 0; } case WM_SYSCOMMAND: { switch (wParam & 0xfff0) { case SC_SCREENSAVE: case SC_MONITORPOWER: { if (window->monitor) { // We are running in full screen mode, so disallow // screen saver and screen blanking return 0; } else break; } // User trying to access application menu using ALT? case SC_KEYMENU: return 0; } break; } case WM_CLOSE: { _glfwInputWindowCloseRequest(window); return 0; } case WM_INPUTLANGCHANGE: { _glfwUpdateKeyNamesWin32(); break; } case WM_CHAR: case WM_SYSCHAR: case WM_UNICHAR: { const GLFWbool plain = (uMsg != WM_SYSCHAR); if (uMsg == WM_UNICHAR && wParam == UNICODE_NOCHAR) { // WM_UNICHAR is not sent by Windows, but is sent by some // third-party input method engine // Returning TRUE here announces support for this message return TRUE; } _glfwInputChar(window, (unsigned int) wParam, getKeyMods(), plain); return 0; } case WM_KEYDOWN: case WM_SYSKEYDOWN: case WM_KEYUP: case WM_SYSKEYUP: { const int key = translateKey(wParam, lParam); const int scancode = (lParam >> 16) & 0x1ff; const int action = ((lParam >> 31) & 1) ? 
GLFW_RELEASE : GLFW_PRESS; const int mods = getKeyMods(); if (key == _GLFW_KEY_INVALID) break; if (action == GLFW_RELEASE && wParam == VK_SHIFT) { // HACK: Release both Shift keys on Shift up event, as when both // are pressed the first release does not emit any event // NOTE: The other half of this is in _glfwPlatformPollEvents _glfwInputKey(window, GLFW_KEY_LEFT_SHIFT, scancode, action, mods); _glfwInputKey(window, GLFW_KEY_RIGHT_SHIFT, scancode, action, mods); } else if (wParam == VK_SNAPSHOT) { // HACK: Key down is not reported for the Print Screen key _glfwInputKey(window, key, scancode, GLFW_PRESS, mods); _glfwInputKey(window, key, scancode, GLFW_RELEASE, mods); } else _glfwInputKey(window, key, scancode, action, mods); break; } case WM_LBUTTONDOWN: case WM_RBUTTONDOWN: case WM_MBUTTONDOWN: case WM_XBUTTONDOWN: case WM_LBUTTONUP: case WM_RBUTTONUP: case WM_MBUTTONUP: case WM_XBUTTONUP: { int i, button, action; if (uMsg == WM_LBUTTONDOWN || uMsg == WM_LBUTTONUP) button = GLFW_MOUSE_BUTTON_LEFT; else if (uMsg == WM_RBUTTONDOWN || uMsg == WM_RBUTTONUP) button = GLFW_MOUSE_BUTTON_RIGHT; else if (uMsg == WM_MBUTTONDOWN || uMsg == WM_MBUTTONUP) button = GLFW_MOUSE_BUTTON_MIDDLE; else if (GET_XBUTTON_WPARAM(wParam) == XBUTTON1) button = GLFW_MOUSE_BUTTON_4; else button = GLFW_MOUSE_BUTTON_5; if (uMsg == WM_LBUTTONDOWN || uMsg == WM_RBUTTONDOWN || uMsg == WM_MBUTTONDOWN || uMsg == WM_XBUTTONDOWN) { action = GLFW_PRESS; } else action = GLFW_RELEASE; for (i = 0; i <= GLFW_MOUSE_BUTTON_LAST; i++) { if (window->mouseButtons[i] == GLFW_PRESS) break; } if (i > GLFW_MOUSE_BUTTON_LAST) SetCapture(hWnd); _glfwInputMouseClick(window, button, action, getKeyMods()); for (i = 0; i <= GLFW_MOUSE_BUTTON_LAST; i++) { if (window->mouseButtons[i] == GLFW_PRESS) break; } if (i > GLFW_MOUSE_BUTTON_LAST) ReleaseCapture(); if (uMsg == WM_XBUTTONDOWN || uMsg == WM_XBUTTONUP) return TRUE; return 0; } case WM_MOUSEMOVE: { const int x = GET_X_LPARAM(lParam); const int y = 
GET_Y_LPARAM(lParam); // Disabled cursor motion input is provided by WM_INPUT if (window->cursorMode == GLFW_CURSOR_DISABLED) break; _glfwInputCursorPos(window, x, y); window->win32.lastCursorPosX = x; window->win32.lastCursorPosY = y; if (!window->win32.cursorTracked) { TRACKMOUSEEVENT tme; ZeroMemory(&tme, sizeof(tme)); tme.cbSize = sizeof(tme); tme.dwFlags = TME_LEAVE; tme.hwndTrack = window->win32.handle; TrackMouseEvent(&tme); window->win32.cursorTracked = GLFW_TRUE; _glfwInputCursorEnter(window, GLFW_TRUE); } return 0; } case WM_INPUT: { UINT size; HRAWINPUT ri = (HRAWINPUT) lParam; RAWINPUT* data; int dx, dy; // Only process input when disabled cursor mode is applied if (_glfw.win32.disabledCursorWindow != window) break; GetRawInputData(ri, RID_INPUT, NULL, &size, sizeof(RAWINPUTHEADER)); if (size > (UINT) _glfw.win32.rawInputSize) { free(_glfw.win32.rawInput); _glfw.win32.rawInput = calloc(size, 1); _glfw.win32.rawInputSize = size; } size = _glfw.win32.rawInputSize; if (GetRawInputData(ri, RID_INPUT, _glfw.win32.rawInput, &size, sizeof(RAWINPUTHEADER)) == (UINT) -1) { _glfwInputError(GLFW_PLATFORM_ERROR, "Win32: Failed to retrieve raw input data"); break; } data = _glfw.win32.rawInput; if (data->data.mouse.usFlags & MOUSE_MOVE_ABSOLUTE) { dx = data->data.mouse.lLastX - window->win32.lastCursorPosX; dy = data->data.mouse.lLastY - window->win32.lastCursorPosY; } else { dx = data->data.mouse.lLastX; dy = data->data.mouse.lLastY; } _glfwInputCursorPos(window, window->virtualCursorPosX + dx, window->virtualCursorPosY + dy); window->win32.lastCursorPosX += dx; window->win32.lastCursorPosY += dy; break; } case WM_MOUSELEAVE: { window->win32.cursorTracked = GLFW_FALSE; _glfwInputCursorEnter(window, GLFW_FALSE); return 0; } case WM_MOUSEWHEEL: { _glfwInputScroll(window, 0.0, (SHORT) HIWORD(wParam) / (double) WHEEL_DELTA); return 0; } case WM_MOUSEHWHEEL: { // This message is only sent on Windows Vista and later // NOTE: The X-axis is inverted for consistency with 
macOS and X11 _glfwInputScroll(window, -((SHORT) HIWORD(wParam) / (double) WHEEL_DELTA), 0.0); return 0; } case WM_ENTERSIZEMOVE: case WM_ENTERMENULOOP: { // HACK: Enable the cursor while the user is moving or // resizing the window or using the window menu if (window->cursorMode == GLFW_CURSOR_DISABLED) enableCursor(window); break; } case WM_EXITSIZEMOVE: case WM_EXITMENULOOP: { // HACK: Disable the cursor once the user is done moving or // resizing the window or using the menu if (window->cursorMode == GLFW_CURSOR_DISABLED) disableCursor(window); break; } case WM_SIZE: { const GLFWbool iconified = wParam == SIZE_MINIMIZED; const GLFWbool maximized = wParam == SIZE_MAXIMIZED || (window->win32.maximized && wParam != SIZE_RESTORED); if (_glfw.win32.disabledCursorWindow == window) updateClipRect(window); if (window->win32.iconified != iconified) _glfwInputWindowIconify(window, iconified); if (window->win32.maximized != maximized) _glfwInputWindowMaximize(window, maximized); _glfwInputFramebufferSize(window, LOWORD(lParam), HIWORD(lParam)); _glfwInputWindowSize(window, LOWORD(lParam), HIWORD(lParam)); if (window->monitor && window->win32.iconified != iconified) { if (iconified) releaseMonitor(window); else { acquireMonitor(window); fitToMonitor(window); } } window->win32.iconified = iconified; window->win32.maximized = maximized; return 0; } case WM_MOVE: { if (_glfw.win32.disabledCursorWindow == window) updateClipRect(window); // NOTE: This cannot use LOWORD/HIWORD recommended by MSDN, as // those macros do not handle negative window positions correctly _glfwInputWindowPos(window, GET_X_LPARAM(lParam), GET_Y_LPARAM(lParam)); return 0; } case WM_SIZING: { if (window->numer == GLFW_DONT_CARE || window->denom == GLFW_DONT_CARE) { break; } applyAspectRatio(window, (int) wParam, (RECT*) lParam); return TRUE; } case WM_GETMINMAXINFO: { int xoff, yoff; UINT dpi = USER_DEFAULT_SCREEN_DPI; MINMAXINFO* mmi = (MINMAXINFO*) lParam; if (window->monitor) break; if 
(_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) dpi = GetDpiForWindow(window->win32.handle); getFullWindowSize(getWindowStyle(window), getWindowExStyle(window), 0, 0, &xoff, &yoff, dpi); if (window->minwidth != GLFW_DONT_CARE && window->minheight != GLFW_DONT_CARE) { mmi->ptMinTrackSize.x = window->minwidth + xoff; mmi->ptMinTrackSize.y = window->minheight + yoff; } if (window->maxwidth != GLFW_DONT_CARE && window->maxheight != GLFW_DONT_CARE) { mmi->ptMaxTrackSize.x = window->maxwidth + xoff; mmi->ptMaxTrackSize.y = window->maxheight + yoff; } if (!window->decorated) { MONITORINFO mi; const HMONITOR mh = MonitorFromWindow(window->win32.handle, MONITOR_DEFAULTTONEAREST); ZeroMemory(&mi, sizeof(mi)); mi.cbSize = sizeof(mi); GetMonitorInfo(mh, &mi); mmi->ptMaxPosition.x = mi.rcWork.left - mi.rcMonitor.left; mmi->ptMaxPosition.y = mi.rcWork.top - mi.rcMonitor.top; mmi->ptMaxSize.x = mi.rcWork.right - mi.rcWork.left; mmi->ptMaxSize.y = mi.rcWork.bottom - mi.rcWork.top; } return 0; } case WM_PAINT: { _glfwInputWindowDamage(window); break; } case WM_ERASEBKGND: { return TRUE; } case WM_NCACTIVATE: case WM_NCPAINT: { // Prevent title bar from being drawn after restoring a minimized // undecorated window if (!window->decorated) return TRUE; break; } case WM_DWMCOMPOSITIONCHANGED: { if (window->win32.transparent) updateFramebufferTransparency(window); return 0; } case WM_GETDPISCALEDSIZE: { if (window->win32.scaleToMonitor) break; // Adjust the window size to keep the content area size constant if (_glfwIsWindows10CreatorsUpdateOrGreaterWin32()) { RECT source = {0}, target = {0}; SIZE* size = (SIZE*) lParam; AdjustWindowRectExForDpi(&source, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); AdjustWindowRectExForDpi(&target, getWindowStyle(window), FALSE, getWindowExStyle(window), LOWORD(wParam)); size->cx += (target.right - target.left) - (source.right - source.left); size->cy += (target.bottom - target.top) - 
(source.bottom - source.top); return TRUE; } break; } case WM_DPICHANGED: { const float xscale = HIWORD(wParam) / (float) USER_DEFAULT_SCREEN_DPI; const float yscale = LOWORD(wParam) / (float) USER_DEFAULT_SCREEN_DPI; // Only apply the suggested size if the OS is new enough to have // sent a WM_GETDPISCALEDSIZE before this if (_glfwIsWindows10CreatorsUpdateOrGreaterWin32()) { RECT* suggested = (RECT*) lParam; SetWindowPos(window->win32.handle, HWND_TOP, suggested->left, suggested->top, suggested->right - suggested->left, suggested->bottom - suggested->top, SWP_NOACTIVATE | SWP_NOZORDER); } _glfwInputWindowContentScale(window, xscale, yscale); break; } case WM_SETCURSOR: { if (LOWORD(lParam) == HTCLIENT) { updateCursorImage(window); return TRUE; } break; } case WM_DROPFILES: { HDROP drop = (HDROP) wParam; POINT pt; int i; const int count = DragQueryFileW(drop, 0xffffffff, NULL, 0); char** paths = calloc(count, sizeof(char*)); // Move the mouse to the position of the drop DragQueryPoint(drop, &pt); _glfwInputCursorPos(window, pt.x, pt.y); for (i = 0; i < count; i++) { const UINT length = DragQueryFileW(drop, i, NULL, 0); WCHAR* buffer = calloc(length + 1, sizeof(WCHAR)); DragQueryFileW(drop, i, buffer, length + 1); paths[i] = _glfwCreateUTF8FromWideStringWin32(buffer); free(buffer); } _glfwInputDrop(window, count, (const char**) paths); for (i = 0; i < count; i++) free(paths[i]); free(paths); DragFinish(drop); return 0; } } return DefWindowProcW(hWnd, uMsg, wParam, lParam); } // Creates the GLFW window // static int createNativeWindow(_GLFWwindow* window, const _GLFWwndconfig* wndconfig, const _GLFWfbconfig* fbconfig) { int xpos, ypos, fullWidth, fullHeight; WCHAR* wideTitle; DWORD style = getWindowStyle(window); DWORD exStyle = getWindowExStyle(window); if (window->monitor) { GLFWvidmode mode; // NOTE: This window placement is temporary and approximate, as the // correct position and size cannot be known until the monitor // video mode has been picked in 
_glfwSetVideoModeWin32 _glfwPlatformGetMonitorPos(window->monitor, &xpos, &ypos); _glfwPlatformGetVideoMode(window->monitor, &mode); fullWidth = mode.width; fullHeight = mode.height; } else { xpos = CW_USEDEFAULT; ypos = CW_USEDEFAULT; if (wndconfig->maximized) style |= WS_MAXIMIZE; getFullWindowSize(style, exStyle, wndconfig->width, wndconfig->height, &fullWidth, &fullHeight, USER_DEFAULT_SCREEN_DPI); } wideTitle = _glfwCreateWideStringFromUTF8Win32(wndconfig->title); if (!wideTitle) return GLFW_FALSE; window->win32.handle = CreateWindowExW(exStyle, _GLFW_WNDCLASSNAME, wideTitle, style, xpos, ypos, fullWidth, fullHeight, NULL, // No parent window NULL, // No window menu GetModuleHandleW(NULL), NULL); free(wideTitle); if (!window->win32.handle) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to create window"); return GLFW_FALSE; } SetPropW(window->win32.handle, L"GLFW", window); if (IsWindows7OrGreater()) { ChangeWindowMessageFilterEx(window->win32.handle, WM_DROPFILES, MSGFLT_ALLOW, NULL); ChangeWindowMessageFilterEx(window->win32.handle, WM_COPYDATA, MSGFLT_ALLOW, NULL); ChangeWindowMessageFilterEx(window->win32.handle, WM_COPYGLOBALDATA, MSGFLT_ALLOW, NULL); } window->win32.scaleToMonitor = wndconfig->scaleToMonitor; // Adjust window size to account for DPI scaling of the window frame and // optionally DPI scaling of the content area // This cannot be done until we know what monitor it was placed on if (!window->monitor) { RECT rect = { 0, 0, wndconfig->width, wndconfig->height }; if (wndconfig->scaleToMonitor) { float xscale, yscale; _glfwPlatformGetWindowContentScale(window, &xscale, &yscale); rect.right = (int) (rect.right * xscale); rect.bottom = (int) (rect.bottom * yscale); } ClientToScreen(window->win32.handle, (POINT*) &rect.left); ClientToScreen(window->win32.handle, (POINT*) &rect.right); if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, style, FALSE, exStyle, GetDpiForWindow(window->win32.handle)); } 
else AdjustWindowRectEx(&rect, style, FALSE, exStyle); SetWindowPos(window->win32.handle, NULL, rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top, SWP_NOACTIVATE | SWP_NOZORDER); } DragAcceptFiles(window->win32.handle, TRUE); if (fbconfig->transparent) { updateFramebufferTransparency(window); window->win32.transparent = GLFW_TRUE; } return GLFW_TRUE; } ////////////////////////////////////////////////////////////////////////// ////// GLFW internal API ////// ////////////////////////////////////////////////////////////////////////// // Registers the GLFW window class // GLFWbool _glfwRegisterWindowClassWin32(void) { WNDCLASSEXW wc; ZeroMemory(&wc, sizeof(wc)); wc.cbSize = sizeof(wc); wc.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC; wc.lpfnWndProc = (WNDPROC) windowProc; wc.hInstance = GetModuleHandleW(NULL); wc.hCursor = LoadCursorW(NULL, IDC_ARROW); wc.lpszClassName = _GLFW_WNDCLASSNAME; // Load user-provided icon if available wc.hIcon = LoadImageW(GetModuleHandleW(NULL), L"GLFW_ICON", IMAGE_ICON, 0, 0, LR_DEFAULTSIZE | LR_SHARED); if (!wc.hIcon) { // No user-provided icon found, load default icon wc.hIcon = LoadImageW(NULL, IDI_APPLICATION, IMAGE_ICON, 0, 0, LR_DEFAULTSIZE | LR_SHARED); } if (!RegisterClassExW(&wc)) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to register window class"); return GLFW_FALSE; } return GLFW_TRUE; } // Unregisters the GLFW window class // void _glfwUnregisterWindowClassWin32(void) { UnregisterClassW(_GLFW_WNDCLASSNAME, GetModuleHandleW(NULL)); } ////////////////////////////////////////////////////////////////////////// ////// GLFW platform API ////// ////////////////////////////////////////////////////////////////////////// int _glfwPlatformCreateWindow(_GLFWwindow* window, const _GLFWwndconfig* wndconfig, const _GLFWctxconfig* ctxconfig, const _GLFWfbconfig* fbconfig) { if (!createNativeWindow(window, wndconfig, fbconfig)) return GLFW_FALSE; if (ctxconfig->client != GLFW_NO_API) { if (ctxconfig->source == 
GLFW_NATIVE_CONTEXT_API) { if (!_glfwInitWGL()) return GLFW_FALSE; if (!_glfwCreateContextWGL(window, ctxconfig, fbconfig)) return GLFW_FALSE; } else if (ctxconfig->source == GLFW_EGL_CONTEXT_API) { if (!_glfwInitEGL()) return GLFW_FALSE; if (!_glfwCreateContextEGL(window, ctxconfig, fbconfig)) return GLFW_FALSE; } else if (ctxconfig->source == GLFW_OSMESA_CONTEXT_API) { if (!_glfwInitOSMesa()) return GLFW_FALSE; if (!_glfwCreateContextOSMesa(window, ctxconfig, fbconfig)) return GLFW_FALSE; } } if (window->monitor) { _glfwPlatformShowWindow(window); _glfwPlatformFocusWindow(window); acquireMonitor(window); fitToMonitor(window); } return GLFW_TRUE; } void _glfwPlatformDestroyWindow(_GLFWwindow* window) { if (window->monitor) releaseMonitor(window); if (window->context.destroy) window->context.destroy(window); if (_glfw.win32.disabledCursorWindow == window) _glfw.win32.disabledCursorWindow = NULL; if (window->win32.handle) { RemovePropW(window->win32.handle, L"GLFW"); DestroyWindow(window->win32.handle); window->win32.handle = NULL; } if (window->win32.bigIcon) DestroyIcon(window->win32.bigIcon); if (window->win32.smallIcon) DestroyIcon(window->win32.smallIcon); } void _glfwPlatformSetWindowTitle(_GLFWwindow* window, const char* title) { WCHAR* wideTitle = _glfwCreateWideStringFromUTF8Win32(title); if (!wideTitle) return; SetWindowTextW(window->win32.handle, wideTitle); free(wideTitle); } void _glfwPlatformSetWindowIcon(_GLFWwindow* window, int count, const GLFWimage* images) { HICON bigIcon = NULL, smallIcon = NULL; if (count) { const GLFWimage* bigImage = chooseImage(count, images, GetSystemMetrics(SM_CXICON), GetSystemMetrics(SM_CYICON)); const GLFWimage* smallImage = chooseImage(count, images, GetSystemMetrics(SM_CXSMICON), GetSystemMetrics(SM_CYSMICON)); bigIcon = createIcon(bigImage, 0, 0, GLFW_TRUE); smallIcon = createIcon(smallImage, 0, 0, GLFW_TRUE); } else { bigIcon = (HICON) GetClassLongPtrW(window->win32.handle, GCLP_HICON); smallIcon = (HICON) 
GetClassLongPtrW(window->win32.handle, GCLP_HICONSM); } SendMessage(window->win32.handle, WM_SETICON, ICON_BIG, (LPARAM) bigIcon); SendMessage(window->win32.handle, WM_SETICON, ICON_SMALL, (LPARAM) smallIcon); if (window->win32.bigIcon) DestroyIcon(window->win32.bigIcon); if (window->win32.smallIcon) DestroyIcon(window->win32.smallIcon); if (count) { window->win32.bigIcon = bigIcon; window->win32.smallIcon = smallIcon; } } void _glfwPlatformGetWindowPos(_GLFWwindow* window, int* xpos, int* ypos) { POINT pos = { 0, 0 }; ClientToScreen(window->win32.handle, &pos); if (xpos) *xpos = pos.x; if (ypos) *ypos = pos.y; } void _glfwPlatformSetWindowPos(_GLFWwindow* window, int xpos, int ypos) { RECT rect = { xpos, ypos, xpos, ypos }; if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); } else { AdjustWindowRectEx(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window)); } SetWindowPos(window->win32.handle, NULL, rect.left, rect.top, 0, 0, SWP_NOACTIVATE | SWP_NOZORDER | SWP_NOSIZE); } void _glfwPlatformGetWindowSize(_GLFWwindow* window, int* width, int* height) { RECT area; GetClientRect(window->win32.handle, &area); if (width) *width = area.right; if (height) *height = area.bottom; } void _glfwPlatformSetWindowSize(_GLFWwindow* window, int width, int height) { if (window->monitor) { if (window->monitor->window == window) { acquireMonitor(window); fitToMonitor(window); } } else { RECT rect = { 0, 0, width, height }; if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); } else { AdjustWindowRectEx(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window)); } SetWindowPos(window->win32.handle, HWND_TOP, 0, 0, rect.right - rect.left, rect.bottom - rect.top, SWP_NOACTIVATE | SWP_NOOWNERZORDER | SWP_NOMOVE 
| SWP_NOZORDER); } } void _glfwPlatformSetWindowSizeLimits(_GLFWwindow* window, int minwidth, int minheight, int maxwidth, int maxheight) { RECT area; if ((minwidth == GLFW_DONT_CARE || minheight == GLFW_DONT_CARE) && (maxwidth == GLFW_DONT_CARE || maxheight == GLFW_DONT_CARE)) { return; } GetWindowRect(window->win32.handle, &area); MoveWindow(window->win32.handle, area.left, area.top, area.right - area.left, area.bottom - area.top, TRUE); } void _glfwPlatformSetWindowAspectRatio(_GLFWwindow* window, int numer, int denom) { RECT area; if (numer == GLFW_DONT_CARE || denom == GLFW_DONT_CARE) return; GetWindowRect(window->win32.handle, &area); applyAspectRatio(window, WMSZ_BOTTOMRIGHT, &area); MoveWindow(window->win32.handle, area.left, area.top, area.right - area.left, area.bottom - area.top, TRUE); } void _glfwPlatformGetFramebufferSize(_GLFWwindow* window, int* width, int* height) { _glfwPlatformGetWindowSize(window, width, height); } void _glfwPlatformGetWindowFrameSize(_GLFWwindow* window, int* left, int* top, int* right, int* bottom) { RECT rect; int width, height; _glfwPlatformGetWindowSize(window, &width, &height); SetRect(&rect, 0, 0, width, height); if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); } else { AdjustWindowRectEx(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window)); } if (left) *left = -rect.left; if (top) *top = -rect.top; if (right) *right = rect.right - width; if (bottom) *bottom = rect.bottom - height; } void _glfwPlatformGetWindowContentScale(_GLFWwindow* window, float* xscale, float* yscale) { const HANDLE handle = MonitorFromWindow(window->win32.handle, MONITOR_DEFAULTTONEAREST); _glfwGetMonitorContentScaleWin32(handle, xscale, yscale); } void _glfwPlatformIconifyWindow(_GLFWwindow* window) { ShowWindow(window->win32.handle, SW_MINIMIZE); } void _glfwPlatformRestoreWindow(_GLFWwindow* window) { 
ShowWindow(window->win32.handle, SW_RESTORE); } void _glfwPlatformMaximizeWindow(_GLFWwindow* window) { ShowWindow(window->win32.handle, SW_MAXIMIZE); } void _glfwPlatformShowWindow(_GLFWwindow* window) { ShowWindow(window->win32.handle, SW_SHOWNA); } void _glfwPlatformHideWindow(_GLFWwindow* window) { ShowWindow(window->win32.handle, SW_HIDE); } void _glfwPlatformRequestWindowAttention(_GLFWwindow* window) { FlashWindow(window->win32.handle, TRUE); } void _glfwPlatformFocusWindow(_GLFWwindow* window) { BringWindowToTop(window->win32.handle); SetForegroundWindow(window->win32.handle); SetFocus(window->win32.handle); } void _glfwPlatformSetWindowMonitor(_GLFWwindow* window, _GLFWmonitor* monitor, int xpos, int ypos, int width, int height, int refreshRate) { if (window->monitor == monitor) { if (monitor) { if (monitor->window == window) { acquireMonitor(window); fitToMonitor(window); } } else { RECT rect = { xpos, ypos, xpos + width, ypos + height }; if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); } else { AdjustWindowRectEx(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window)); } SetWindowPos(window->win32.handle, HWND_TOP, rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top, SWP_NOCOPYBITS | SWP_NOACTIVATE | SWP_NOZORDER); } return; } if (window->monitor) releaseMonitor(window); _glfwInputWindowMonitor(window, monitor); if (monitor) { MONITORINFO mi = { sizeof(mi) }; UINT flags = SWP_SHOWWINDOW | SWP_NOACTIVATE | SWP_NOCOPYBITS; if (window->decorated) { DWORD style = GetWindowLongW(window->win32.handle, GWL_STYLE); style &= ~WS_OVERLAPPEDWINDOW; style |= getWindowStyle(window); SetWindowLongW(window->win32.handle, GWL_STYLE, style); flags |= SWP_FRAMECHANGED; } acquireMonitor(window); GetMonitorInfo(window->monitor->win32.handle, &mi); SetWindowPos(window->win32.handle, HWND_TOPMOST, mi.rcMonitor.left, 
mi.rcMonitor.top, mi.rcMonitor.right - mi.rcMonitor.left, mi.rcMonitor.bottom - mi.rcMonitor.top, flags); } else { HWND after; RECT rect = { xpos, ypos, xpos + width, ypos + height }; DWORD style = GetWindowLongW(window->win32.handle, GWL_STYLE); UINT flags = SWP_NOACTIVATE | SWP_NOCOPYBITS; if (window->decorated) { style &= ~WS_POPUP; style |= getWindowStyle(window); SetWindowLongW(window->win32.handle, GWL_STYLE, style); flags |= SWP_FRAMECHANGED; } if (window->floating) after = HWND_TOPMOST; else after = HWND_NOTOPMOST; if (_glfwIsWindows10AnniversaryUpdateOrGreaterWin32()) { AdjustWindowRectExForDpi(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window), GetDpiForWindow(window->win32.handle)); } else { AdjustWindowRectEx(&rect, getWindowStyle(window), FALSE, getWindowExStyle(window)); } SetWindowPos(window->win32.handle, after, rect.left, rect.top, rect.right - rect.left, rect.bottom - rect.top, flags); } } int _glfwPlatformWindowFocused(_GLFWwindow* window) { return window->win32.handle == GetActiveWindow(); } int _glfwPlatformWindowIconified(_GLFWwindow* window) { return IsIconic(window->win32.handle); } int _glfwPlatformWindowVisible(_GLFWwindow* window) { return IsWindowVisible(window->win32.handle); } int _glfwPlatformWindowMaximized(_GLFWwindow* window) { return IsZoomed(window->win32.handle); } int _glfwPlatformWindowHovered(_GLFWwindow* window) { return cursorInContentArea(window); } int _glfwPlatformFramebufferTransparent(_GLFWwindow* window) { BOOL enabled; if (!window->win32.transparent) return GLFW_FALSE; if (!IsWindowsVistaOrGreater()) return GLFW_FALSE; return SUCCEEDED(DwmIsCompositionEnabled(&enabled)) && enabled; } void _glfwPlatformSetWindowResizable(_GLFWwindow* window, GLFWbool enabled) { updateWindowStyles(window); } void _glfwPlatformSetWindowDecorated(_GLFWwindow* window, GLFWbool enabled) { updateWindowStyles(window); } void _glfwPlatformSetWindowFloating(_GLFWwindow* window, GLFWbool enabled) { const HWND after = enabled ? 
HWND_TOPMOST : HWND_NOTOPMOST; SetWindowPos(window->win32.handle, after, 0, 0, 0, 0, SWP_NOACTIVATE | SWP_NOMOVE | SWP_NOSIZE); } float _glfwPlatformGetWindowOpacity(_GLFWwindow* window) { BYTE alpha; DWORD flags; if ((GetWindowLongW(window->win32.handle, GWL_EXSTYLE) & WS_EX_LAYERED) && GetLayeredWindowAttributes(window->win32.handle, NULL, &alpha, &flags)) { if (flags & LWA_ALPHA) return alpha / 255.f; } return 1.f; } void _glfwPlatformSetWindowOpacity(_GLFWwindow* window, float opacity) { if (opacity < 1.f) { const BYTE alpha = (BYTE) (255 * opacity); DWORD style = GetWindowLongW(window->win32.handle, GWL_EXSTYLE); style |= WS_EX_LAYERED; SetWindowLongW(window->win32.handle, GWL_EXSTYLE, style); SetLayeredWindowAttributes(window->win32.handle, 0, alpha, LWA_ALPHA); } else { DWORD style = GetWindowLongW(window->win32.handle, GWL_EXSTYLE); style &= ~WS_EX_LAYERED; SetWindowLongW(window->win32.handle, GWL_EXSTYLE, style); } } void _glfwPlatformPollEvents(void) { MSG msg; HWND handle; _GLFWwindow* window; while (PeekMessageW(&msg, NULL, 0, 0, PM_REMOVE)) { if (msg.message == WM_QUIT) { // NOTE: While GLFW does not itself post WM_QUIT, other processes // may post it to this one, for example Task Manager // HACK: Treat WM_QUIT as a close on all windows window = _glfw.windowListHead; while (window) { _glfwInputWindowCloseRequest(window); window = window->next; } } else { TranslateMessage(&msg); DispatchMessageW(&msg); } } handle = GetActiveWindow(); if (handle) { // NOTE: Shift keys on Windows tend to "stick" when both are pressed as // no key up message is generated by the first key release // The other half of this is in the handling of WM_KEYUP // HACK: Query actual key state and synthesize release events as needed window = GetPropW(handle, L"GLFW"); if (window) { const GLFWbool lshift = (GetAsyncKeyState(VK_LSHIFT) >> 15) & 1; const GLFWbool rshift = (GetAsyncKeyState(VK_RSHIFT) >> 15) & 1; if (!lshift && window->keys[GLFW_KEY_LEFT_SHIFT] == GLFW_PRESS) { const int 
mods = getAsyncKeyMods(); const int scancode = _glfw.win32.scancodes[GLFW_KEY_LEFT_SHIFT]; _glfwInputKey(window, GLFW_KEY_LEFT_SHIFT, scancode, GLFW_RELEASE, mods); } else if (!rshift && window->keys[GLFW_KEY_RIGHT_SHIFT] == GLFW_PRESS) { const int mods = getAsyncKeyMods(); const int scancode = _glfw.win32.scancodes[GLFW_KEY_RIGHT_SHIFT]; _glfwInputKey(window, GLFW_KEY_RIGHT_SHIFT, scancode, GLFW_RELEASE, mods); } } } window = _glfw.win32.disabledCursorWindow; if (window) { int width, height; _glfwPlatformGetWindowSize(window, &width, &height); // NOTE: Re-center the cursor only if it has moved since the last call, // to avoid breaking glfwWaitEvents with WM_MOUSEMOVE if (window->win32.lastCursorPosX != width / 2 || window->win32.lastCursorPosY != height / 2) { _glfwPlatformSetCursorPos(window, width / 2, height / 2); } } } void _glfwPlatformWaitEvents(void) { WaitMessage(); _glfwPlatformPollEvents(); } void _glfwPlatformWaitEventsTimeout(double timeout) { MsgWaitForMultipleObjects(0, NULL, FALSE, (DWORD) (timeout * 1e3), QS_ALLEVENTS); _glfwPlatformPollEvents(); } void _glfwPlatformPostEmptyEvent(void) { PostMessage(_glfw.win32.helperWindowHandle, WM_NULL, 0, 0); } void _glfwPlatformGetCursorPos(_GLFWwindow* window, double* xpos, double* ypos) { POINT pos; if (GetCursorPos(&pos)) { ScreenToClient(window->win32.handle, &pos); if (xpos) *xpos = pos.x; if (ypos) *ypos = pos.y; } } void _glfwPlatformSetCursorPos(_GLFWwindow* window, double xpos, double ypos) { POINT pos = { (int) xpos, (int) ypos }; // Store the new position so it can be recognized later window->win32.lastCursorPosX = pos.x; window->win32.lastCursorPosY = pos.y; ClientToScreen(window->win32.handle, &pos); SetCursorPos(pos.x, pos.y); } void _glfwPlatformSetCursorMode(_GLFWwindow* window, int mode) { if (mode == GLFW_CURSOR_DISABLED) { if (_glfwPlatformWindowFocused(window)) disableCursor(window); } else if (_glfw.win32.disabledCursorWindow == window) enableCursor(window); else if 
(cursorInContentArea(window)) updateCursorImage(window); } const char* _glfwPlatformGetScancodeName(int scancode) { return _glfw.win32.keynames[_glfw.win32.keycodes[scancode]]; } int _glfwPlatformGetKeyScancode(int key) { return _glfw.win32.scancodes[key]; } int _glfwPlatformCreateCursor(_GLFWcursor* cursor, const GLFWimage* image, int xhot, int yhot) { cursor->win32.handle = (HCURSOR) createIcon(image, xhot, yhot, GLFW_FALSE); if (!cursor->win32.handle) return GLFW_FALSE; return GLFW_TRUE; } int _glfwPlatformCreateStandardCursor(_GLFWcursor* cursor, int shape) { int id = 0; if (shape == GLFW_ARROW_CURSOR) id = OCR_NORMAL; else if (shape == GLFW_IBEAM_CURSOR) id = OCR_IBEAM; else if (shape == GLFW_CROSSHAIR_CURSOR) id = OCR_CROSS; else if (shape == GLFW_HAND_CURSOR) id = OCR_HAND; else if (shape == GLFW_HRESIZE_CURSOR) id = OCR_SIZEWE; else if (shape == GLFW_VRESIZE_CURSOR) id = OCR_SIZENS; else return GLFW_FALSE; cursor->win32.handle = LoadImageW(NULL, MAKEINTRESOURCEW(id), IMAGE_CURSOR, 0, 0, LR_DEFAULTSIZE | LR_SHARED); if (!cursor->win32.handle) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to create standard cursor"); return GLFW_FALSE; } return GLFW_TRUE; } void _glfwPlatformDestroyCursor(_GLFWcursor* cursor) { if (cursor->win32.handle) DestroyIcon((HICON) cursor->win32.handle); } void _glfwPlatformSetCursor(_GLFWwindow* window, _GLFWcursor* cursor) { if (cursorInContentArea(window)) updateCursorImage(window); } void _glfwPlatformSetClipboardString(const char* string) { int characterCount; HANDLE object; WCHAR* buffer; characterCount = MultiByteToWideChar(CP_UTF8, 0, string, -1, NULL, 0); if (!characterCount) return; object = GlobalAlloc(GMEM_MOVEABLE, characterCount * sizeof(WCHAR)); if (!object) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to allocate global handle for clipboard"); return; } buffer = GlobalLock(object); if (!buffer) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to lock global handle"); 
GlobalFree(object); return; } MultiByteToWideChar(CP_UTF8, 0, string, -1, buffer, characterCount); GlobalUnlock(object); if (!OpenClipboard(_glfw.win32.helperWindowHandle)) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to open clipboard"); GlobalFree(object); return; } EmptyClipboard(); SetClipboardData(CF_UNICODETEXT, object); CloseClipboard(); } const char* _glfwPlatformGetClipboardString(void) { HANDLE object; WCHAR* buffer; if (!OpenClipboard(_glfw.win32.helperWindowHandle)) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to open clipboard"); return NULL; } object = GetClipboardData(CF_UNICODETEXT); if (!object) { _glfwInputErrorWin32(GLFW_FORMAT_UNAVAILABLE, "Win32: Failed to convert clipboard to string"); CloseClipboard(); return NULL; } buffer = GlobalLock(object); if (!buffer) { _glfwInputErrorWin32(GLFW_PLATFORM_ERROR, "Win32: Failed to lock global handle"); CloseClipboard(); return NULL; } free(_glfw.win32.clipboardString); _glfw.win32.clipboardString = _glfwCreateUTF8FromWideStringWin32(buffer); GlobalUnlock(object); CloseClipboard(); return _glfw.win32.clipboardString; } void _glfwPlatformGetRequiredInstanceExtensions(char** extensions) { if (!_glfw.vk.KHR_surface || !_glfw.vk.KHR_win32_surface) return; extensions[0] = "VK_KHR_surface"; extensions[1] = "VK_KHR_win32_surface"; } int _glfwPlatformGetPhysicalDevicePresentationSupport(VkInstance instance, VkPhysicalDevice device, uint32_t queuefamily) { PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = (PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR) vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR"); if (!vkGetPhysicalDeviceWin32PresentationSupportKHR) { _glfwInputError(GLFW_API_UNAVAILABLE, "Win32: Vulkan instance missing VK_KHR_win32_surface extension"); return GLFW_FALSE; } return vkGetPhysicalDeviceWin32PresentationSupportKHR(device, queuefamily); } VkResult 
_glfwPlatformCreateWindowSurface(VkInstance instance, _GLFWwindow* window, const VkAllocationCallbacks* allocator, VkSurfaceKHR* surface) { VkResult err; VkWin32SurfaceCreateInfoKHR sci; PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR; vkCreateWin32SurfaceKHR = (PFN_vkCreateWin32SurfaceKHR) vkGetInstanceProcAddr(instance, "vkCreateWin32SurfaceKHR"); if (!vkCreateWin32SurfaceKHR) { _glfwInputError(GLFW_API_UNAVAILABLE, "Win32: Vulkan instance missing VK_KHR_win32_surface extension"); return VK_ERROR_EXTENSION_NOT_PRESENT; } memset(&sci, 0, sizeof(sci)); sci.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR; sci.hinstance = GetModuleHandle(NULL); sci.hwnd = window->win32.handle; err = vkCreateWin32SurfaceKHR(instance, &sci, allocator, surface); if (err) { _glfwInputError(GLFW_PLATFORM_ERROR, "Win32: Failed to create Vulkan surface: %s", _glfwGetVulkanResultString(err)); } return err; } ////////////////////////////////////////////////////////////////////////// ////// GLFW native API ////// ////////////////////////////////////////////////////////////////////////// GLFWAPI HWND glfwGetWin32Window(GLFWwindow* handle) { _GLFWwindow* window = (_GLFWwindow*) handle; _GLFW_REQUIRE_INIT_OR_RETURN(NULL); return window->win32.handle; }
{ "content_hash": "59283c9c146d7365757a6dd937872413", "timestamp": "", "source": "github", "line_count": 2158, "max_line_length": 92, "avg_line_length": 30.669601482854496, "alnum_prop": 0.5470272720404925, "repo_name": "RichieSams/lantern", "id": "77338468f4c3c586fb48234ea396112b4378a851", "size": "67663", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "third_party/glfw/src/win32_window.c", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "569" }, { "name": "C", "bytes": "786" }, { "name": "C++", "bytes": "169826" }, { "name": "CMake", "bytes": "61433" }, { "name": "GLSL", "bytes": "1543" } ], "symlink_target": "" }
package com.adobe.acs.commons.util; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.util.ArrayList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; public class CookieUtil { private static final Logger log = LoggerFactory.getLogger(CookieUtil.class); private CookieUtil() { } /** * Add the provided HTTP Cookie to the Response * * @param cookie Cookie to add * @param response Response to add Cookie to * @return true unless cookie or response is null */ public static boolean addCookie(final Cookie cookie, final HttpServletResponse response) { if (cookie == null || response == null) { return false; } response.addCookie(cookie); return true; } /** * Get the named cookie from the HTTP Request * * @param request Request to get the Cookie from * @param cookieName name of Cookie to get * @return the named Cookie, null if the named Cookie cannot be found */ public static Cookie getCookie(final HttpServletRequest request, final String cookieName) { if (StringUtils.isBlank(cookieName)) { return null; } final Cookie[] cookies = request.getCookies(); if (cookies == null) { return null; } if (cookies.length > 0) { for (final Cookie cookie : cookies) { if (StringUtils.equals(cookieName, cookie.getName())) { return cookie; } } } return null; } /** * Gets Cookies from the Request whose's names match the provides Regex * * @param request Request to get the Cookie from * @param regex Regex to match against Cookie names * @return Cookies which match the Regex */ public static List<Cookie> getCookies(final HttpServletRequest request, final String regex) { final ArrayList<Cookie> foundCookies = new ArrayList<Cookie>(); if (StringUtils.isBlank(regex)) { return foundCookies; } final Cookie[] cookies = request.getCookies(); if (cookies == null) { return null; } 
final Pattern p = Pattern.compile(regex); for (final Cookie cookie : cookies) { final Matcher m = p.matcher(cookie.getName()); if (m.matches()) { foundCookies.add(cookie); } } return foundCookies; } /** * <p> * Extend the cookie life. * <p></p> * This can be used when a cookie should be valid for X minutes from the last point of activity. * <p></p> * This method will leave expired or deleted cookies alone. * </p> * * @param request Request to get the Cookie from * @param response Response to write the extended Cookie to * @param cookieName Name of Cookie to extend the life of * @param expiry New Cookie expiry */ public static boolean extendCookieLife(final HttpServletRequest request, final HttpServletResponse response, final String cookieName, final String cookiePath, final int expiry) { final Cookie cookie = getCookie(request, cookieName); if (cookie == null) { return false; } if (cookie.getMaxAge() <= 0) { return false; } cookie.setMaxAge(expiry); cookie.setPath(cookiePath); addCookie(cookie, response); return true; } /** * Remove the named Cookies from Response * * @param request Request to get the Cookies to drop * @param response Response to expire the Cookies on * @param cookieNames Names of cookies to drop * @return Number of Cookies dropped */ public static int dropCookies(final HttpServletRequest request, final HttpServletResponse response, final String cookiePath, final String... 
cookieNames) { int count = 0; if (cookieNames == null) { return count; } final List<Cookie> cookies = new ArrayList<Cookie>(); for (final String cookieName : cookieNames) { cookies.add(getCookie(request, cookieName)); } return dropCookies(response, cookies.toArray(new Cookie[cookies.size()]), cookiePath); } /** * Remove the Cookies whose names match the provided Regex from Response * * @param request Request to get the Cookies to drop * @param response Response to expire the Cookies on * @param regexes Regex to find Cookies to drop * @return Number of Cookies dropped */ public static int dropCookiesByRegex(final HttpServletRequest request, final HttpServletResponse response, final String cookiePath, final String... regexes) { return dropCookiesByRegexArray(request, response, cookiePath, regexes); } /** * Remove the Cookies whose names match the provided Regex from Response * * @param request Request to get the Cookies to drop * @param response Response to expire the Cookies on * @param regexes Regex to find Cookies to drop * @return Number of Cookies dropped */ public static int dropCookiesByRegexArray(final HttpServletRequest request, final HttpServletResponse response, final String cookiePath, final String[] regexes) { int count = 0; if (regexes == null) { return count; } final List<Cookie> cookies = new ArrayList<Cookie>(); for (final String regex : regexes) { cookies.addAll(getCookies(request, regex)); } return dropCookies(response, cookies.toArray(new Cookie[cookies.size()]), cookiePath); } /** * Removes all cookies for the domain * * @param request Request to get the Cookies to drop * @param response Response to expire the Cookies on */ public static int dropAllCookies(final HttpServletRequest request, final HttpServletResponse response, final String cookiePath) { final Cookie[] cookies = request.getCookies(); if (cookies == null) { return 0; } return dropCookies(response, cookies, cookiePath); } /** * Internal method used for dropping cookies * * @param 
response * @param cookies * @param cookiePath * @return */ private static int dropCookies(final HttpServletResponse response, final Cookie[] cookies, final String cookiePath) { int count = 0; for (final Cookie cookie : cookies) { if (cookie == null) { continue; } cookie.setMaxAge(0); cookie.setPath(cookiePath); addCookie(cookie, response); count++; } return count; } }
{ "content_hash": "1e9ce098de351efc383d8bd604ed49ea", "timestamp": "", "source": "github", "line_count": 225, "max_line_length": 166, "avg_line_length": 31.613333333333333, "alnum_prop": 0.6183045128637705, "repo_name": "davidjgonzalez/acs-aem-commons", "id": "4a2a595e773594c13e1c38bffb8edaf49804bbf2", "size": "7757", "binary": false, "copies": "1", "ref": "refs/heads/feature/workflow-reauthentication", "path": "bundle/src/main/java/com/adobe/acs/commons/util/CookieUtil.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "41223" }, { "name": "Groovy", "bytes": "1457" }, { "name": "HTML", "bytes": "7595" }, { "name": "Java", "bytes": "2786110" }, { "name": "JavaScript", "bytes": "242813" }, { "name": "Shell", "bytes": "330" } ], "symlink_target": "" }
package local.org.apache.http.client.utils; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import org.apache.http.annotation.Immutable; import local.org.apache.http.client.utils.Idn; /** * Uses the java.net.IDN class through reflection. * * @since 4.0 */ @Immutable public class JdkIdn implements Idn { private final Method toUnicode; /** * * @throws ClassNotFoundException if java.net.IDN is not available */ public JdkIdn() throws ClassNotFoundException { Class<?> clazz = Class.forName("java.net.IDN"); try { toUnicode = clazz.getMethod("toUnicode", String.class); } catch (SecurityException e) { // doesn't happen throw new IllegalStateException(e.getMessage(), e); } catch (NoSuchMethodException e) { // doesn't happen throw new IllegalStateException(e.getMessage(), e); } } public String toUnicode(String punycode) { try { return (String) toUnicode.invoke(null, punycode); } catch (IllegalAccessException e) { throw new IllegalStateException(e.getMessage(), e); } catch (InvocationTargetException e) { Throwable t = e.getCause(); throw new RuntimeException(t.getMessage(), t); } } }
{ "content_hash": "6fc07e0530c54998280f670cba945311", "timestamp": "", "source": "github", "line_count": 48, "max_line_length": 70, "avg_line_length": 28.291666666666668, "alnum_prop": 0.6310751104565537, "repo_name": "Phoenix1708/t2-server-jar-android-0.1", "id": "526e47c613be8ec0abc56d1cddf7e545916c76a3", "size": "2590", "binary": false, "copies": "2", "ref": "refs/heads/hyde", "path": "t2-server-jar-android-0.1-hyde/src/main/java/local/org/apache/http/client/utils/JdkIdn.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "1039129" } ], "symlink_target": "" }
-- Add a per-event counter of reminders sent, defaulting to zero for new rows.
ALTER TABLE event ADD COLUMN noreminderssent integer;
ALTER TABLE event ALTER COLUMN noreminderssent SET DEFAULT 0;

-- Backfill existing rows so the counter is never NULL.
UPDATE event SET noreminderssent = 0;
{ "content_hash": "de1f32064830dc84e8ac76976d2ac6dd", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 61, "avg_line_length": 51, "alnum_prop": 0.8366013071895425, "repo_name": "mokoka/grassroot-platform", "id": "5658b61e58de4e1833607ca684aed960f1e7b9b1", "size": "153", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "grassroot-webapp/src/main/resources/db/scripts/script_15.sql", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "15432" }, { "name": "CSS", "bytes": "169062" }, { "name": "HTML", "bytes": "481968" }, { "name": "Java", "bytes": "2714415" }, { "name": "JavaScript", "bytes": "127382" }, { "name": "PLSQL", "bytes": "4704" }, { "name": "PLpgSQL", "bytes": "4905" }, { "name": "SQLPL", "bytes": "3679" }, { "name": "Shell", "bytes": "1633" } ], "symlink_target": "" }
<?php class FileField extends EditControl { /** * Instanse of UploadHandler class * @var {object} */ var $upload_handler = null; /** * Field random identifier for sessions values * @var {string} */ var $formStamp = ""; function FileField($field, $pageObject, $id, $connection) { parent::EditControl($field, $pageObject, $id, $connection); $this->format = EDIT_FORMAT_FILE; } function addJSFiles() { if($this->pageObject->pageType == PAGE_ADD || $this->pageObject->pageType == PAGE_EDIT || $this->pageObject->pageType == PAGE_REGISTER){ $this->pageObject->AddJSFile("include/mupload.js"); $this->pageObject->AddJSFile("include/zoombox/zoombox.js"); } } function addCSSFiles() { if($this->pageObject->pageType == PAGE_ADD || $this->pageObject->pageType == PAGE_EDIT || $this->pageObject->pageType == PAGE_REGISTER){ $this->pageObject->AddCSSFile("include/zoombox/zoombox.css"); } } function buildControl($value, $mode, $fieldNum, $validate, $additionalCtrlParams, $data) { parent::buildControl($value, $mode, $fieldNum, $validate, $additionalCtrlParams, $data); if($this->pageObject->pageType == PAGE_SEARCH || $this->pageObject->pageType == PAGE_LIST) { echo '<input id="'.$this->cfield.'" '.$this->inputStyle.' type="text" ' .($mode == MODE_SEARCH ? 'autocomplete="off" ' : '') .(($mode==MODE_INLINE_EDIT || $mode==MODE_INLINE_ADD) && $this->is508==true ? 'alt="'.$this->strLabel.'" ' : '') .'name="'.$this->cfield.'" '.$this->pageObject->pSetEdit->getEditParams($this->field).' 
value="' .runner_htmlspecialchars($value).'">'; $this->buildControlEnd($validate); return; } if($mode == MODE_SEARCH) $this->format = ""; $this->formStamp = generatePassword(15); $this->initUploadHandler(); $this->upload_handler->formStamp = $this->formStamp; $filesArray = my_json_decode($value); if(!is_array($filesArray) || count($filesArray) == 0) { if(!$value) $jsonValue = "[]"; else { $uploadedFile = $this->upload_handler->get_file_object($value); if(is_null($uploadedFile)) $filesArray = array(); else $filesArray = array(my_json_decode(my_json_encode($uploadedFile))); } } if($this->pageObject->pageType == PAGE_EDIT) { if(count($this->pageObject->keys) > 0) { $i = 1; foreach($this->pageObject->keys as $keyName => $keyValue) { $this->upload_handler->tkeys .= "&key".$i."=".rawurlencode($keyValue); $i++; } } } $_SESSION["mupload_".$this->formStamp] = array(); $userFilesArray = array(); if(is_array($filesArray)) { foreach ($filesArray as $file) { $sessionArray = array(); $sessionArray["file"] = $file; $sessionArray["fromDB"] = true; $sessionArray["deleted"] = false; $_SESSION["mupload_".$this->formStamp][$file["usrName"]] = $sessionArray; $userFile = $this->upload_handler->buildUserFile($file); if(!$userFile["isImg"]){ $userFile["isImg"] = true; $userFile["isIco"] = true; $userFile["thumbnail_url"] = $userFile["url"]."&icon=1"; } $userFilesArray[] = $userFile; } } $jsonValue = my_json_encode($userFilesArray); $multiple = ""; if( !isIOS() && $this->pageObject->pSetEdit->getMaxNumberOfFiles($this->field) != 1 ) $multiple = "multiple "; echo ' <!-- The file upload form used as target for the file upload widget --> <form id="fileupload_'.$this->cfieldname.'" action="'.GetTableLink("mfhandler").'" method="POST" enctype="multipart/form-data"> <input type="hidden" name="formStamp_'.$this->cfieldname.'" id="formStamp_'.$this->cfieldname.'" value="'.$this->formStamp.'" /> <input type="hidden" name="_action" value="POST" /> <input type="hidden" 
id="value_'.$this->cfieldname.'" name="value_'.$this->cfieldname.'" value="'.runner_htmlspecialchars($jsonValue).'" /> <!-- The fileupload-buttonbar contains buttons to add/delete files and start/cancel the upload --> <div class="row fileupload-buttonbar"> <div class="span7"> <!-- The fileinput-button span is used to style the file input field as button --> <SPAN class="btn btn-success fileinput-button"> <A class="rnr-button filesUpload button" href="#" ><input class="fileinput-button-input" type="file" name="files[]" value="' ."Add files" .'" '. $multiple .' />' ."Add files" .'</A> </SPAN>' .($this->pageObject->pSetEdit->isAutoUpload($this->field) ? '' : ' <SPAN class="btn btn-primary start"> <A class="rnr-button" href="#" >' ."Upload" .'</A> </SPAN> <SPAN class="btn btn-warning cancel"> <A class="rnr-button" href="#" >' ."Cancel" .'</A> </SPAN>') .' </div> <!-- The global progress information --> <div class="fileupload-progress fade"> <!-- The global progress bar --> <div class="progress progress-success progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100"> <div class="bar" style="width:0;"></div> </div> <!-- The extended global progress information --> <div class="progress-extended">&nbsp;</div> </div> </div> <!-- The loading indicator is shown during file processing --> <div class="fileupload-loading"></div> <!-- The dummy for FireFox --> <input type="text" name="focusDummy" class="rnr-focusDummy" /> <br> <!-- The table listing the files available for upload/download --> <table><tbody class="files"></tbody></table> </form> '; if(!isset($this->container->globalVals["muploadTemplateIncluded"])) { echo '<script type="text/x-tmpl" id="template-download">{% for (var i=0, file; file=o.files[i]; i++) { %} <tr class="template-download fade"> {% if (file.error) { %} <td></td> <td class="name"><span>{%=file.name%}</span></td> <td class="size"><span dir="LTR">{%=o.formatFileSize(file.size)%}</span></td> <td class="error" colspan="2"><span 
class="label rnr-error">' ."" .' {%=locale.fileupload.errors[file.error] || file.error%}</span></td> {% } else { %} <td class="preview">{% if (file.thumbnail_url) { %} <a href="{%=file.url%}" title="{%=file.name%}" rel="gallery" download="{%=file.name%}" {% if (!file.isIco) { %} class="zoombox zgallery" {% } %} ><img src="{%=file.thumbnail_url%}&src=1"></a> {% } else { %} {% if (file.isImg) { %} <a href="{%=file.url%}&nodisp=1" title="{%=file.name%}" rel="gallery" download="{%=file.name%}" class="zoombox zgallery"><img src="{%=file.url%}&src=1"></a> {% } %} {% } %}</td> <td class="name"> <a href="{%=file.url%}" title="{%=file.name%}" rel="{%=file.thumbnail_url&&\'gallery\'%}" download="{%=file.name%}">{%=file.name%}</a> </td> <td class="size"><span dir="LTR">{%=o.formatFileSize(file.size)%}</span></td> <td colspan="2"></td> {% } %} <td class="delete"> {% if (!file.error) { %} <SPAN class="btn btn-danger delete" data-type="{%=file.delete_type%}" data-url="{%=file.delete_url%}" data-name="{%=file.name%}"> <A href="#" >' ."Delete" .'</A> </SPAN> {% } %} </td> </tr> {% } %} </script> <script type="text/x-tmpl" id="template-upload">{% for (var i=0, file; file=o.files[i]; i++) { %} <tr class="template-upload fade"> <td class="preview"><span class="fade"></span></td> <td class="name"><span>{%=file.name%}</span></td> <td class="size"><span>{%=o.formatFileSize(file.size)%}</span></td> {% if (file.error) { %} <td class="error" colspan="2"><span class="label rnr-error">' ."" .' 
{%=locale.fileupload.errors[file.error] || file.error%}</span></td> {% } else if (o.files.valid && !i) { %} <td> <div class="progress progress-success progress-striped active" role="progressbar" aria-valuemin="0" aria-valuemax="100" aria-valuenow="0"><div class="bar" style="width:0;"></div></div> </td> <td class="start">{% if (!o.options.autoUpload) { %} <SPAN class="btn btn-primary"> <A href="#" >' ."Upload" .'</A> </SPAN> {% } %}</td> {% } else { %} <td colspan="2"></td> {% } %} <td class="cancel">{% if (!i) { %} {% if (!file.error) { %} <SPAN class="btn btn-warning"> <A href="#" >' ."Cancel" .'</A> </SPAN> {% } %} {% } %}</td> </tr> {% } %}</script>'; $this->container->globalVals["muploadTemplateIncluded"] = true; } $this->buildControlEnd($validate); } /** * Create CSS code for specifying control's width */ function makeWidthStyle($widthPx) { if(0 == $widthPx) return ""; return "min-width: ".$widthPx."px"; } function initUploadHandler() { if(is_null($this->upload_handler)) { require_once getabspath("classes/uploadhandler.php"); $this->upload_handler = new UploadHandler(getOptionsForMultiUpload($this->pageObject->pSet, $this->field)); $this->upload_handler->pSet = $this->pageObject->pSetEdit; $this->upload_handler->field = $this->field; $this->upload_handler->table = $this->pageObject->tName; $this->upload_handler->pageType = $this->pageObject->pageType; } } function readWebValue(&$avalues, &$blobfields, $legacy1, $legacy2, &$filename_values) { $this->getPostValueAndType(); $this->formStamp = postvalue("formStamp_".$this->goodFieldName."_".$this->id); if (FieldSubmitted($this->goodFieldName."_".$this->id) && $this->formStamp != "") { $filesArray = my_json_decode($this->webValue); if(!is_array($filesArray) || count($filesArray) == 0) $this->webValue = ""; else { if(count($_SESSION["mupload_".$this->formStamp]) > 0) { foreach($_SESSION["mupload_".$this->formStamp] as $fileArray) $fileArray["deleted"] = true; } $result = array(); $uploadDir = 
$this->pageObject->pSetEdit->getLinkPrefix($this->field); $searchStr = ""; foreach ($filesArray as $file) { if(isset($_SESSION["mupload_".$this->formStamp][$file["name"]])) { $sessionFile = $_SESSION["mupload_".$this->formStamp][$file["name"]]["file"]; $searchStr .= $file["name"].",!"; $result[] = array("name" => $sessionFile["name"], "usrName" => $file["name"], "size" => $sessionFile["size"], "type" => $sessionFile["type"] ); if($this->pageObject->pSetEdit->getCreateThumbnail($this->field) && $sessionFile["thumbnail"] != "") { $lastIndex = count($result) - 1; $result[$lastIndex]["thumbnail"] = $sessionFile["thumbnail"]; $result[$lastIndex]["thumbnail_type"] = $sessionFile["thumbnail_type"]; $result[$lastIndex]["thumbnail_size"] = $sessionFile["thumbnail_size"]; } $_SESSION["mupload_".$this->formStamp][$file["name"]]["deleted"] = false; } } if(count($result) > 0) { $result[0]["searchStr"] = $searchStr.":sStrEnd"; $this->webValue = my_json_encode_unescaped_unicode($result); } else $this->webValue = ""; } } else $this->webValue = false; if(!($this->webValue===false)) { if( $this->connection->dbType == nDATABASE_Informix ) { if(IsTextType($this->pageObject->pSetEdit->getFieldType($this->field))) $blobfields[] = $this->field; } $avalues[$this->field] = $this->webValue; } } public function showDBValue($value, $keyLink) { $imageValue = ""; $this->initUploadHandler(); $this->upload_handler->tkeys = $keyLink; $filesArray = my_json_decode($value); if(!is_array($filesArray) || count($filesArray) == 0) { if($value == "") $filesArray = array(); else { $uploadedFile = $this->upload_handler->get_file_object($value); if(is_null($uploadedFile)) $filesArray = array(); else $filesArray = array($uploadedFile); } } foreach ($filesArray as $imageFile) { $userFile = $this->upload_handler->buildUserFile($imageFile); if($this->pageObject->pSetEdit->getViewFormat($this->field) == FORMAT_FILE) { $imageValue .= ($imageValue != "" ? 
"</br>" : ""); $imageValue .= '<a href="'.runner_htmlspecialchars($userFile["url"]).'">' .runner_htmlspecialchars($imageFile["usrName"] != "" ? $imageFile["usrName"] : $imageFile["name"]).'</a>'; } else if(CheckImageExtension($imageFile["name"])) { $imageValue .= ($imageValue != "" ? "</br>" : ""); if($this->pageObject->pSetEdit->showThumbnail($this->field)) { $thumbname = $userFile["thumbnail_url"]; $imageValue .= "<a target=_blank"; $imageValue .= " href=\"".runner_htmlspecialchars($userFile["url"])."\" class='zoombox'>"; $imageValue .= "<img"; if($thumbname == "" || $imageFile["name"] == $imageFile["thumbnail"]) { $imgWidth = $this->pageObject->pSetEdit->getImageWidth($this->field); $imageValue .=($imgWidth ? " width=".$imgWidth : ""); $imgHeight = $this->pageObject->pSetEdit->getImageHeight($this->field); $imageValue .=($imgHeight ? " height=".$imgHeight : ""); } $imageValue .= " border=0"; if($this->is508) $imageValue .= " alt=\"".runner_htmlspecialchars($userFile["name"])."\""; $imageValue .= " src=\"".runner_htmlspecialchars($userFile["thumbnail_url"])."\"></a>"; } else { $imageValue .= "<img"; $imgWidth = $this->pageObject->pSetEdit->getImageWidth($this->field); $imageValue.=($imgWidth ? " width=".$imgWidth : ""); $imgHeight = $this->pageObject->pSetEdit->getImageHeight($this->field); $imageValue .=($imgHeight ? 
" height=".$imgHeight : ""); $imageValue .= " border=0"; if($this->is508) $imageValue.= " alt=\"".runner_htmlspecialchars($userFile["name"])."\""; $imageValue .= " src=\"".runner_htmlspecialchars($userFile["url"])."\">"; } } } return $imageValue; } function SQLWhere($SearchFor, $strSearchOption, $SearchFor2, $etype, $isSuggest) { $baseResult = $this->baseSQLWhere($strSearchOption); if( $baseResult === false ) return ""; if( $baseResult != "" ) return $baseResult; if( IsCharType($this->type) ) { $gstrField = $this->getFieldSQLDecrypt(); if( !$this->pageObject->cipherer->isFieldPHPEncrypted($this->field) && $this->pageObject->pSetEdit->getNCSearch() ) { // search is case-insensitive $gstrField = $this->connection->upper( $gstrField ); } } elseif( $strSearchOption == "Contains" || $strSearchOption == "Starts with" ) { $gstrField = $this->connection->field2char($this->getFieldSQLDecrypt(), $this->type); } else { $gstrField = $this->getFieldSQLDecrypt(); } if( $strSearchOption == "Contains" || $strSearchOption == "Starts with" ) $SearchFor = $this->connection->escapeLIKEpattern( $SearchFor ); if( $strSearchOption == "Contains" ) $SearchFor = "%".$SearchFor."%"; else if( $strSearchOption == "Starts with" ) $SearchFor = $SearchFor."%"; if( $strSearchOption=="Contains" || $strSearchOption=="Starts with" || $strSearchOption == "Equals" ) return $this->buildWhere($gstrField, $SearchFor, $strSearchOption == "Equals"); return ""; } function buildWhere($gstrField, $value, $equals = false) { $likeVal = $this->connection->prepareString('%searchStr":"'.$value.':sStrEnd"%'); $notLikeVal = $this->connection->prepareString($value); if( IsCharType($this->type) && $this->pageObject->pSetEdit->getNCSearch() ) { // search is case-insensitive $likeVal = $this->connection->upper( $likeVal ); $notLikeVal = $this->connection->upper( $notLikeVal); } if( $this->connection->dbType == nDATABASE_Access ) $testSymbols = "'_{%}_'"; else $testSymbols = "'[{%'"; return "((".$gstrField." 
".$this->like." ".$testSymbols." and ".$gstrField." ".$this->like." ".$likeVal.") or (". $gstrField." not ".$this->like." ".$testSymbols." and ".$gstrField." ".($equals ? "=" : $this->like)." ".$notLikeVal."))"; } /** * Form the control specified search options array and built the control's search options markup * @param String selOpt The search option value * @param Boolean not It indicates if the search option negation is set * @param Boolean both It indicates if the control needs 'NOT'-options * @return String A string containing options markup */ function getSearchOptions($selOpt, $not, $both) { $optionsArray = array(); $isPHPEncripted = $this->pageObject->cipherer->isFieldPHPEncrypted($this->field); if(!$isPHPEncripted){ $optionsArray[] = CONTAINS; $optionsArray[] = EQUALS; } $optionsArray[] = EMPTY_SEARCH; if($both) { if(!$isPHPEncripted){ $optionsArray[] = NOT_CONTAINS; $optionsArray[] = NOT_EQUALS; } $optionsArray[] = NOT_EMPTY; } return $this->buildSearchOptions($optionsArray, $selOpt, $not, $both); } /** * Fill the response array with the suggest values * * @param String value * Note: the real value is preceeded with "_" so It's necessary to remove * the first character before json decoding . Also It's important to add "_" * to the beggining of the response suggest value because Searchsuggest * expects that it starts with this character. 
* @param String searchFor * @param &Array response * @param &Array row */ function suggestValue($value, $searchFor, &$response, &$row) { if(!$value) return; //value is preceeded with "_" $value = substr($value, 1); $filesArray = my_json_decode($value); if(!is_array($filesArray) || count($filesArray) == 0) $response[ "_".$value ] = "_".$value; else { for($i = 0; $i < count($filesArray) && count($response) < 10; $i++) { if($this->pageObject->pSetEdit->getNCSearch()) $pos = stripos($filesArray[$i]["usrName"], $searchFor); else $pos = strpos($filesArray[$i]["usrName"], $searchFor); if($pos !== false) $response[ "_".$filesArray[$i]["usrName"] ] = "_".$filesArray[$i]["usrName"]; } } } function afterSuccessfulSave() { if(count($_SESSION["mupload_".$this->formStamp]) > 0) { foreach($_SESSION["mupload_".$this->formStamp] as $fileArray) { if($fileArray["deleted"] === true) { $file_path = $fileArray["file"]["name"]; if (is_file($file_path)) { unlink($file_path); } if ($fileArray["file"]["thumbnail"] != "") { $file_path = $fileArray["file"]["thumbnail"]; if (is_file($file_path)) { unlink($file_path); } } } } } unset($_SESSION["mupload_".$this->formStamp]); } /** * @param String fieldValue * @return String */ function getFieldValueCopy( $fieldValue ) { $this->initUploadHandler(); $uploadFolder = $this->pageObject->pSetEdit->getUploadFolder( $this->field ); $absoluteUploadDirPath = $this->pageObject->pSetEdit->getFinalUploadFolder( $this->field ); $filesData = my_json_decode( $fieldValue ); if( !is_array($filesData) || count($filesData) == 0 ) return $fieldValue; foreach( $filesData as $idx => $fileData ) { $info = $this->upload_handler->pathinfo_local( $fileData["usrName"] ); $newFieldName = $this->upload_handler->tempnam_sfx( $absoluteUploadDirPath, $info['filename'], $info['extension'] ); runner_copy_file( getabspath($fileData["name"]), $absoluteUploadDirPath.$newFieldName ); $filesData[ $idx ]["name"] = $uploadFolder.$newFieldName; if( 
$this->pageObject->pSetEdit->getCreateThumbnail( $this->field ) ) { $thumbnailPrefix = $this->pageObject->pSetEdit->getStrThumbnail( $this->field ); $newThumbName = $this->upload_handler->tempnam_sfx( $absoluteUploadDirPath, $thumbnailPrefix.$info['filename'], $info['extension'] ); runner_copy_file( getabspath($fileData["thumbnail"]), $absoluteUploadDirPath.$newThumbName ); $filesData[ $idx ]["thumbnail"] = $uploadFolder.$newThumbName; } } return my_json_encode( $filesData ); } } ?>
{ "content_hash": "dff15b24a6b5b899155ce9dcabc4f0e4", "timestamp": "", "source": "github", "line_count": 593, "max_line_length": 170, "avg_line_length": 34.63069139966273, "alnum_prop": 0.5822458122321776, "repo_name": "tony19760619/PHPRunnerProjects", "id": "bd6d133d2871bd57b5993be6973cc646e04b1772", "size": "20536", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Events1/output/classes/controls/FileField.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "22122868" }, { "name": "HTML", "bytes": "10141141" }, { "name": "JavaScript", "bytes": "1149595" }, { "name": "LilyPond", "bytes": "1161" }, { "name": "PHP", "bytes": "45213301" } ], "symlink_target": "" }
package net.moonlithome.game.server.item.dao; /** * Created by moonlithome on 2015/3/18. */ public class ItemOperationDao { }
{ "content_hash": "73eda95e68a7e8bf3722bebe87cdb85f", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 45, "avg_line_length": 14.555555555555555, "alnum_prop": 0.7175572519083969, "repo_name": "aqzwss/Illusory_Journey", "id": "9b3d55b53ca7f374b0162c1562b2fea8ef3eab63", "size": "131", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ij.item.api/src/main/java/net/moonlithome/game/server/item/dao/ItemOperationDao.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "37930" } ], "symlink_target": "" }
package dds //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests" "github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses" ) // DescribeAuditRecords invokes the dds.DescribeAuditRecords API synchronously // api document: https://help.aliyun.com/api/dds/describeauditrecords.html func (client *Client) DescribeAuditRecords(request *DescribeAuditRecordsRequest) (response *DescribeAuditRecordsResponse, err error) { response = CreateDescribeAuditRecordsResponse() err = client.DoAction(request, response) return } // DescribeAuditRecordsWithChan invokes the dds.DescribeAuditRecords API asynchronously // api document: https://help.aliyun.com/api/dds/describeauditrecords.html // asynchronous document: https://help.aliyun.com/document_detail/66220.html func (client *Client) DescribeAuditRecordsWithChan(request *DescribeAuditRecordsRequest) (<-chan *DescribeAuditRecordsResponse, <-chan error) { responseChan := make(chan *DescribeAuditRecordsResponse, 1) errChan := make(chan error, 1) err := client.AddAsyncTask(func() { defer close(responseChan) defer close(errChan) response, err := client.DescribeAuditRecords(request) if err != nil { errChan <- err } else { responseChan <- response } }) if err != nil { errChan <- err close(responseChan) close(errChan) } return responseChan, errChan } // 
DescribeAuditRecordsWithCallback invokes the dds.DescribeAuditRecords API asynchronously // api document: https://help.aliyun.com/api/dds/describeauditrecords.html // asynchronous document: https://help.aliyun.com/document_detail/66220.html func (client *Client) DescribeAuditRecordsWithCallback(request *DescribeAuditRecordsRequest, callback func(response *DescribeAuditRecordsResponse, err error)) <-chan int { result := make(chan int, 1) err := client.AddAsyncTask(func() { var response *DescribeAuditRecordsResponse var err error defer close(result) response, err = client.DescribeAuditRecords(request) callback(response, err) result <- 1 }) if err != nil { defer close(result) callback(nil, err) result <- 0 } return result } // DescribeAuditRecordsRequest is the request struct for api DescribeAuditRecords type DescribeAuditRecordsRequest struct { *requests.RpcRequest ResourceOwnerId requests.Integer `position:"Query" name:"ResourceOwnerId"` ResourceOwnerAccount string `position:"Query" name:"ResourceOwnerAccount"` OwnerAccount string `position:"Query" name:"OwnerAccount"` EndTime string `position:"Query" name:"EndTime"` StartTime string `position:"Query" name:"StartTime"` OwnerId requests.Integer `position:"Query" name:"OwnerId"` QueryKeywords string `position:"Query" name:"QueryKeywords"` PageNumber requests.Integer `position:"Query" name:"PageNumber"` Database string `position:"Query" name:"Database"` Form string `position:"Query" name:"Form"` SecurityToken string `position:"Query" name:"SecurityToken"` PageSize requests.Integer `position:"Query" name:"PageSize"` DBInstanceId string `position:"Query" name:"DBInstanceId"` NodeId string `position:"Query" name:"NodeId"` User string `position:"Query" name:"User"` } // DescribeAuditRecordsResponse is the response struct for api DescribeAuditRecords type DescribeAuditRecordsResponse struct { *responses.BaseResponse RequestId string `json:"RequestId" xml:"RequestId"` TotalRecordCount int `json:"TotalRecordCount" 
xml:"TotalRecordCount"` PageNumber int `json:"PageNumber" xml:"PageNumber"` PageRecordCount int `json:"PageRecordCount" xml:"PageRecordCount"` Items ItemsInDescribeAuditRecords `json:"Items" xml:"Items"` } // CreateDescribeAuditRecordsRequest creates a request to invoke DescribeAuditRecords API func CreateDescribeAuditRecordsRequest() (request *DescribeAuditRecordsRequest) { request = &DescribeAuditRecordsRequest{ RpcRequest: &requests.RpcRequest{}, } request.InitWithApiInfo("Dds", "2015-12-01", "DescribeAuditRecords", "dds", "openAPI") return } // CreateDescribeAuditRecordsResponse creates a response to parse from DescribeAuditRecords response func CreateDescribeAuditRecordsResponse() (response *DescribeAuditRecordsResponse) { response = &DescribeAuditRecordsResponse{ BaseResponse: &responses.BaseResponse{}, } return }
{ "content_hash": "56106a0db009205dc1c336b7af86a47b", "timestamp": "", "source": "github", "line_count": 121, "max_line_length": 171, "avg_line_length": 43.71900826446281, "alnum_prop": 0.7257088846880907, "repo_name": "xiaozhu36/terraform-provider", "id": "c7ca7c57010bfac769927e476590f015a36ac91b", "size": "5290", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/github.com/aliyun/alibaba-cloud-sdk-go/services/dds/describe_audit_records.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "2195403" }, { "name": "HCL", "bytes": "818" }, { "name": "HTML", "bytes": "40750" }, { "name": "Makefile", "bytes": "1899" }, { "name": "Shell", "bytes": "1341" } ], "symlink_target": "" }
<!DOCTYPE HTML> <html> <head> <title>Example</title> <script type="text/javascript"> var firstVal = null; var secondVal; var equality = firstVal == secondVal; var identity = firstVal === secondVal; console.log("Equality: " + equality); console.log("Identity: " + identity); </script> </head> <body> This is a simple example </body> </html>
{ "content_hash": "94ff1ebd67652063d87d09ca5b69e4b7", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 46, "avg_line_length": 20.761904761904763, "alnum_prop": 0.5435779816513762, "repo_name": "longjl/JFinal_Authority", "id": "63d4628ea7c288199c35298c372bd5e28cad3bce", "size": "436", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jfinal-authority/src/main/webapp/static/代码/Part I/Listing 04-37.html", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "37" }, { "name": "Batchfile", "bytes": "63" }, { "name": "CSS", "bytes": "812169" }, { "name": "HTML", "bytes": "3539978" }, { "name": "Java", "bytes": "554518" }, { "name": "JavaScript", "bytes": "2963146" }, { "name": "PHP", "bytes": "71177" }, { "name": "Python", "bytes": "13446" }, { "name": "Shell", "bytes": "1490" } ], "symlink_target": "" }
num_workers = ENV["NUM_WORKERS"].to_i tor_port = ENV["TOR_PORT"].to_i tor_control_port = ENV["TOR_CONTROL_PORT"].to_i current_dir = File.expand_path File.dirname(__FILE__) log_dir = "#{current_dir}/log" FileUtils.mkdir_p log_dir num_workers.times do |num| God.watch do |w| w.name = "tor-#{num}" w.group = 'tor' w.start = "tor --SocksPort #{tor_port+num} --ControlPort #{tor_control_port+num} --CookieAuthentication 0 --HashedControlPassword \"16:3E49D6163CCA95F2605B339E07F753C8F567DE4200E33FDF4CC6B84E44\" --NewCircuitPeriod 60 --DataDirectory #{current_dir}/data/#{tor_port+num} --Log \"notice syslog\"" w.log = "#{log_dir}/tor.log" w.keepalive end end
{ "content_hash": "8e8a2d7bddcbc809e3e26d7ddc1b9060", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 306, "avg_line_length": 42.05882352941177, "alnum_prop": 0.6573426573426573, "repo_name": "vdaubry/tor-privoxy", "id": "60e76561582e47d41b1dfd28508d43f53f4dabcd", "size": "715", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "god/tor/config.god.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "4644" } ], "symlink_target": "" }
require File.expand_path(File.dirname(__FILE__) + '/spec_helper') if ENV['CABINET'] describe "TokyoStore" do it "should store fragment cache" do HDB.should_receive(:new).and_return(@mock_hdb = mock("HDB")) @mock_hdb.should_receive(:open).with('data.tch', 6).and_return(true) store = ActiveSupport::Cache.lookup_store :tokyo_store, "data.tch" store.should be_kind_of ActiveSupport::Cache::TokyoStore end it "should fail" do tokyo = HDB.new tokyo.open('data.tch') HDB.should_not_receive(:new) store = ActiveSupport::Cache.lookup_store :tokyo_store, tokyo store.should be_kind_of ActiveSupport::Cache::TokyoStore end describe "Similar" do before(:all) do @cache = ActiveSupport::Cache::TokyoStore.new 'data.tcb' end it "test_should_read_and_write_strings" do @cache.write('foo', 'bar') @cache.read('foo').should eql('bar') end it "test_should_read_and_write_hash" do @cache.write('foo', {:a => "b"}) @cache.read('foo').should eql({:a => "b"}) end it "test_should_read_and_write_hash" do @cache.write('foo', {:a => "b", :c => "d"}) @cache.read('foo').should eql({:a => "b", :c => "d"}) end end end end
{ "content_hash": "a86d927760a13b384c17eff66f41c8e4", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 74, "avg_line_length": 30.069767441860463, "alnum_prop": 0.5916473317865429, "repo_name": "jmettraux/tokyo_store", "id": "613148abc8754fa29a0aad25c1705beafe824d8d", "size": "1293", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "spec/tokyo_store_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "34031" } ], "symlink_target": "" }
from time import time from threading import Lock as ThreadLock from tori.graph import DependencyNode as BaseDependencyNode, DependencyManager from tori.db.common import Serializer, PseudoObjectId, ProxyObject from tori.db.entity import BasicAssociation from tori.db.exception import UOWRepeatedRegistrationError, UOWUpdateError, UOWUnknownRecordError, IntegrityConstraintError from tori.db.mapper import CascadingType from tori.db.metadata.helper import EntityMetadataHelper class Record(object): serializer = Serializer(0) STATUS_CLEAN = 1 STATUS_DELETED = 2 STATUS_DIRTY = 3 STATUS_NEW = 4 STATUS_IGNORED = 5 STATUS_LABEL_MAP = { 1: 'clean', 2: 'deleted', 3: 'dirty', 4: 'new', 5: 'ignored' } def __init__(self, entity, status): self.entity = entity self.status = status self.updated = time() self.original_data_set = Record.serializer.encode(self.entity) self.original_extra_association = Record.serializer.extra_associations(self.entity) def mark_as(self, status): self.status = status self.updated = time() def update(self): self.original_data_set = Record.serializer.encode(self.entity) self.original_extra_association = Record.serializer.extra_associations(self.entity) self.mark_as(Record.STATUS_CLEAN) class DependencyNode(BaseDependencyNode): """ Dependency Node This is designed to be bi-directional to maximize flexibility on traversing the graph. 
""" def __init__(self, record): super(DependencyNode, self).__init__() self.record = record @property def object_id(self): return self.record.entity.id @property def status(self): return self.record.status def _disavow_connection(self, node): return node.status == Record.STATUS_DELETED def __eq__(self, other): return self.record.entity.__class__ == other.record.entity.__class__ and self.object_id == other.object_id def __ne__(self, other): return self.record.entity.__class__ != other.record.entity.__class__ or self.object_id != other.object_id def __hash__(self): return self.created_at class UnitOfWork(object): """ Unit of Work This Unit of Work (UOW) is designed specifically for non-relational databases. .. note:: It is the design decision to make sub-commit methods available so that when it is used with Imagination Framework, the other Imagination entity may intercept before or after actually committing data. In the other word, Imagination Framework acts as an event controller for any actions (public methods) of this class. """ serializer = Serializer(0) def __init__(self, entity_manager): # given property self._em = entity_manager # caching properties self._record_map = {} # Object Hash => Record self._object_id_map = {} # str(ObjectID) => Object Hash self._dependency_map = None # Locks self._blocker_activated = False self._blocking_lock = ThreadLock() self._operational_lock = ThreadLock() def _freeze(self): if not self._blocker_activated: return self._operational_lock.acquire() def _unfreeze(self): if not self._blocker_activated: return self._operational_lock.release() def refresh(self, entity): """ Refresh the entity .. note:: This method :param entity: the target entity :type entity: object """ self._freeze() record = self.retrieve_record(entity) if record.status == Record.STATUS_DELETED: return # Ignore the entity marked as deleted. 
elif record.status not in [Record.STATUS_CLEAN, Record.STATUS_DIRTY]: raise NonRefreshableEntity('The current record is not refreshable.') collection = self._em.collection(entity.__class__) updated_data_set = collection.driver.find_one(collection.name, {'_id': entity.id}) # Reset the attributes. for attribute_name in updated_data_set: entity.__setattr__(attribute_name, updated_data_set[attribute_name]) # Remove the non-existed attributes. for attribute_name in record.original_data_set: if attribute_name in updated_data_set: continue entity.__delattr__(attribute_name) # Update the original data set and reset the status if necessary. record.original_data_set = Record.serializer.encode(entity) record.extra_association = Record.serializer.extra_associations(entity) if record.status == Record.STATUS_DIRTY: record.mark_as(Record.STATUS_CLEAN) # Remap any one-to-many or many-to-many relationships. self._em.apply_relational_map(entity) self._cascade_operation(entity, CascadingType.REFRESH) self._unfreeze() def register_new(self, entity): """ Register a new entity :param entity: the entity to register :type entity: object """ self._freeze() self._register_new(entity) self._unfreeze() def _register_new(self, entity): """ Register a entity as new (protected) .. warning:: This method bypasses the thread lock imposed in the public method. It is for internal use only. :param entity: the target entity :type entity: object """ uid = self._retrieve_entity_guid(entity) if self.has_record(entity): raise UOWRepeatedRegistrationError('Could not mark the entity as new.') if not entity.id: entity.id = self._generate_pseudo_object_id() self._record_map[uid] = Record(entity, Record.STATUS_NEW) # Map the pseudo object ID to the entity. 
self._object_id_map[self._convert_object_id_to_str(entity.id, entity)] = uid self._cascade_operation(entity, CascadingType.PERSIST) def register_dirty(self, entity): """ Register the entity with the dirty bit :param entity: the entity to register :type entity: object """ self._freeze() record = self.retrieve_record(entity) if record.status == Record.STATUS_NEW: try: return self.register_new(entity) except UOWRepeatedRegistrationError as exception: pass elif record.status in [Record.STATUS_CLEAN, Record.STATUS_DELETED]: record.mark_as(Record.STATUS_DIRTY) self._cascade_operation(entity, CascadingType.PERSIST) self._unfreeze() def register_clean(self, entity): """ Register the entity with the clean bit :param entity: the entity to register :type entity: object """ uid = self._retrieve_entity_guid(entity) if uid in self._record_map: raise UOWRepeatedRegistrationError('Could not mark the entity as clean') self._record_map[uid] = Record(entity, Record.STATUS_CLEAN) # Map the real object ID to the entity self._object_id_map[self._convert_object_id_to_str(entity.id, entity)] = uid def register_deleted(self, entity): """ Register the entity with the removal bit :param entity: the entity to register :type entity: object """ self._freeze() self._register_deleted(entity) self._unfreeze() def _register_deleted(self, entity): """ Register a entity as deleted (no lock) .. warning:: This method bypasses the thread lock imposed in the public method. It is for internal use only. 
:param entity: the target entity :type entity: object """ record = self.retrieve_record(entity) if record.status == Record.STATUS_NEW or isinstance(entity.id, PseudoObjectId): record.mark_as(Record.STATUS_IGNORED) else: record.mark_as(Record.STATUS_DELETED) self._cascade_operation(entity, CascadingType.DELETE) def _cascade_operation(self, reference, cascading_type): entity = reference if isinstance(reference, ProxyObject): entity = reference._actual if not EntityMetadataHelper.hasMetadata(entity): return entity_meta = EntityMetadataHelper.extract(entity) relational_map = entity_meta.relational_map for property_name in relational_map: guide = relational_map[property_name] if guide.inverted_by: continue actual_data = entity.__getattribute__(property_name) reference = self.hydrate_entity(actual_data) if not guide.cascading_options\ or cascading_type not in guide.cascading_options\ or not reference: continue if isinstance(reference, list): for sub_reference in actual_data: self._forward_operation( self.hydrate_entity(sub_reference), cascading_type, guide.target_class ) continue self._forward_operation( reference, cascading_type, guide.target_class ) def _forward_operation(self, reference, cascading_type, expected_class): if cascading_type == CascadingType.PERSIST: if type(reference) is not expected_class: reference_type = type(reference) raise IntegrityConstraintError( 'Expected an instance of class {} ({}) but received one of {} ({})'.format( expected_class.__name__, expected_class.__module__, reference_type.__name__, reference_type.__module__ ) ) if self.is_new(reference): try: self.register_new(reference) except UOWRepeatedRegistrationError as exception: pass else: self.register_dirty(reference) elif cascading_type == CascadingType.DELETE: self.register_deleted(reference) elif cascading_type == CascadingType.REFRESH: self.refresh(reference) def is_new(self, reference): return not reference.id or isinstance(reference.id, PseudoObjectId) def hydrate_entity(self, 
reference): return reference._actual if isinstance(reference, ProxyObject) else reference def commit(self): self._blocking_lock.acquire() self._blocker_activated = True self._freeze() # Make changes on the normal entities. self._commit_changes() # Then, make changes on external associations. self._add_or_remove_associations() self._commit_changes(BasicAssociation) # Synchronize all records self._synchronize_records() self._unfreeze() self._blocker_activated = False self._blocking_lock.release() def _commit_changes(self, expected_class=None): # Load the sub graph of supervised collections. for c in self._em.repositories(): if not c.has_cascading(): continue c.filter(force_loading=True) commit_order = self._compute_order() # Commit changes to nodes. for commit_node in commit_order: uid = self._retrieve_entity_guid_by_id(commit_node.object_id, commit_node.record.entity.__class__) record = self._record_map[uid] if expected_class and not isinstance(record.entity, expected_class): continue collection = self._em.collection(record.entity.__class__) change_set = self._compute_change_set(record) if record.status == Record.STATUS_NEW: self._synchronize_new( collection, record.entity, change_set ) elif record.status == Record.STATUS_DIRTY and change_set: self._synchronize_update( collection, record.entity.id, record.original_data_set, change_set ) elif record.status == Record.STATUS_DIRTY and not change_set: record.mark_as(Record.STATUS_CLEAN) elif record.status == Record.STATUS_DELETED and commit_node.score == 0: self._synchronize_delete(collection, record.entity.id) elif record.status == Record.STATUS_DELETED and commit_node.score > 0: record.mark_as(Record.STATUS_CLEAN) def _synchronize_new(self, repository, entity, change_set): """ Synchronize the new / unsupervised data :param repository: the target repository :param entity: the entity :param change_set: the change_set representing the entity """ pseudo_key = self._convert_object_id_to_str(entity.id, entity) object_id = 
repository.driver.insert(repository.name, change_set) entity.id = object_id # update the entity ID actual_key = self._convert_object_id_to_str(object_id, entity) self._object_id_map[actual_key] = self._object_id_map[pseudo_key] def _synchronize_update(self, repository, object_id, old_data_set, new_data_set): """ Synchronize the updated data :param repository: the target repository :param object_id: the object ID :param old_data_set: the original data (for event interception) :param new_data_set: the updated data """ repository.driver.update( repository.name, {'_id': object_id}, new_data_set ) def _synchronize_delete(self, repository, object_id): """ Synchronize the deleted data :param repository: the target repository :param object_id: the object ID """ repository.driver.remove(repository.name, {'_id': object_id}) def _synchronize_records(self): writing_statuses = [Record.STATUS_NEW, Record.STATUS_DIRTY] removed_statuses = [Record.STATUS_DELETED, Record.STATUS_IGNORED] uid_list = list(self._record_map.keys()) for uid in uid_list: record = self._record_map[uid] if record.status in removed_statuses: del self._record_map[uid] elif record.status in writing_statuses: record.update() def retrieve_record(self, entity): uid = self._retrieve_entity_guid(self._em._force_load(entity)) if uid not in self._record_map: raise UOWUnknownRecordError('Unable to retrieve the record for this entity.') return self._record_map[uid] def delete_record(self, entity): uid = self._retrieve_entity_guid(entity) if uid not in self._record_map: raise UOWUnknownRecordError('Unable to retrieve the record for this entity.') del self._record_map[uid] def has_record(self, entity): return self._retrieve_entity_guid(entity) in self._record_map def find_recorded_entity(self, object_id, cls): object_key = self._convert_object_id_to_str(object_id, cls=cls) if object_key in self._object_id_map: try: return self._record_map[self._object_id_map[object_key]] except KeyError as exception: # This exception is 
raised possibly due to that the record is deleted. del self._object_id_map[object_key] return None def _compute_order(self): self._construct_dependency_graph() # After constructing the dependency graph (as a supposedly directed acyclic # graph), do the topological sorting from the dependency graph. return DependencyManager.get_order(self._dependency_map) def _compute_change_set(self, record): current_set = Record.serializer.encode(record.entity) if record.status == Record.STATUS_NEW: return current_set elif record.status == Record.STATUS_DELETED: return record.entity.id original_set = dict(record.original_data_set) change_set = { '$set': {}, '$unset': {} } original_property_set = set(original_set.keys()) current_property_set = set(current_set.keys()) expected_property_list = original_property_set.intersection(current_property_set) expected_property_list = expected_property_list.union(current_property_set.difference(original_property_set)) unexpected_property_list = original_property_set.difference(current_property_set) # Add or update properties for name in expected_property_list: if name in original_set and original_set[name] == current_set[name]: continue change_set['$set'][name] = current_set[name] # Remove unwanted properties for name in unexpected_property_list: change_set['$unset'][name] = 1 directive_list = list(change_set.keys()) # Clean up the change set for directive in directive_list: if change_set[directive]: continue del change_set[directive] return change_set def _compute_connection_changes(self, record): """ Compute changes in external associations originated from the entity of the current record This method is designed specifically to deal with many-to-many association by adding or removing associative entities which their origin is from the entity from the current record. 
:param record: the UOW record :type record: tori.db.uow.Record """ current = Record.serializer.extra_associations(record.entity) original = dict(record.original_extra_association) change_set = {} original_property_set = set(original.keys()) current_property_set = set(current.keys()) expected_property_list = original_property_set.intersection(current_property_set) expected_property_list = expected_property_list.union(current_property_set.difference(original_property_set)) unexpected_property_list = expected_property_list if record.status == Record.STATUS_DELETED else original_property_set.difference(current_property_set) # Find new associations for name in expected_property_list: current_set = set(current[name]) original_set = set(original[name]) diff_additions = current_set diff_deletions = [] if record.status != Record.STATUS_NEW: diff_additions = current_set.difference(original_set) diff_deletions = original_set.difference(current_set) change_set[name] = { 'action': 'update', 'new': diff_additions, 'deleted': diff_deletions } # Find new associations for name in unexpected_property_list: change_set[name] = { 'action': 'purge' } return change_set def _load_extra_associations(self, record, change_set): origin_id = record.entity.id relational_map = EntityMetadataHelper.extract(record.entity).relational_map for property_name in relational_map: if property_name not in change_set: continue property_change_set = change_set[property_name] guide = relational_map[property_name] repository = self._em.collection(guide.association_class.cls) if property_change_set['action'] == 'update': for unlinked_destination_id in property_change_set['deleted']: association = repository.filter_one({'origin': origin_id, 'destination': unlinked_destination_id}) if not association: continue self._register_deleted(association) for new_destination_id in property_change_set['new']: association = repository.new(origin=origin_id, destination=new_destination_id) self._register_new(association) 
return elif property_change_set['action'] == 'purge': for association in repository.filter({'origin': origin_id}): self._register_deleted(association) return raise RuntimeError('Unknown changes on external associations for {}'.format(origin_id)) def _add_or_remove_associations(self): # Find out if UOW needs to deal with extra records (associative collection). uid_list = list(self._record_map.keys()) for uid in uid_list: record = self._record_map[uid] if record.status == Record.STATUS_CLEAN: continue change_set = self._compute_connection_changes(record) if not change_set: continue self._load_extra_associations(record, change_set) def _retrieve_entity_guid(self, entity): return self._retrieve_entity_guid_by_id(entity.id, entity.__class__)\ if isinstance(entity, ProxyObject)\ else hash(entity) def _retrieve_entity_guid_by_id(self, id, cls): return self._object_id_map[self._convert_object_id_to_str(id, cls=cls)] def _generate_pseudo_object_id(self): return PseudoObjectId() def _convert_object_id_to_str(self, object_id, entity=None, cls=None): class_hash = 'generic' if not cls and entity: cls = entity.__class__ if cls: metadata = EntityMetadataHelper.extract(cls) class_hash = metadata.collection_name object_key = '{}/{}'.format(class_hash, str(object_id)) return object_key def _construct_dependency_graph(self): self._dependency_map = {} for uid in self._record_map: record = self._record_map[uid] object_id = self._convert_object_id_to_str(record.entity.id, record.entity) current_set = Record.serializer.encode(record.entity) extra_association = Record.serializer.extra_associations(record.entity) # Register the current entity into the dependency map if it's never # been registered or eventually has no dependencies. 
if object_id not in self._dependency_map: self._dependency_map[object_id] = DependencyNode(record) relational_map = EntityMetadataHelper.extract(record.entity).relational_map if not relational_map: continue # Go through the relational map to establish relationship between dependency nodes. for property_name in relational_map: guide = relational_map[property_name] # Ignore a property from reverse mapping. if guide.inverted_by: continue # ``data`` can be either an object ID or list. data = current_set[property_name] if not data: # Ignore anything evaluated as False. continue elif not isinstance(data, list): other_uid = self._retrieve_entity_guid_by_id(data, guide.target_class) other_record = self._record_map[other_uid] self._register_dependency(record, other_record) continue for dependency_object_id in data: other_uid = self._retrieve_entity_guid_by_id(dependency_object_id, guide.target_class) other_record = self._record_map[other_uid] self._register_dependency(record, other_record) return self._dependency_map def _retrieve_dependency_order(self, node, priority_order): if node.walked: return node.walked = True initial_order = list(node.adjacent_nodes) for adjacent_node in initial_order: self._retrieve_dependency_order(adjacent_node, priority_order) if node not in priority_order: priority_order.append(node) def _register_dependency(self, a, b): key_a = self._convert_object_id_to_str(a.entity.id, a.entity) key_b = self._convert_object_id_to_str(b.entity.id, b.entity) if key_a not in self._dependency_map: self._dependency_map[key_a] = DependencyNode(a) if key_b not in self._dependency_map: self._dependency_map[key_b] = DependencyNode(b) self._dependency_map[key_a].connect(self._dependency_map[key_b])
{ "content_hash": "e28c09e809cf138e96f344922fcca061", "timestamp": "", "source": "github", "line_count": 742, "max_line_length": 159, "avg_line_length": 34.29245283018868, "alnum_prop": 0.5995283945765376, "repo_name": "shiroyuki/Tori", "id": "b2234200471ec71e6c0e91c6a21c9f14f245e0f1", "size": "25469", "binary": false, "copies": "1", "ref": "refs/heads/v3", "path": "tori/db/uow.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "92628" }, { "name": "HTML", "bytes": "571" }, { "name": "Makefile", "bytes": "994" }, { "name": "Python", "bytes": "261193" }, { "name": "Shell", "bytes": "72" } ], "symlink_target": "" }
/** * Omnipotent Sparkline binder for KnockoutJS. * * @author: Mr-Yellow <[email protected]> */ ko.bindingHandlers.sparkLine = { data: [], defaults: { type: 'line', height: 'auto', width: 'auto', fillColor: '#72e572', lineColor: '#000000', spotColor: '#f08000', minSpotColor: undefined, maxSpotColor: undefined, highlightSpotColor: undefined, highlightLineColor: undefined, chartRangeMin: -20, chartRangeMax: 100, normalRangeMin: 5, normalRangeMax: 60, normalRangeColor: '#e55e5b', drawNormalOnTop: true, tooltipFormatter: function (sparkline, options, fields) { console.log([fields.x,fields.y]); return ko.bindingHandlers.sparkLine.epochToLocal(fields.x) + ': ' + fields.y; }, dateFormat: 'LLLL' }, epochToLocal: function(epoch) { if (epoch) { // JS Epoch to localised format return moment(epoch).format(ko.bindingHandlers.sparkLine.defaults.dateFormat); } }, /** * ko binding init */ init: function(element, valueAccessor, allBindingsAccessor, deprecated, bindingContext) { console.log('--INIT:'+element.id); var observable = valueAccessor() || { }; var unwrapped = ko.unwrap(observable); var responsiveTimer = 0; ko.bindingHandlers.sparkLine.data = unwrapped; if (allBindingsAccessor().hasOwnProperty('sparkType')) ko.bindingHandlers.sparkLine.defaults.type = allBindingsAccessor().sparkType; if (allBindingsAccessor().hasOwnProperty('sparkHeight')) ko.bindingHandlers.sparkLine.defaults.height = allBindingsAccessor().sparkHeight; if (allBindingsAccessor().hasOwnProperty('sparkWidth')) ko.bindingHandlers.sparkLine.defaults.width = allBindingsAccessor().sparkWidth; if (allBindingsAccessor().hasOwnProperty('sparkChartRangeMin')) ko.bindingHandlers.sparkLine.defaults.chartRangeMin = allBindingsAccessor().sparkChartRangeMin; if (allBindingsAccessor().hasOwnProperty('sparkChartRangeMax')) ko.bindingHandlers.sparkLine.defaults.chartRangeMax = allBindingsAccessor().sparkChartRangeMax; if (allBindingsAccessor().hasOwnProperty('sparkNormalRangeMin')) 
ko.bindingHandlers.sparkLine.defaults.normalRangeMin = allBindingsAccessor().sparkNormalRangeMin; if (allBindingsAccessor().hasOwnProperty('sparkNormalRangeMax')) ko.bindingHandlers.sparkLine.defaults.normalRangeMax = allBindingsAccessor().sparkNormalRangeMax; if (allBindingsAccessor().hasOwnProperty('sparkFillColor')) ko.bindingHandlers.sparkLine.defaults.fillColor = allBindingsAccessor().sparkFillColor; if (allBindingsAccessor().hasOwnProperty('sparkLineColor')) ko.bindingHandlers.sparkLine.defaults.lineColor = allBindingsAccessor().sparkLineColor; if (allBindingsAccessor().hasOwnProperty('sparkSpotColor')) ko.bindingHandlers.sparkLine.defaults.spotColor = allBindingsAccessor().sparkSpotColor; if (allBindingsAccessor().hasOwnProperty('sparkNormalRangeColor')) ko.bindingHandlers.sparkLine.defaults.normalRangeColor = allBindingsAccessor().sparkNormalRangeColor; if (allBindingsAccessor().hasOwnProperty('sparkChartRangeMinX')) ko.bindingHandlers.sparkLine.defaults.chartRangeMinX = allBindingsAccessor().sparkChartRangeMinX; if (allBindingsAccessor().hasOwnProperty('sparkChartRangeMaxX')) ko.bindingHandlers.sparkLine.defaults.chartRangeMaxX = allBindingsAccessor().sparkChartRangeMaxX; if (allBindingsAccessor().hasOwnProperty('sparkTimeWindowMin')) ko.bindingHandlers.sparkLine.defaults.timeWindowMin = allBindingsAccessor().sparkTimeWindowMin; if (allBindingsAccessor().hasOwnProperty('sparkTimeWindowMax')) ko.bindingHandlers.sparkLine.defaults.timeWindowMax = allBindingsAccessor().sparkTimeWindowMax; if (allBindingsAccessor().hasOwnProperty('sparkDateformat')) ko.bindingHandlers.sparkLine.defaults.dateFormat = allBindingsAccessor().sparkDateformat; if (allBindingsAccessor().hasOwnProperty('sparkTooltipFormatter')) ko.bindingHandlers.sparkLine.defaults.tooltipFormatter = allBindingsAccessor().sparkTooltipFormatter; /** * Recalculate window. 
*/ if (ko.bindingHandlers.sparkLine.defaults.timeWindowMin && ko.bindingHandlers.sparkLine.defaults.timeWindowMax) { var now = new Date().getTime(); ko.bindingHandlers.sparkLine.defaults.chartRangeMinX = now - ko.bindingHandlers.sparkLine.defaults.timeWindowMin; ko.bindingHandlers.sparkLine.defaults.chartRangeMaxX = now - ko.bindingHandlers.sparkLine.defaults.timeWindowMax; ko.bindingHandlers.sparkLine.defaults.chartRangeClipX = true; } /** * Initalise sparkline. */ var sparkResize; var sparklineInit = function() { $(element).sparkline(ko.bindingHandlers.sparkLine.data, ko.bindingHandlers.sparkLine.defaults); } $(window).resize(function(e) { clearTimeout(sparkResize); sparkResize = setTimeout(sparklineInit, 500); }); sparklineInit(); //return { controlsDescendantBindings: true }; }, /** * ko binding update */ update: function(element, valueAccessor, allBindingsAccessor, deprecated, bindingContext) { console.log('--UPDATE:'+element.id); var observable = valueAccessor() || { }; var unwrapped = ko.unwrap(observable); ko.bindingHandlers.sparkLine.data = unwrapped; /** * Recalculate window. */ if (ko.bindingHandlers.sparkLine.defaults.timeWindowMin && ko.bindingHandlers.sparkLine.defaults.timeWindowMax) { var now = new Date().getTime(); ko.bindingHandlers.sparkLine.defaults.chartRangeMinX = now - ko.bindingHandlers.sparkLine.defaults.timeWindowMin; ko.bindingHandlers.sparkLine.defaults.chartRangeMaxX = now - ko.bindingHandlers.sparkLine.defaults.timeWindowMax; ko.bindingHandlers.sparkLine.defaults.chartRangeClipX = true; } /** * Reinitalise sparkline. */ $(element).sparkline(ko.bindingHandlers.sparkLine.data, ko.bindingHandlers.sparkLine.defaults); } };
{ "content_hash": "5c130d42ee321a123920776325b7e16a", "timestamp": "", "source": "github", "line_count": 132, "max_line_length": 176, "avg_line_length": 47.63636363636363, "alnum_prop": 0.7083333333333334, "repo_name": "mryellow/knockoutjs-sparkline", "id": "742f029ba7699ae98b7c1cc42c19484de243496c", "size": "6288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/knockoutjs-sparkline.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "214" }, { "name": "JavaScript", "bytes": "13642" } ], "symlink_target": "" }
#include <iostream> #include <boost/property_tree/ptree.hpp> #include <gtest/gtest.h> #include <osquery/core.h> #include <osquery/distributed.h> #include <osquery/enroll.h> #include <osquery/sql.h> #include "osquery/core/json.h" #include "osquery/sql/sqlite_util.h" #include "osquery/tests/test_additional_util.h" #include "osquery/tests/test_util.h" namespace pt = boost::property_tree; DECLARE_string(distributed_tls_read_endpoint); DECLARE_string(distributed_tls_write_endpoint); namespace osquery { class DistributedTests : public testing::Test { protected: void SetUp() { TLSServerRunner::start(); TLSServerRunner::setClientConfig(); clearNodeKey(); distributed_tls_read_endpoint_ = Flag::getValue("distributed_tls_read_endpoint"); Flag::updateValue("distributed_tls_read_endpoint", "/distributed_read"); distributed_tls_write_endpoint_ = Flag::getValue("distributed_tls_write_endpoint"); Flag::updateValue("distributed_tls_write_endpoint", "/distributed_write"); Registry::setActive("distributed", "tls"); } void TearDown() { TLSServerRunner::stop(); TLSServerRunner::unsetClientConfig(); clearNodeKey(); Flag::updateValue("distributed_tls_read_endpoint", distributed_tls_read_endpoint_); Flag::updateValue("distributed_tls_write_endpoint", distributed_tls_write_endpoint_); } protected: std::string distributed_tls_read_endpoint_; std::string distributed_tls_write_endpoint_; }; TEST_F(DistributedTests, test_serialize_distributed_query_request) { DistributedQueryRequest r; r.query = "foo"; r.id = "bar"; pt::ptree tree; auto s = serializeDistributedQueryRequest(r, tree); EXPECT_TRUE(s.ok()); EXPECT_EQ(tree.get<std::string>("query"), "foo"); EXPECT_EQ(tree.get<std::string>("id"), "bar"); } TEST_F(DistributedTests, test_deserialize_distributed_query_request) { pt::ptree tree; tree.put<std::string>("query", "foo"); tree.put<std::string>("id", "bar"); DistributedQueryRequest r; auto s = deserializeDistributedQueryRequest(tree, r); EXPECT_TRUE(s.ok()); EXPECT_EQ(r.query, "foo"); 
EXPECT_EQ(r.id, "bar"); } TEST_F(DistributedTests, test_deserialize_distributed_query_request_json) { auto json = "{" " \"query\": \"foo\"," " \"id\": \"bar\"" "}"; DistributedQueryRequest r; auto s = deserializeDistributedQueryRequestJSON(json, r); EXPECT_TRUE(s.ok()); EXPECT_EQ(r.query, "foo"); EXPECT_EQ(r.id, "bar"); } TEST_F(DistributedTests, test_serialize_distributed_query_result) { DistributedQueryResult r; r.request.query = "foo"; r.request.id = "bar"; Row r1; r1["foo"] = "bar"; r.results = {r1}; pt::ptree tree; auto s = serializeDistributedQueryResult(r, tree); EXPECT_TRUE(s.ok()); EXPECT_EQ(tree.get<std::string>("request.query"), "foo"); EXPECT_EQ(tree.get<std::string>("request.id"), "bar"); auto& results = tree.get_child("results"); for (const auto& q : results) { for (const auto& row : q.second) { EXPECT_EQ(row.first, "foo"); EXPECT_EQ(q.second.get<std::string>(row.first), "bar"); } } } TEST_F(DistributedTests, test_deserialize_distributed_query_result) { pt::ptree request; request.put<std::string>("id", "foo"); request.put<std::string>("query", "bar"); pt::ptree row; row.put<std::string>("foo", "bar"); pt::ptree results; results.push_back(std::make_pair("", row)); pt::ptree query_result; query_result.put_child("request", request); query_result.put_child("results", results); DistributedQueryResult r; auto s = deserializeDistributedQueryResult(query_result, r); EXPECT_TRUE(s.ok()); EXPECT_EQ(r.request.id, "foo"); EXPECT_EQ(r.request.query, "bar"); EXPECT_EQ(r.results[0]["foo"], "bar"); } TEST_F(DistributedTests, test_deserialize_distributed_query_result_json) { auto json = "{" " \"request\": {" " \"id\": \"foo\"," " \"query\": \"bar\"" " }," " \"results\": [" " {" " \"foo\": \"bar\"" " }" " ]" "}"; DistributedQueryResult r; auto s = deserializeDistributedQueryResultJSON(json, r); EXPECT_TRUE(s.ok()); EXPECT_EQ(r.request.id, "foo"); EXPECT_EQ(r.request.query, "bar"); EXPECT_EQ(r.results[0]["foo"], "bar"); } TEST_F(DistributedTests, test_workflow) { auto 
dist = Distributed(); auto s = dist.pullUpdates(); EXPECT_TRUE(s.ok()); EXPECT_EQ(s.toString(), "OK"); EXPECT_EQ(dist.getPendingQueryCount(), 2U); EXPECT_EQ(dist.results_.size(), 0U); s = dist.runQueries(); EXPECT_TRUE(s.ok()); EXPECT_EQ(s.toString(), "OK"); EXPECT_EQ(dist.getPendingQueryCount(), 0U); EXPECT_EQ(dist.results_.size(), 0U); } }
{ "content_hash": "b187818ae77fe68d6dcf9f0455bef994", "timestamp": "", "source": "github", "line_count": 180, "max_line_length": 78, "avg_line_length": 26.483333333333334, "alnum_prop": 0.6463184392699811, "repo_name": "friedbutter/osquery", "id": "d2b712c5d3594d7aa7baf83eeab315900c927aae", "size": "5082", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "osquery/distributed/tests/distributed_tests.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "38093" }, { "name": "C++", "bytes": "2038572" }, { "name": "CMake", "bytes": "71896" }, { "name": "Makefile", "bytes": "6070" }, { "name": "Objective-C++", "bytes": "56747" }, { "name": "Shell", "bytes": "2038" }, { "name": "Thrift", "bytes": "2969" } ], "symlink_target": "" }
require 'spec_helper' require 'yaml' YAML_CONF = <<CONF --- folder: default: /tmp aliases: webdev: ~/Development/Webdev work: ~/WorkDev # don't ask where to put it ask: true github: default: true username: trishume description: default: CONF GITHUB_Q = "Create a Github repo [y/n blank for true]" FOLDER_Q = "Project folder path [blank for /tmp]" describe Proj::Configurator do before do conf = YAML::load(YAML_CONF) @creator = Proj::Configurator.new(conf) @asker = lambda {|q,a| ""} end it 'should ask questions in the right format' do qs = @creator.questions qs.size.should == Proj::Attributes::registered.size qs['github'].should == GITHUB_Q qs['folder'].should == FOLDER_Q end it 'shouldn\'t ask the question if ask is false' do @creator.config['github']['ask'] = false qs = @creator.questions qs['github'].should == nil qs['folder'].should == FOLDER_Q end it 'should properly parse question answers' do answers = { 'github' => 'n', 'folder' => '~/Desktop/' } proj = @creator.create(answers,@asker) proj['github'].should == false proj['folder'].should == '~/Desktop/' end it 'should handle aliases in answers' do answers = { 'github' => 'y', 'folder' => 'webdev' } proj = @creator.create(answers,@asker) proj['folder'].should == '~/Development/Webdev' end it 'should handle nil for default' do answers = { 'folder' => nil } proj = @creator.create(answers,@asker) proj['folder'].should == '/tmp' proj['github'].should_not == false end end
{ "content_hash": "21f744fe63003ce36870304fd6d473a7", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 55, "avg_line_length": 24.954545454545453, "alnum_prop": 0.6089860352155434, "repo_name": "trishume/proj", "id": "f3740dbde76cdc1b12d0d3b4e93639d86dda26d1", "size": "1647", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/configurator_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "18715" } ], "symlink_target": "" }
<img src="{{this.image|url}}" {% if this.caption %}alt="{{this.caption}}"{% endif %}> {% if this.caption %} <p class="caption">{{this.caption}}</p> {% endif %}
{ "content_hash": "fa7d506ee9fe034e66d75b45d1c0fc7b", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 85, "avg_line_length": 39.75, "alnum_prop": 0.5849056603773585, "repo_name": "joythewizard/lektor-foundation", "id": "d4bc46279d6f28dcdf1f5e5274944391d9673412", "size": "159", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "templates/blocks/select-image.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "315590" }, { "name": "HTML", "bytes": "3851" } ], "symlink_target": "" }
<?php namespace Awdn\VigilantQueue\Queue; use Traversable; /** * Class PriorityHashQueue * @package Awdn\VigilantQueue\Queue */ class PriorityHashQueue implements \IteratorAggregate, \ArrayAccess, \Countable { /** * @var MinPriorityQueue */ private $queue; /** * @var \ArrayObject */ private $data; /** * @var \ArrayObject */ private $priority; /** * @var \ArrayObject */ private $type; /** * @var int */ private $defaultPriority = 1; /** * @var int */ private $extractPolicy = self::EXTRACT_ALL; const EXTRACT_ALL = 1; const EXTRACT_KEY = 2; const EXTRACT_PRIORITY = 3; const EXTRACT_DATA = 4; const EXTRACT_TYPE = 5; /** * Default data mode * @var int */ private $dataMode = self::DATA_MODE_REPLACE; /** * Data mode depending on the message type * @var array */ private $dataModeByType = []; const DATA_MODE_REPLACE = 1; const DATA_MODE_APPEND = 2; /** * @var int */ private $itemMaxSizeKb = 1024; /** * PriorityHashQueue constructor. */ public function __construct() { $this->queue = new MinPriorityQueue; $this->queue->setExtractFlags(MinPriorityQueue::EXTR_BOTH); $this->data = new \ArrayObject(); $this->priority = new \ArrayObject(); $this->type = new \ArrayObject(); } /** * @param string $key * @param mixed $data * @param int $priority * @param string $type */ public function push($key, $data, $priority, $type = null) { $this->setData($key, $data, $type); $this->priority->offsetSet($key, $priority); $this->type->offsetSet($key, $type); $this->queue->insert($key, $priority); } /** * @param int $threshold * @return string|null|QueueItem */ public function evict($threshold) { if ($this->queue->valid()) { $item = $this->queue->top(); if ($item['priority'] <= $threshold) { $this->queue->next(); $key = $item['data']; // Return only the most recent items which have not been evicted so far. 
if ($this->priority->offsetExists($key) && $this->priority->offsetGet($key) == $item['priority'] ) { $data = $this->data->offsetGet($key); $type = $this->type->offsetGet($key); // Unset prio, data, type for the key. $this->offsetUnset($key); switch ($this->getExtractPolicy()) { case self::EXTRACT_DATA: return $data; case self::EXTRACT_KEY: return $key; case self::EXTRACT_TYPE: return $type; case self::EXTRACT_PRIORITY: return $item['priority']; case self::EXTRACT_ALL: default: return new QueueItem( $key, $data, $item['priority'], $type ); } } } } return null; } /** * @param string $offset * @param string $data * @param string $type */ private function setData($offset, $data, $type) { // If the global data mode is set to append by default OR if the data mode for the given message type requires // to append, then the data will be appended to existing data instead of replacing the value. if (($this->dataMode == self::DATA_MODE_APPEND || $this->getDataModeByType($type) == self::DATA_MODE_APPEND) && $this->data->offsetExists($offset)) { $d = $this->data->offsetGet($offset) . strlen($data) . ":" . $data; $this->data->offsetSet($offset, $d); // Message size is bigger than the allowed max size. Try to force the eviction. if (strlen($d) > 1024 * $this->getItemMaxSizeKb()) { $this->markForEviction($offset, 0); } } else { $this->data->offsetSet($offset, strlen($data) . ":" . $data); } } /** * @return int */ public function getItemMaxSizeKb() { return $this->itemMaxSizeKb; } /** * @param int $itemMaxSizeKb */ public function setItemMaxSizeKb($itemMaxSizeKb) { $this->itemMaxSizeKb = $itemMaxSizeKb; } /** * Try to enforce the eviction. 
* @todo Locking mechanism, so that a subsequent push() for the same key won't set the prio to a higher value * @param $offset */ private function markForEviction($offset) { $this->queue->insert($offset, 0); $this->priority->offsetSet($offset, 0); } /** * @return string */ public function getDataModeByType($type) { return isset($this->dataModeByType[$type]) ? $this->dataModeByType[$type] : self::DATA_MODE_REPLACE; } /** * @param string $type * @param string $dataModeByType */ public function setDataModeByType($type, $dataMode) { $this->dataModeByType[$type] = $dataMode; } /** * Whether a offset exists * @link http://php.net/manual/en/arrayaccess.offsetexists.php * @param mixed $offset <p> * An offset to check for. * </p> * @return boolean true on success or false on failure. * </p> * <p> * The return value will be casted to boolean if non-boolean was returned. * @since 5.0.0 */ public function offsetExists($offset) { return $this->data->offsetExists($offset); } /** * Offset to retrieve * @link http://php.net/manual/en/arrayaccess.offsetget.php * @param mixed $offset <p> * The offset to retrieve. * </p> * @return mixed Can return all value types. * @since 5.0.0 */ public function offsetGet($offset) { return [ 'data' => $this->data->offsetGet($offset), 'priority' => $this->priority->offsetGet($offset), 'type' => $this->type->offsetGet($offset) ]; } /** * Offset to set * The value should be an array with two indexes 'data' and 'priority'. If the priority is not * given the method falls back to the default priority. * * @link http://php.net/manual/en/arrayaccess.offsetset.php * @param mixed $offset <p> * The offset to assign the value to. * </p> * @param mixed $value <p> * The value to set. 
* </p> * @return void * @since 5.0.0 */ public function offsetSet($offset, $value) { if (!is_array($value)) { $this->offsetSet($offset, ['data' => $value, 'priority' => $this->getDefaultPriority(), 'type' => null]); return; } else { if (!isset($value['data'])) { $value['data'] = null; } if (!isset($value['priority'])) { $value['priority'] = $this->getDefaultPriority(); } if (!isset($value['type'])) { $value['type'] = null; } } $this->push($offset, $value['data'], $value['priority'], $value['type']); } /** * Offset to unset * * The method can not remove the data from the PriorityQueue. This could be an issue * if there are entries staying for a long time within the queue without being removed. * * @link http://php.net/manual/en/arrayaccess.offsetunset.php * @param mixed $offset <p> * The offset to unset. * </p> * @return void * @since 5.0.0 */ public function offsetUnset($offset) { $this->data->offsetUnset($offset); $this->priority->offsetUnset($offset); $this->type->offsetUnset($offset); } /** * Count elements of an object * @link http://php.net/manual/en/countable.count.php * @return int The custom count as an integer. * </p> * <p> * The return value is cast to an integer. 
* @since 5.1.0 */ public function count() { return $this->data->count(); } /** * @return int */ public function getDefaultPriority() { return $this->defaultPriority; } /** * @param int $defaultPriority */ public function setDefaultPriority($defaultPriority) { $this->defaultPriority = $defaultPriority; } /** * @return int */ public function getExtractPolicy() { return $this->extractPolicy; } /** * @param int $extractPolicy */ public function setExtractPolicy($extractPolicy) { $this->extractPolicy = $extractPolicy; } /** * @return int */ public function getDataMode() { return $this->dataMode; } /** * @param int $dataMode */ public function setDataMode($dataMode) { $this->dataMode = $dataMode; } /** * Retrieve an external iterator * @link http://php.net/manual/en/iteratoraggregate.getiterator.php * @return Traversable An instance of an object implementing <b>Iterator</b> or * <b>Traversable</b> * @since 5.0.0 */ public function getIterator() { return $this->data->getIterator(); } /** * @param string $className */ public function setIteratorClass($className) { $this->data->setIteratorClass($className); } /** * @return string */ public function getIteratorClass() { return $this->data->getIteratorClass(); } }
{ "content_hash": "3a1471fda75c59d84e3e42f82588bdd2", "timestamp": "", "source": "github", "line_count": 387, "max_line_length": 157, "avg_line_length": 25.896640826873384, "alnum_prop": 0.51985631610457, "repo_name": "awdn/vigilant-queue", "id": "6a051eb790e443618063c3b6ef08c1c5876c124c", "size": "10022", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Queue/PriorityHashQueue.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "62438" } ], "symlink_target": "" }
export class Comment { Id: number; Grade: number; Text: string; AccId: number USerId: number constructor(grade: number, text: string, accommId: number, userId: number) { this.Grade = grade; this.Text = text; this.AccId = accommId; this.USerId = userId; } }
{ "content_hash": "b768874c16a2819590f7cf8852291a92", "timestamp": "", "source": "github", "line_count": 16, "max_line_length": 78, "avg_line_length": 20, "alnum_prop": 0.584375, "repo_name": "FikiLauda/LookNBookApp", "id": "0303c4c9c86e52fad6d81510d96109d20960786a", "size": "320", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "AngularApp/src/app/comment/comment.model.ts", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "104" }, { "name": "C#", "bytes": "232738" }, { "name": "CSS", "bytes": "2706" }, { "name": "HTML", "bytes": "33320" }, { "name": "JavaScript", "bytes": "148597" }, { "name": "TypeScript", "bytes": "94611" } ], "symlink_target": "" }
namespace Google.Cloud.ArtifactRegistry.V1Beta2.Snippets { // [START artifactregistry_v1beta2_generated_ArtifactRegistry_DeletePackage_sync_flattened] using Google.Cloud.ArtifactRegistry.V1Beta2; using Google.LongRunning; using Google.Protobuf.WellKnownTypes; public sealed partial class GeneratedArtifactRegistryClientSnippets { /// <summary>Snippet for DeletePackage</summary> /// <remarks> /// This snippet has been automatically generated for illustrative purposes only. /// It may require modifications to work in your environment. /// </remarks> public void DeletePackage() { // Create client ArtifactRegistryClient artifactRegistryClient = ArtifactRegistryClient.Create(); // Initialize request argument(s) string name = ""; // Make the request Operation<Empty, OperationMetadata> response = artifactRegistryClient.DeletePackage(name); // Poll until the returned long-running operation is complete Operation<Empty, OperationMetadata> completedResponse = response.PollUntilCompleted(); // Retrieve the operation result Empty result = completedResponse.Result; // Or get the name of the operation string operationName = response.Name; // This name can be stored, then the long-running operation retrieved later by name Operation<Empty, OperationMetadata> retrievedResponse = artifactRegistryClient.PollOnceDeletePackage(operationName); // Check if the retrieved long-running operation has completed if (retrievedResponse.IsCompleted) { // If it has completed, then access the result Empty retrievedResult = retrievedResponse.Result; } } } // [END artifactregistry_v1beta2_generated_ArtifactRegistry_DeletePackage_sync_flattened] }
{ "content_hash": "e886ba717b2549ab0e9c367f2def415a", "timestamp": "", "source": "github", "line_count": 42, "max_line_length": 128, "avg_line_length": 47.214285714285715, "alnum_prop": 0.6732223903177005, "repo_name": "jskeet/google-cloud-dotnet", "id": "b301edd195c78569e45a5ad32edca3904eb476d4", "size": "2605", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "apis/Google.Cloud.ArtifactRegistry.V1Beta2/Google.Cloud.ArtifactRegistry.V1Beta2.GeneratedSnippets/ArtifactRegistryClient.DeletePackageSnippet.g.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "767" }, { "name": "C#", "bytes": "268415427" }, { "name": "CSS", "bytes": "1346" }, { "name": "Dockerfile", "bytes": "3173" }, { "name": "HTML", "bytes": "3823" }, { "name": "JavaScript", "bytes": "226" }, { "name": "PowerShell", "bytes": "3303" }, { "name": "Python", "bytes": "2744" }, { "name": "Shell", "bytes": "65260" }, { "name": "sed", "bytes": "1030" } ], "symlink_target": "" }
package io.intercom.api; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.Maps; import java.util.Map; @SuppressWarnings("UnusedDeclaration") @JsonInclude(JsonInclude.Include.NON_DEFAULT) @JsonIgnoreProperties(ignoreUnknown = true) class CompanyWithStringPlan extends TypedData { @JsonProperty("id") private String id; @JsonProperty("name") private String name; @JsonProperty("company_id") private String companyID; @JsonProperty("session_count") private int sessionCount; @JsonProperty("monthly_spend") private float monthlySpend; @JsonProperty("remote_created_at") private long remoteCreatedAt; @JsonProperty("plan") private String plan; @JsonProperty("size") private int size; @JsonProperty("website") private String website; @JsonProperty("industry") private String industry; @JsonIgnoreProperties(ignoreUnknown = false) @JsonProperty("custom_attributes") private Map<String, CustomAttribute> customAttributes = Maps.newHashMap(); @JsonProperty("remove") @JsonInclude(JsonInclude.Include.NON_NULL) private Boolean remove; public CompanyWithStringPlan() { } public String getType() { return "company"; } public String getId() { return id; } public void setId(String id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getCompanyID() { return companyID; } public void setCompanyID(String companyID) { this.companyID = companyID; } public Integer getSessionCount() { return sessionCount; } public void setSessionCount(Integer sessionCount) { this.sessionCount = sessionCount; } public float getMonthlySpend() { return monthlySpend; } public void setMonthlySpend(float monthlySpend) { this.monthlySpend = monthlySpend; } public long getRemoteCreatedAt() { return remoteCreatedAt; } public void setRemoteCreatedAt(long remoteCreatedAt) { this.remoteCreatedAt = 
remoteCreatedAt; } public int getSize() { return size; } public void setSize(int size) { this.size = size; } public String getWebsite() { return website; } public void setWebsite(String website) { this.website = website; } public String getIndustry() { return industry; } public void setIndustry(String industry) { this.industry = industry; } public String getPlan() { return plan; } public void setPlan(String plan) { this.plan = plan; } public Map<String, CustomAttribute> getCustomAttributes() { return customAttributes; } public void setCustomAttributes(Map<String, CustomAttribute> customAttributes) { this.customAttributes = customAttributes; } public Boolean getRemove() { return remove; } public CompanyWithStringPlan setRemove(Boolean remove) { this.remove = remove; return this; } }
{ "content_hash": "b8b4da747d87ba2a3bda8dc2d5ab9c94", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 84, "avg_line_length": 21.44871794871795, "alnum_prop": 0.657501494321578, "repo_name": "intercom/intercom-java", "id": "8f1d8296ac5a7042f8ebcd113b4dbb3b733e0426", "size": "3346", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "intercom-java/src/main/java/io/intercom/api/CompanyWithStringPlan.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "391014" } ], "symlink_target": "" }
Table tents for role playing games Quick and dirty css hacked together to get something working for a local gaming group. # TODO * Backside should probably have something, maybe useful stats for the player needs to be rendered upside down for proper printing/folding * Image selection, some basic character generation features * Add interactive features like turning on/off fields, change field sizes, add remove rows
{ "content_hash": "da5d66055ecca1e170d013ec7b473489", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 90, "avg_line_length": 46.666666666666664, "alnum_prop": 0.8119047619047619, "repo_name": "AlexBarnes86/TableTent", "id": "f9803e64bf2f356de3fc7e30ff555462a2a72afb", "size": "432", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2383" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace Quiz.Domain { [Flags] public enum Operations : int { None = 0, Create = 1, Read = 2, Update = 4, Delete = 8, Execute = 16, All = 31 } }
{ "content_hash": "de6d028ef44327c0057f6c697956fc4c", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 33, "avg_line_length": 16.047619047619047, "alnum_prop": 0.5548961424332344, "repo_name": "qadeer05/Quiz.Net", "id": "0e30f99c41433827f48658d2d94437899aa2ba59", "size": "339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Quiz.Domain/User/Operations.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ASP", "bytes": "99" }, { "name": "C#", "bytes": "108240" }, { "name": "CSS", "bytes": "16222" }, { "name": "JavaScript", "bytes": "119724" } ], "symlink_target": "" }
class FirstItem < ActiveRecord::Base include RailsSortable::Model set_sortable :sort default_scope -> { order(:sort) } end
{ "content_hash": "f351627d011cb58b322d6a36016ad7b5", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 36, "avg_line_length": 21.666666666666668, "alnum_prop": 0.7230769230769231, "repo_name": "itmammoth/rails_sortable", "id": "b5f8c237b0264e8336424814a7f34243373489d0", "size": "130", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/dummy/app/models/first_item.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1462" }, { "name": "HTML", "bytes": "8528" }, { "name": "JavaScript", "bytes": "1202" }, { "name": "Ruby", "bytes": "40863" } ], "symlink_target": "" }
package com.communote.server.persistence.tag;

import java.util.Collection;
import java.util.List;

import com.communote.server.api.core.tag.TagData;
import com.communote.server.core.filter.ResultSpecification;
import com.communote.server.model.tag.Tag;

/**
 * Data access object for creating, loading, updating and removing {@link Tag} entities in the
 * persistent store.
 *
 * @see Tag
 *
 * @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
 */
public interface TagDao {

    /**
     * This constant is used as a transformation flag; entities can be converted automatically into
     * value objects or other types, different methods in a class implementing this interface
     * support this feature: look for an <code>int</code> parameter called <code>transform</code>.
     * <p/>
     * This specific flag denotes no transformation will occur.
     */
    public final static int TRANSFORM_NONE = 0;

    /**
     * Creates a new instance of Tag for each element of the passed in <code>entities</code>
     * collection and adds it to the persistent store.
     *
     * @param entities
     *            the collection of Tag instances to create.
     *
     * @return the created instances.
     */
    public Collection<Tag> create(Collection<Tag> entities);

    /**
     * Does the same thing as {@link #create(Tag)} for each element of the collection, with an
     * additional flag called <code>transform</code>. If this flag is set to
     * <code>TRANSFORM_NONE</code> then the returned entities will <strong>NOT</strong> be
     * transformed. If this flag is any of the other constants defined here then the result
     * <strong>WILL BE</strong> passed through an operation which can optionally transform the
     * entities (into value objects for example). By default, transformation does not occur.
     */
    public Collection<Tag> create(int transform, Collection<Tag> entities);

    /**
     * Does the same thing as {@link #create(Tag)} with an additional flag called
     * <code>transform</code>. If this flag is set to <code>TRANSFORM_NONE</code> then the returned
     * entity will <strong>NOT</strong> be transformed. If this flag is any of the other constants
     * defined here then the result <strong>WILL BE</strong> passed through an operation which can
     * optionally transform the entity (into a value object for example). By default,
     * transformation does not occur.
     */
    public Object create(int transform, Tag tag);

    /**
     * Creates an instance of Tag and adds it to the persistent store.
     */
    public Tag create(Tag tag);

    /**
     * Evicts (removes) the entity from the hibernate cache
     *
     * @param entity
     *            the entity to evict
     */
    public void evict(Tag entity);

    /**
     * @param prefix
     *            the prefix the tags must start with
     * @param resultSpecification
     *            defines paging/limiting of the result
     * @return List of tags with the given prefix.
     */
    public List<TagData> findByPrefix(String prefix, ResultSpecification resultSpecification);

    /**
     * Finds a tag by its TagStore definition, that is the combination of the tag's ID within the
     * TagStore and the alias of the TagStore.
     */
    public Tag findByTagStore(String tagStoreTagId, String tagStoreAlias);

    /**
     * Return the IDs of all users that follow the tag with the given ID. The resulting list will
     * be empty if the tag does not exist.
     *
     * @param tagId
     *            the ID of the tag for which the followers should be returned
     * @return the IDs of the followers
     */
    public List<Long> getFollowers(Long tagId);

    /**
     * Does the same thing as {@link #load(Long)} with an additional flag called
     * <code>transform</code>. If this flag is set to <code>TRANSFORM_NONE</code> then the returned
     * entity will <strong>NOT</strong> be transformed. If this flag is any of the other constants
     * defined in this class then the result <strong>WILL BE</strong> passed through an operation
     * which can optionally transform the entity (into a value object for example). By default,
     * transformation does not occur.
     *
     * @param id
     *            the identifier of the entity to load.
     * @return either the entity or the object transformed from the entity.
     */
    public Object load(int transform, Long id);

    /**
     * Loads an instance of Tag from the persistent store.
     */
    public Tag load(Long id);

    /**
     * Loads all entities of type {@link Tag}.
     *
     * @return the loaded entities.
     */
    public Collection<? extends Tag> loadAll();

    /**
     * Does the same thing as {@link #loadAll()} with an additional flag called
     * <code>transform</code>. If this flag is set to <code>TRANSFORM_NONE</code> then the returned
     * entity will <strong>NOT</strong> be transformed. If this flag is any of the other constants
     * defined here then the result <strong>WILL BE</strong> passed through an operation which can
     * optionally transform the entity (into a value object for example). By default,
     * transformation does not occur.
     *
     * @param transform
     *            the flag indicating what transformation to use.
     * @return the loaded entities.
     */
    public Collection<? extends Tag> loadAll(final int transform);

    /**
     * Removes the instance of Tag having the given <code>identifier</code> from the persistent
     * store.
     */
    public void remove(Long id);

    /**
     * Removes the instance of Tag from the persistent store.
     */
    public void remove(Tag tag);

    /**
     * This method removes the given tag. The tag may only be a note tag.
     *
     * @param oldTagId
     *            Id of the tag to delete.
     * @param newTagId
     *            Id of an optional new tag, the data of the old tag should be assigned to.
     */
    public void removeNoteTag(long oldTagId, Long newTagId);

    /**
     * Updates all instances in the <code>entities</code> collection in the persistent store.
     */
    public void update(Collection<Tag> entities);

    /**
     * Updates the <code>tag</code> instance in the persistent store.
     */
    public void update(Tag tag);

}
{ "content_hash": "7ff7d9d93db8aa369eb62dd20a147f42", "timestamp": "", "source": "github", "line_count": 159, "max_line_length": 100, "avg_line_length": 36.9811320754717, "alnum_prop": 0.6590136054421769, "repo_name": "Communote/communote-server", "id": "9fe6918d6577ff41dae4d232e5f66f8488628de8", "size": "5880", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "communote/persistence/src/main/java/com/communote/server/persistence/tag/TagDao.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "272" }, { "name": "CSS", "bytes": "294265" }, { "name": "HTML", "bytes": "26978" }, { "name": "Java", "bytes": "13692073" }, { "name": "JavaScript", "bytes": "2460010" }, { "name": "PLSQL", "bytes": "4134" }, { "name": "PLpgSQL", "bytes": "262702" }, { "name": "Rich Text Format", "bytes": "30964" }, { "name": "Shell", "bytes": "274" } ], "symlink_target": "" }
__PROJECT_LIST__

# Define the default target (the full prerequisite list and recipe are
# attached further below, after the per-project targets are generated).
all:

#
# Target Macro
#
# Defines a phony build target and a phony clean target for one example
# project, and appends them to TARGET_LIST/CLEAN_LIST, which are consumed
# by the aggregate `all` and `clean` rules below.
#
define TARGET
TARGET_LIST+=$(1)_TARGET
.PHONY: $(1)_TARGET
$(1)_TARGET:
	+$(MAKE) -C $(1)

CLEAN_LIST+=$(1)_CLEAN
.PHONY: $(1)_CLEAN
$(1)_CLEAN:
	+$(MAKE) -C $(1) clean

endef

# Define the various targets via the Macro
$(foreach proj,$(PROJECTS),$(eval $(call TARGET,$(proj))))

__DEPENDENCIES__

# Declare the aggregate targets phony so that files or directories named
# `all`, `clean` or `RUN` in this directory cannot shadow them.
.PHONY: all clean RUN

all: $(TARGET_LIST)
	echo "Done building targets."

clean: $(CLEAN_LIST)
	echo "Done cleaning targets."

# Build everything, then serve the examples over a local HTTP server.
RUN: all
	echo "Starting up python webserver."
	python ../tools/httpd.py
{ "content_hash": "2873a631215aa074bea5c5a632ec2605", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 73, "avg_line_length": 16.564102564102566, "alnum_prop": 0.6640866873065016, "repo_name": "leighpauls/k2cro4", "id": "2bbcaf6879701fd0fe3c367543afd5d2752b5146", "size": "933", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "native_client_sdk/src/examples/Makefile", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ASP", "bytes": "3062" }, { "name": "AppleScript", "bytes": "25392" }, { "name": "Arduino", "bytes": "464" }, { "name": "Assembly", "bytes": "68131038" }, { "name": "C", "bytes": "242794338" }, { "name": "C#", "bytes": "11024" }, { "name": "C++", "bytes": "353525184" }, { "name": "Common Lisp", "bytes": "3721" }, { "name": "D", "bytes": "1931" }, { "name": "Emacs Lisp", "bytes": "1639" }, { "name": "F#", "bytes": "4992" }, { "name": "FORTRAN", "bytes": "10404" }, { "name": "Java", "bytes": "3845159" }, { "name": "JavaScript", "bytes": "39146656" }, { "name": "Lua", "bytes": "13768" }, { "name": "Matlab", "bytes": "22373" }, { "name": "Objective-C", "bytes": "21887598" }, { "name": "PHP", "bytes": "2344144" }, { "name": "Perl", "bytes": "49033099" }, { "name": "Prolog", "bytes": "2926122" }, { "name": "Python", "bytes": "39863959" }, { "name": "R", "bytes": "262" }, { "name": "Racket", "bytes": "359" }, { "name": "Ruby", "bytes": "304063" }, { "name": "Scheme", "bytes": "14853" }, { "name": "Shell", "bytes": "9195117" }, { "name": "Tcl", "bytes": "1919771" }, { "name": "Verilog", "bytes": "3092" }, { "name": "Visual Basic", "bytes": "1430" }, { "name": "eC", "bytes": "5079" } ], "symlink_target": "" }
<?php namespace App\Http\Controllers;

use Input;
use Redirect;
use App\Category;
use App\Faq;
use App\Http\Requests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use App\Http\Requests\CategoryRequest;

/**
 * Resource controller for managing FAQ categories (CRUD plus a public
 * category listing). Create and update both go through CategoryRequest
 * so the same validation rules apply to every write.
 */
class CategoriesController extends Controller
{
    /**
     * List all categories for the admin index view.
     *
     * @return \Illuminate\View\View
     */
    public function index()
    {
        $categories = Category::all();
        return view('categories.index', compact('categories'));
    }

    /**
     * Show the form for creating a new category.
     *
     * @return \Illuminate\View\View
     */
    public function create()
    {
        return view('categories.create');
    }

    /**
     * Persist a new category. Validation is enforced by CategoryRequest
     * before this method is invoked.
     *
     * @param CategoryRequest $request validated form request
     * @return \Illuminate\Http\RedirectResponse
     */
    public function store(CategoryRequest $request)
    {
        Category::create($request->all());
        return Redirect::route('categories.index')->with('message', 'New Category has been Created Successfully');
    }

    /**
     * Display a single category.
     *
     * @param Category $category route-model-bound category
     * @return \Illuminate\View\View
     */
    public function show(Category $category)
    {
        return view('categories.show', compact('category'));
    }

    /**
     * Show the form for editing an existing category.
     *
     * @param Category $category route-model-bound category
     * @return \Illuminate\View\View
     */
    public function edit(Category $category)
    {
        return view('categories.edit', compact('category'));
    }

    /**
     * Update an existing category. Previously this method accepted raw
     * input without validation; it now injects CategoryRequest so updates
     * obey the same rules as store().
     *
     * @param CategoryRequest $request validated form request
     * @param Category $category route-model-bound category
     * @return \Illuminate\Http\RedirectResponse
     */
    public function update(CategoryRequest $request, Category $category)
    {
        // Drop the spoofed HTTP method field before mass assignment.
        $input = array_except($request->all(), '_method');
        $category->update($input);
        return Redirect::route('categories.index')->with('message', 'Category is updated Successfully.');
    }

    /**
     * Delete a category.
     *
     * @param Category $category route-model-bound category
     * @return \Illuminate\Http\RedirectResponse
     */
    public function destroy(Category $category)
    {
        $category->delete();
        return Redirect::route('categories.index')->with('message', 'Selected Category has been Deleted Successfully.');
    }

    /**
     * Public-facing category listing.
     *
     * @return \Illuminate\View\View
     */
    public function category()
    {
        $categories = Category::all();
        return view('categories.category', compact('categories'));
    }
}
{ "content_hash": "7f144478ae2a3179b352bec74eb4c295", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 120, "avg_line_length": 22.17105263157895, "alnum_prop": 0.6344213649851632, "repo_name": "Shahlal47/finalproject", "id": "bb0b8fed747e68724257cfd4270ec96f8ff471ad", "size": "1685", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/Http/Controllers/CategoriesController.php", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "290" }, { "name": "ApacheConf", "bytes": "822" }, { "name": "CSS", "bytes": "3252726" }, { "name": "CoffeeScript", "bytes": "103688" }, { "name": "Erlang", "bytes": "17720" }, { "name": "Go", "bytes": "7076" }, { "name": "HTML", "bytes": "4317795" }, { "name": "JavaScript", "bytes": "9993606" }, { "name": "PHP", "bytes": "310401" }, { "name": "Perl", "bytes": "32642" }, { "name": "Python", "bytes": "5845" }, { "name": "Shell", "bytes": "1332" } ], "symlink_target": "" }