Goals: add links that are reasonable, well-written explanations of how stuff works. No hype, and no vendor content if possible. Practical first-hand accounts of models in prod are eagerly sought.
| """Concurrent read-process-write example""" | |
| import concurrent.futures | |
| from itertools import islice | |
| from time import sleep | |
| import rasterio | |
| CHUNK = 100 |
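The excerpt above stops after its setup. As a minimal sketch of where it is headed, not the original example's code, the read-process-write pattern looks roughly like the following: windows are read and written sequentially in the main thread while the per-window computation runs in a thread pool, `CHUNK` windows at a time. The `input.tif`/`output.tif` paths and the `compute` function are stand-ins.

```python
import concurrent.futures
from itertools import islice

import rasterio

CHUNK = 100


def compute(array):
    """Stand-in computation; assumes 8-bit data and simply inverts it."""
    return 255 - array


def process(infile="input.tif", outfile="output.tif", num_workers=4):
    with rasterio.open(infile) as src:
        profile = src.profile

        with rasterio.open(outfile, "w", **profile) as dst:
            windows = (window for _, window in src.block_windows())

            with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
                while True:
                    # Take the next CHUNK windows so that only a bounded
                    # number of arrays is held in memory at once.
                    chunk = list(islice(windows, CHUNK))
                    if not chunk:
                        break
                    # Read sequentially, process in parallel, write sequentially.
                    data = [src.read(window=w) for w in chunk]
                    for window, result in zip(chunk, executor.map(compute, data)):
                        dst.write(result, window=window)


if __name__ == "__main__":
    process()
```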
Git topics: rebase vs merge, reset vs checkout vs revert, git rev-parse, pull vs fetch, stash vs branch.

```python
"""
Create train, valid, test iterators for CIFAR-10 [1].
Easily extended to MNIST, CIFAR-100 and Imagenet.

[1]: https://discuss.pytorch.org/t/feedback-on-pytorch-for-kaggle-competitions/2252/4
"""
import torch
import numpy as np
```
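The iterator code itself is cut off above. Below is a minimal sketch of the train/valid/test split the docstring describes, not the linked gist's implementation: it assumes torchvision is available, a hypothetical `./data` download directory, and illustrative normalization constants. The split shuffles the training indices once and hands each part to its own `DataLoader` through a `SubsetRandomSampler`.

```python
import numpy as np
from torch.utils.data import DataLoader
from torch.utils.data.sampler import SubsetRandomSampler
from torchvision import datasets, transforms


def get_loaders(data_dir="./data", batch_size=64, valid_frac=0.1, seed=0):
    transform = transforms.Compose([
        transforms.ToTensor(),
        # Illustrative CIFAR-10 channel means/stds.
        transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2470, 0.2435, 0.2616)),
    ])

    train_set = datasets.CIFAR10(data_dir, train=True, download=True, transform=transform)
    valid_set = datasets.CIFAR10(data_dir, train=True, download=True, transform=transform)
    test_set = datasets.CIFAR10(data_dir, train=False, download=True, transform=transform)

    # Shuffle the training indices once, then carve off a validation slice.
    num_train = len(train_set)
    indices = np.random.RandomState(seed).permutation(num_train)
    split = int(valid_frac * num_train)
    valid_idx, train_idx = indices[:split].tolist(), indices[split:].tolist()

    train_loader = DataLoader(train_set, batch_size=batch_size,
                              sampler=SubsetRandomSampler(train_idx))
    valid_loader = DataLoader(valid_set, batch_size=batch_size,
                              sampler=SubsetRandomSampler(valid_idx))
    test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False)
    return train_loader, valid_loader, test_loader
```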
```bash
#!/bin/bash
# Author      : Kiran Murugulla
# Description : Create an asset and upload a binary from the local machine into an S3 bucket, timing the copy.

usage="Usage: s3-cp-speed-test.sh bucketname filepath\n"
BUCKET=$1
FILE=$2

if [ ! $# -eq 2 ]; then
    echo -e "$usage"
    exit 2
fi

# The original excerpt ends at the argument check above; the timed `aws s3 cp`
# below is an assumed completion, not necessarily the author's original command.
START=$(date +%s)
aws s3 cp "$FILE" "s3://${BUCKET}/"
echo "Upload of $FILE to s3://${BUCKET}/ took $(( $(date +%s) - START )) seconds"
```
```python
# Import preamble of a PyTorch image-classification training script
# (argument parsing, checkpoint copying, timing, cuDNN, and optimizer modules).
import argparse
import os
import shutil
import time

import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
```
```python
############ REQUIREMENTS ####################
# sudo apt-get install python-pip
# sudo apt-get install libpq-dev
# sudo pip install psycopg2
# sudo pip install sqlalchemy
# sudo pip install sqlalchemy-redshift
##############################################
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
```
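Once those packages are installed, the two imports are typically used as below. This is a minimal sketch, not the original script: the connection string (user, password, cluster endpoint, database) and the `row_count` helper are placeholders, but `sqlalchemy-redshift` does register the `redshift+psycopg2` dialect that the URL uses.

```python
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker

# Placeholder credentials and endpoint; substitute your own cluster's values.
engine = sa.create_engine(
    "redshift+psycopg2://user:password@example-cluster.abc123.us-east-1"
    ".redshift.amazonaws.com:5439/dev"
)
Session = sessionmaker(bind=engine)


def row_count(table_name):
    """Run a trivial query to confirm the connection and dialect work."""
    session = Session()
    try:
        # Illustrative only: interpolating identifiers like this is not safe
        # for untrusted input.
        result = session.execute(sa.text("SELECT COUNT(*) FROM {}".format(table_name)))
        return result.scalar()
    finally:
        session.close()
```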
```python
import numpy as np
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.optimizers import SGD
from keras.regularizers import l2, activity_l2
from keras.utils import np_utils
from sklearn import metrics

# to run this code, you'll need to load the following data:
```
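These imports use the older Keras 1.x API (e.g. `Convolution2D` and `activity_l2`). As a hedged sketch of how they fit together under that API, and assuming 32x32 RGB inputs with channels last and 10 classes rather than whatever data the truncated comment above refers to, a small CNN would look roughly like this:

```python
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.optimizers import SGD
from keras.regularizers import l2
from keras.utils import np_utils

nb_classes = 10  # assumed; the original data loading is truncated above

model = Sequential()
model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(32, 32, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(128, W_regularizer=l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))

sgd = SGD(lr=0.01, momentum=0.9, nesterov=True)
model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])

# Labels are one-hot encoded with np_utils before training, e.g.:
# Y_train = np_utils.to_categorical(y_train, nb_classes)
```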
```python
import os
import unittest

from airflow.models import DagBag


class TestDags(unittest.TestCase):
    """
    Generic tests that all DAGs in the repository should be able to pass.
    """
```