TensorFlow.Data and TensorFlow Hub
Derek Murray
@mrry
An input pipeline as Extract, Transform, Load (ETL):

files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files)              # Extract: read raw records
dataset = dataset.shuffle(10000)                      # Transform: shuffle, repeat,
dataset = dataset.repeat(NUM_EPOCHS)                  #   parse, and batch
dataset = dataset.map(lambda x: tf.parse_single_example(x, features))
dataset = dataset.batch(BATCH_SIZE)
iterator = dataset.make_one_shot_iterator()           # Load: hand batches to the
features = iterator.get_next()                        #   training step
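The `features` argument passed to tf.parse_single_example above is a parsing spec that the slides never show. A minimal sketch, assuming each TFRecord holds a serialized image string and an integer label (the field names are illustrative; note the slide later reuses the name `features` for the iterator output):

# Hypothetical parsing spec; adjust names and types to the real TFRecord schema.
features = {
    "image": tf.FixedLenFeature([], tf.string),
    "label": tf.FixedLenFeature([], tf.int64),
}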
§ On fast accelerators, the tf.data input pipeline can become the training bottleneck
§ Parallelize extraction and transformation, and use the fused contrib transformations
§ Copy input data onto the accelerator ahead of time with tf.contrib.data.prefetch_to_device()
The same pipeline, step by step. The baseline:
files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files)
dataset = dataset.shuffle(10000)
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(lambda x: tf.parse_single_example(x, features))
dataset = dataset.batch(BATCH_SIZE)
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
Parallelize file reading:
files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files, num_parallel_reads=32)
dataset = dataset.shuffle(10000)
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(lambda x: tf.parse_single_example(x, features))
dataset = dataset.batch(BATCH_SIZE)
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
Fuse shuffle with repeat, and map with batch:
files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files, num_parallel_reads=32)
dataset = dataset.apply(
tf.contrib.data.shuffle_and_repeat(10000, NUM_EPOCHS))
dataset = dataset.apply(
tf.contrib.data.map_and_batch(lambda x: ..., BATCH_SIZE))
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
Prefetch batches onto the GPU:
files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files, num_parallel_reads=32)
dataset = dataset.apply(
tf.contrib.data.shuffle_and_repeat(10000, NUM_EPOCHS))
dataset = dataset.apply(
tf.contrib.data.map_and_batch(lambda x: ..., BATCH_SIZE))
dataset = dataset.apply(tf.contrib.data.prefetch_to_device("/gpu:0"))
iterator = dataset.make_one_shot_iterator()
features = iterator.get_next()
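In graph mode the pipeline above only defines tensors; the slides omit the loop that drives it. A minimal sketch, assuming a hypothetical build_train_op() that builds the model and optimizer from the prefetched batch:

train_op = build_train_op(features)  # hypothetical: model + optimizer on /gpu:0
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    while True:
        try:
            sess.run(train_op)  # each step consumes one prefetched batch
        except tf.errors.OutOfRangeError:
            break  # raised once NUM_EPOCHS passes over the data are done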
Handling custom data with Dataset.map and friends:
§ tf.SparseTensor for sparse features
§ Dataset.from_generator() to wrap arbitrary Python code (see the sketch below)
§ A custom DatasetOpKernel in C++ when performance matters most
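For data that only exists as Python objects, Dataset.from_generator() is the quickest escape hatch. A minimal sketch; the generator, shapes, and dtypes are made up for illustration:

import numpy as np
import tensorflow as tf

def gen():
    # Hypothetical generator yielding (feature_vector, label) pairs.
    for i in range(1000):
        yield np.random.rand(16).astype(np.float32), i % 2

dataset = tf.data.Dataset.from_generator(
    gen,
    output_types=(tf.float32, tf.int32),
    output_shapes=(tf.TensorShape([16]), tf.TensorShape([])))
dataset = dataset.batch(32)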
§ With eager execution, a Dataset behaves like an ordinary Python iterable
§ tf.contrib.data.make_batched_features_dataset() and make_csv_dataset() package the common recipes
§ The same Dataset plugs straight into tf.estimator input functions
tf.enable_eager_execution()
files = tf.data.Dataset.list_files(file_pattern)
dataset = tf.data.TFRecordDataset(files)
dataset = dataset.shuffle(10000)
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(lambda x: tf.parse_single_example(x, features))
dataset = dataset.batch(BATCH_SIZE)
# Eager execution makes dataset a normal Python iterable.
for batch in dataset:
train_model(batch)
tf.enable_eager_execution()
# Also implements best practices for high performance!
# (See optional args for details.)
dataset = tf.contrib.data.make_batched_features_dataset(
file_pattern, BATCH_SIZE, features, num_epochs=NUM_EPOCHS)
for batch in dataset:
train_model(batch)
tf.enable_eager_execution()
# In a terminal, run the following commands, e.g.:
# $ pip install kaggle
# $ kaggle datasets download -d therohk/million-headlines -p .
dataset = tf.contrib.data.make_csv_dataset(
"*.csv", BATCH_SIZE, num_epochs=NUM_EPOCHS)
for batch in dataset:
train_model(batch["publish_date"], batch["headline_text"])
dataset = tf.contrib.data.make_csv_dataset(
"*.csv", BATCH_SIZE, num_epochs=NUM_EPOCHS)
for batch in dataset:
train_model(batch)
# Wrap the dataset in an input function, and return it directly.
def input_fn():
dataset = tf.contrib.data.make_csv_dataset(
"*.csv", BATCH_SIZE, num_epochs=NUM_EPOCHS)
return dataset
# Train an Estimator on the dataset.
tf.estimator.Estimator(model_fn=train_model).train(input_fn=input_fn)
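The slide passes train_model as the Estimator's model_fn, which hides the model_fn contract: it receives (features, labels, mode) and must return a tf.estimator.EstimatorSpec. A minimal sketch of such a function; the single dense layer and the column name are illustrative, not the talk's model:

def train_model(features, labels, mode):
    # Hypothetical model: one dense layer over a single numeric column.
    inputs = tf.expand_dims(tf.to_float(features["some_numeric_column"]), -1)
    logits = tf.layers.dense(inputs, 2)
    loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
    train_op = tf.train.AdamOptimizer().minimize(
        loss, global_step=tf.train.get_global_step())
    return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)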
§ tf.data
✓ Fast
✓ Flexible
✓ Easy to use
Brennan Saeta
@bsaeta
def input_fn(batch_size):
files = tf.data.Dataset.list_files(FLAGS.data_dir)
dataset = tf.data.TFRecordDataset(files)
dataset = dataset.shuffle(2048) # Sliding window of 2048 records
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(parser_fn).batch(batch_size)
return dataset
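On Cloud TPU the batch size is usually not passed directly: tf.contrib.tpu.TPUEstimator calls input_fn with a params dict and, as far as the TF 1.x contrib API goes, supplies the per-replica batch size as params["batch_size"]. A hedged adapter sketch reusing the input_fn above:

def tpu_input_fn(params):
    # Assumption: TPUEstimator provides the per-replica batch size here.
    return input_fn(params["batch_size"])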
saeta@saeta:~$ capture_tpu_profile --tpu_name=saeta --logdir=myprofile/ --duration_ms=10000
Welcome to the Cloud TPU Profiler v1.5.1
Starting to profile TPU traces for 10000 ms. Remaining attempt(s): 3
Limiting the number of trace events to 1000000
2018-03-21 01:13:12.350004: I tensorflow/contrib/tpu/profiler/dump_tpu_profile.cc:155] Converting trace events to TraceViewer JSON.
2018-03-21 01:13:12.392162: I tensorflow/contrib/tpu/profiler/dump_tpu_profile.cc:69] Dumped raw-proto trace data to profiles/5/plugins/profile/2018-03-21_01:13:12/trace
Trace contains 998114 events.
Dumped JSON trace data to myprofile/plugins/profile/2018-03-21_01:13:12/trace.json.gz
Dumped json op profile data to myprofile/plugins/profile/2018-03-21_01:13:12/op_profile.json
Dumped tool data for input_pipeline.json to myprofile/plugins/profile/2018-03-21_01:13:12/input_pipeline.json
Dumped tool data for overview_page.json to myprofile/plugins/profile/2018-03-21_01:13:12/overview_page.json
NOTE: using the trace duration 10000ms.
Set an appropriate duration (with --duration_ms) if you don't see a full step in your trace or the captured trace is too large.
saeta@saeta:~$ tensorboard --logdir=myprofile/
TensorBoard 1.6.0 at <redacted> (Press CTRL+C to quit)
Parallelize the parsing step with num_parallel_calls:
def input_fn(batch_size):
files = tf.data.Dataset.list_files(FLAGS.data_dir)
dataset = tf.data.TFRecordDataset(files)
dataset = dataset.shuffle(2048) # Sliding window of 2048 records
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(parser_fn, num_parallel_calls=64)
dataset = dataset.batch(batch_size)
return dataset
[Pipeline diagram: Extract → Transform → Load]
Parallelize file reads and add prefetching:
def input_fn(batch_size):
files = tf.data.Dataset.list_files(FLAGS.data_dir)
dataset = tf.data.TFRecordDataset(files, num_parallel_reads=32)
dataset = dataset.shuffle(10000)
dataset = dataset.repeat(NUM_EPOCHS)
dataset = dataset.map(parser_fn, num_parallel_calls=64)
dataset = dataset.batch(batch_size)
dataset = dataset.prefetch(2)
return dataset
Going further:
§ parallel_interleave() reads many files concurrently; sloppy=True trades determinism for speed
§ A per-file read buffer_size helps when reading from remote storage
§ map_and_batch() with num_parallel_batches overlaps parsing with batching
def input_fn(batch_size):
files = tf.data.Dataset.list_files(FLAGS.data_dir)
def tfrecord_dataset(filename):
buffer_size = 8 * 1024 * 1024 # 8 MiB per file
return tf.data.TFRecordDataset(filename, buffer_size=buffer_size)
dataset = files.apply(tf.contrib.data.parallel_interleave(
tfrecord_dataset, cycle_length=32, sloppy=True))
dataset = dataset.apply(tf.contrib.data.shuffle_and_repeat(10000, NUM_EPOCHS))
dataset = dataset.apply(tf.contrib.data.map_and_batch(parser_fn,
batch_size, num_parallel_batches=4))
dataset = dataset.prefetch(4)
return dataset
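A quick way to see whether a pipeline like this keeps the accelerator fed is to time the pipeline on its own, with no model attached. A minimal graph-mode benchmark sketch; the batch size and step count are arbitrary:

import time

batch = input_fn(1024).make_one_shot_iterator().get_next()
with tf.Session() as sess:
    sess.run(batch)                      # warm-up: open files, fill buffers
    start = time.time()
    for _ in range(100):
        sess.run(batch)
    print("%.1f batches/sec" % (100 / (time.time() - start)))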
Jeremiah Harmsen
@JeremiahHarmsen
Andrew Gasparovic
@agasparovic_
TensorFlow Hub: repositories of reusable model pieces. A TensorFlow Hub Module packages a pre-trained graph fragment that can be composed into a new Model.
[Slide example: an image classifier whose labels include CHAIRPERSON, FLOWER, ANGORA, FUZZY LOP, EASTER BUNNY]
# Download and use NASNet feature vector module.
module = hub.Module(
"https://tfhub.dev/google/imagenet/nasnet_large/feature_vector/1")
features = module(my_images)
logits = tf.layers.dense(features, NUM_CLASSES)
probabilities = tf.nn.softmax(logits)
Add fine-tuning:
# Download and use NASNet feature vector module.
module = hub.Module(
"https://tfhub.dev/google/imagenet/nasnet_large/feature_vector/1",
    trainable=True, tags={"train"})
features = module(my_images)
logits = tf.layers.dense(features, NUM_CLASSES)
probabilities = tf.nn.softmax(logits)
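With trainable=True (and the "train" graph tags), the module's variables join the trainable collection, so gradients from the task loss update the pre-trained weights. A minimal sketch of the rest of the fine-tuning graph; the labels placeholder and learning rate are illustrative:

labels = tf.placeholder(tf.int64, shape=[None])
loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
# A small learning rate is typical when fine-tuning pre-trained weights.
train_op = tf.train.AdamOptimizer(1e-5).minimize(loss)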
Example inputs: "The quick brown fox"; "The shallots were simply underwhelming"
Task: classify each review as POSITIVE or NEGATIVE
# Use pre-trained universal sentence encoder to build text vector column.
review = hub.text_embedding_column(
"review", "https://tfhub.dev/google/universal-sentence-encoder/1")
features = {
"review": np.array(["an arugula masterpiece", "inedible shoe leather", ...])
}
labels = np.array([[1], [0], ...])
input_fn = tf.estimator.inputs.numpy_input_fn(features, labels, shuffle=True)
estimator = tf.estimator.DNNClassifier(hidden_units, [review])
estimator.train(input_fn, max_steps=100)
# Use pre-trained universal sentence encoder to build text vector column.
review = hub.text_embedding_column(
"review", "https://tfhub.dev/google/universal-sentence-encoder/1",
trainable=True)
features = {
"review": np.array(["an arugula masterpiece", "inedible shoe leather", ...])
}
labels = np.array([[1], [0], ...])
input_fn = tf.estimator.inputs.numpy_input_fn(features, labels, shuffle=True)
estimator = tf.estimator.DNNClassifier(hidden_units, [review])
estimator.train(input_fn, max_steps=100)
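After training, the same text column serves evaluation and prediction. A minimal sketch; the held-out reviews and labels below are made up:

eval_features = {
    "review": np.array(["a transcendent tasting menu", "cold, soggy fries"])
}
eval_labels = np.array([[1], [0]])
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
    eval_features, eval_labels, shuffle=False, num_epochs=1)
print(estimator.evaluate(eval_input_fn))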