ValueError: Input 0 of layer sequential_414 is incompatible with the layer: expected axis -1 of input shape to have value 1 but received input with shape (4, 106, 106, 5)?
Could you please tell me how to solve this problem? If necessary, I can attach the Jupyter Notebook and a folder with the files (it contains images).
# Imports used by the code below (assumed; tensorflow.keras equivalents work the same way).
from keras.models import Sequential
from keras.layers import Conv2D, Conv2DTranspose, Dense, Dropout, Flatten, LeakyReLU, Reshape
from keras.optimizers import Adam
from numpy import ones, zeros, vstack
from numpy.random import randint, randn
from IPython.display import clear_output
import numpy as np

def define_discriminator(in_shape = (106, 106, 1)):
    model = Sequential()
    model.add(Conv2D(64, (3,3), strides = (2,2), padding = "same", input_shape = in_shape))
    model.add(LeakyReLU(alpha = 0.2))
    model.add(Dropout(0.5))
    model.add(Conv2D(64, (3,3), strides = (2,2), padding = "same"))
    model.add(LeakyReLU(alpha = 0.2))
    model.add(Dropout(0.5))
    model.add(Flatten())
    model.add(Dense(1, activation = "sigmoid"))
    opt = Adam(learning_rate = 0.0002, beta_1 = 0.5)
    model.compile(loss = "binary_crossentropy", optimizer = opt, metrics = ["accuracy"])
    return model
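For reference, the in_shape = (106, 106, 1) above is exactly where the "expected axis -1 of input shape to have value 1" part of the error comes from: the first Conv2D records an input spec with a single channel. A minimal check (assuming the same Keras/TensorFlow setup as in the traceback below):

# Minimal shape check for the discriminator defined above.
d_check = define_discriminator()
print(d_check.input_shape)   # (None, 106, 106, 1) -- the channel axis must be 1
d_check.summary()            # Conv2D with stride 2 twice: 106 -> 53 -> 27 before Flatten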
def define_generator(latent_dim):
    model = Sequential()
    n_nodes = 128 * 53 * 53
    model.add(Dense(n_nodes, input_dim = latent_dim))
    model.add(LeakyReLU(alpha = 0.2))
    model.add(Reshape((53, 53, 128)))
    model.add(Dense(1024))
    model.add(Conv2DTranspose(1024, (4,4), strides = (2,2), padding = "same"))
    model.add(Dense(1024))
    model.add(LeakyReLU(alpha = 0.2))
    model.add(Dense(1024))
    model.add(Conv2D(1, (7,7), padding = "same", activation = "sigmoid"))
    return model
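For comparison, this generator does end up producing single-channel 106x106 images (Dense on a 4-D tensor acts on the last axis and keeps the spatial dimensions, and the stride-2 Conv2DTranspose doubles 53 to 106), so the fake samples are not the source of the mismatch reported below. A quick check under the same assumptions:

# Quick output-shape check for the generator defined above.
g_check = define_generator(100)
print(g_check.output_shape)  # (None, 106, 106, 1) -- matches the discriminator's in_shape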
def define_gan(g_model, d_model):
    d_model.trianabel = False
    model = Sequential()
    model.add(g_model)
    model.add(d_model)
    opt = Adam(learning_rate = 0.0002, beta_1 = 0.5)
    model.compile(loss = "binary_crossentropy", optimizer = opt)
    return model
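A side note that is unrelated to the shape error: d_model.trianabel looks like a typo for d_model.trainable. As written, Python just creates a new attribute named trianabel, so the discriminator's weights are not frozen while the combined GAN model trains. A corrected sketch of the same function:

# Same define_gan, with the trainable flag spelled correctly so the
# discriminator is actually frozen inside the combined model.
def define_gan(g_model, d_model):
    d_model.trainable = False
    model = Sequential()
    model.add(g_model)
    model.add(d_model)
    opt = Adam(learning_rate = 0.0002, beta_1 = 0.5)
    model.compile(loss = "binary_crossentropy", optimizer = opt)
    return model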
def generate_real_samples(dataset, n_samples):
    ix = randint(0, dataset.shape[0], n_samples)
    X = dataset[ix].T
    Y = ones((n_samples, 1)).T
    return X, Y
def generate_latent_points(latent_dim, n_samples):
    x_input = randn(latent_dim * n_samples)
    x_input = x_input.reshape(n_samples, latent_dim)
    return x_input
def generate_fake_samples(g_model, latent_dim, n_samples):
    x_input = generate_latent_points(latent_dim, n_samples)
    X = g_model.predict(x_input).T
    Y = zeros((n_samples, 1)).T
    return X, Y
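The .T in generate_real_samples and generate_fake_samples is the most likely cause of the reported shape. NumPy's .T reverses all axes, so a batch of shape (n_samples, 106, 106, channels) becomes (channels, 106, 106, n_samples); with half_batch = 5 and 4-channel (e.g. RGBA) images that is exactly the (4, 106, 106, 5) in the error below. The label arrays are transposed the same way. A small demonstration, assuming the dataset holds 4-channel 106x106 images:

import numpy as np

batch = np.zeros((5, 106, 106, 4))     # 5 images, 106x106, 4 channels
print(batch.T.shape)                   # (4, 106, 106, 5) -- the shape in the ValueError
print(np.ones((5, 1)).T.shape)         # (1, 5) -- labels end up transposed as well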
import tensorflow as tf

def train(g_model, d_model, gan_model, dataset, latent_dim, n_epochs=51, n_batch=10):
    bat_per_epo = int(dataset.shape[0] / n_batch)
    half_batch = int(n_batch / 2)
    for i in range(n_epochs):
        for j in range(bat_per_epo):
            X_real, y_real = generate_real_samples(dataset, half_batch)
            X_fake, y_fake = generate_fake_samples(g_model, latent_dim, half_batch)
            print(X_real, X_fake)
            print(y_real, y_fake)
            X, y = vstack((X_real, X_fake)), vstack((y_real, y_fake))
            d_loss, _ = d_model.train_on_batch(X, y)
            X_gan = generate_latent_points(latent_dim, n_batch)
            y_gan = ones((n_batch, 1))
            g_loss = gan_model.train_on_batch(X_gan, y_gan)
            print('>%d, %d/%d, d=%.3f, g=%.3f' % (i+1, j+1, bat_per_epo, d_loss, g_loss))
        if (i+1) % 10 == 0:
            summarize_performance(i, g_model, d_model, dataset, latent_dim)  # defined elsewhere in the notebook
            clear_output()
latent_dim = 100
d_model = define_discriminator()
g_model = define_generator(latent_dim)
gan_model = define_gan(g_model, d_model)
print(pixels.shape)
train(g_model, d_model, gan_model, np.array(pixels), latent_dim)
The error traceback:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-611-b3360c520333> in <module>
4 gan_model = define_gan(g_model, d_model)
5 print(pixels.shape)
----> 6 train(g_model, d_model, gan_model, np.array(pixels), latent_dim)
<ipython-input-610-d7d51b905847> in train(g_model, d_model, gan_model, dataset, latent_dim, n_epochs, n_batch)
13 print(X.shape)
14 print(y.shape)
---> 15 d_loss, _ = d_model.train_on_batch(X, y)
16 X_gan = generate_latent_points(latent_dim, n_batch)
17 y_gan = ones((n_batch, 1))
~\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\training.py in train_on_batch(self, x, y, sample_weight, class_weight, reset_metrics, return_dict)
1854 class_weight)
1855 self.train_function = self.make_train_function()
-> 1856 logs = self.train_function(iterator)
1857
1858 if reset_metrics:
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\def_function.py in __call__(self, *args, **kwds)
883
884 with OptionalXlaContext(self._jit_compile):
--> 885 result = self._call(*args, **kwds)
886
887 new_tracing_count = self.experimental_get_tracing_count()
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\def_function.py in _call(self, *args, **kwds)
931 # This is the first call of __call__, so we have to initialize.
932 initializers = []
--> 933 self._initialize(args, kwds, add_initializers_to=initializers)
934 finally:
935 # At this point we know that the initialization is complete (or less
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\def_function.py in _initialize(self, args, kwds, add_initializers_to)
757 self._graph_deleter = FunctionDeleter(self._lifted_initializer_graph)
758 self._concrete_stateful_fn = (
--> 759 self._stateful_fn._get_concrete_function_internal_garbage_collected( # pylint: disable=protected-access
760 *args, **kwds))
761
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
3064 args, kwargs = None, None
3065 with self._lock:
-> 3066 graph_function, _ = self._maybe_define_function(args, kwargs)
3067 return graph_function
3068
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\function.py in _maybe_define_function(self, args, kwargs)
3461
3462 self._function_cache.missed.add(call_context_key)
-> 3463 graph_function = self._create_graph_function(args, kwargs)
3464 self._function_cache.primary[cache_key] = graph_function
3465
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
3296 arg_names = base_arg_names + missing_arg_names
3297 graph_function = ConcreteFunction(
-> 3298 func_graph_module.func_graph_from_py_func(
3299 self._name,
3300 self._python_function,
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\framework\func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes, acd_record_initial_resource_uses)
1005 _, original_func = tf_decorator.unwrap(python_func)
1006
-> 1007 func_outputs = python_func(*func_args, **func_kwargs)
1008
1009 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\eager\def_function.py in wrapped_fn(*args, **kwds)
666 # the function a weak reference to itself to avoid a reference cycle.
667 with OptionalXlaContext(compile_with_xla):
--> 668 out = weak_wrapped_fn().__wrapped__(*args, **kwds)
669 return out
670
~\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\framework\func_graph.py in wrapper(*args, **kwargs)
992 except Exception as e: # pylint:disable=broad-except
993 if hasattr(e, "ag_error_metadata"):
--> 994 raise e.ag_error_metadata.to_exception(e)
995 else:
996 raise
ValueError: in user code:
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\training.py:853 train_function *
return step_function(self, iterator)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\training.py:842 step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:1286 run
return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:2849 call_for_each_replica
return self._call_for_each_replica(fn, args, kwargs)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\tensorflow\python\distribute\distribute_lib.py:3632 _call_for_each_replica
return fn(*args, **kwargs)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\training.py:835 run_step **
outputs = model.train_step(data)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\training.py:787 train_step
y_pred = self(x, training=True)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\base_layer.py:1020 __call__
input_spec.assert_input_compatibility(self.input_spec, inputs, self.name)
C:\Users\nefar\anaconda3\envs\LikeProject\lib\site-packages\keras\engine\input_spec.py:250 assert_input_compatibility
raise ValueError(
ValueError: Input 0 of layer sequential_414 is incompatible with the layer: expected axis -1 of input shape to have value 1 but received input with shape (4, 106, 106, 5)
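Based on the traceback, here is a sketch of the sampling functions without the transposes, which keeps batches in the (n_samples, height, width, channels) layout Keras expects. Note that this fixes the axis order but not the channel count: the error shape implies the real images have 4 channels while the discriminator was built with in_shape = (106, 106, 1), so either convert the images to grayscale before training or build both models for 4 channels (in_shape = (106, 106, 4) and Conv2D(4, ...) as the generator's output layer).

# Sketch of the corrected sampling functions (assumes dataset has shape
# (n_images, 106, 106, channels) and the models are built for that channel count).
from numpy import ones, zeros
from numpy.random import randint, randn

def generate_real_samples(dataset, n_samples):
    ix = randint(0, dataset.shape[0], n_samples)
    X = dataset[ix]                # (n_samples, 106, 106, channels) -- no .T
    y = ones((n_samples, 1))       # (n_samples, 1) real labels
    return X, y

def generate_fake_samples(g_model, latent_dim, n_samples):
    x_input = randn(latent_dim * n_samples).reshape(n_samples, latent_dim)
    X = g_model.predict(x_input)   # (n_samples, 106, 106, 1) -- no .T
    y = zeros((n_samples, 1))      # (n_samples, 1) fake labels
    return X, y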