Every line of these 'torch repeat' code snippets is scanned for vulnerabilities by our machine-learning engine, which combs millions of open-source libraries to help keep your Python code secure.
def repeat(input, *sizes, **kwargs):
    """Repeat `input` along each dimension, delegating to `Tensor.repeat`.

    Bug fixed: the original forwarded to `torch.repeat(...)`, but `torch`
    has no top-level `repeat` function — repeating is a `torch.Tensor`
    method — so every call raised `AttributeError`.

    Args:
        input: the tensor to repeat.
        *sizes: number of copies per dimension (ints, or one sequence).
        **kwargs: forwarded unchanged to `Tensor.repeat`.

    Returns:
        A new tensor with `input`'s data tiled according to `sizes`.
    """
    return input.repeat(*sizes, **kwargs)
def repeat(tensor, K):
    """Repeat each batch element K times: [B, ...] -> [B*K, ...].

    Uses unsqueeze at dim 1 (rather than `Tensor.repeat` on dim 0) so the
    K copies of each batch element stay adjacent — the result is grouped
    [b0 x K, b1 x K, ...] instead of K whole copies of the batch ([K*B]).

    Args:
        tensor: a torch.Tensor of shape [B, ...], or a list of B items
            (each item must support ``.copy()``, e.g. list/dict/set).
        K: number of copies to make of each element.

    Returns:
        A tensor of shape [B*K, ...], or a list of length B*K.

    Raises:
        TypeError: if `tensor` is neither a torch.Tensor nor a list.
    """
    if isinstance(tensor, torch.Tensor):
        B, *size = tensor.size()
        # [1, K, 1, ..., 1]: one repeat factor per dim after unsqueeze(1).
        repeat_size = [1, K] + [1] * (tensor.dim() - 1)
        return tensor.unsqueeze(1).repeat(*repeat_size).view(B * K, *size)
    elif isinstance(tensor, list):
        out = []
        for x in tensor:
            out.extend(x.copy() for _ in range(K))
        return out
    # Previously this fell off the end and silently returned None for any
    # other input type; fail loudly instead.
    raise TypeError(
        f"repeat expects a torch.Tensor or list, got {type(tensor).__name__}"
    )
@staticmethod
def forward(ctx, input, repeats):
    """Tile `input` according to `repeats`, stashing on `ctx` the
    repeat counts and the input's rank for use by the backward pass."""
    ctx.input_dims = input.dim()
    ctx.repeats = repeats
    return input.repeat(repeats)
def repeat(self, *sizes):
    """Tile ``self`` the given number of times along each dimension.

    In contrast to :meth:`expand`, the data is physically copied.

    Args:
        sizes (torch.Size or int...): how many times to repeat this
            tensor along every dimension.
    """
    # Placeholder stub: concrete tensor types must override this.
    raise NotImplementedError("repeat is not implemented")
def repeat(g, self, repeats):
    """Export ``repeat`` as an ONNX ``Tile`` op.

    When ``repeats`` has more entries than the input's known rank, the
    input is first reshaped with leading singleton dims so that Tile's
    per-dimension repeat counts line up.
    """
    # A python-level repeats list must become a graph constant first.
    if not sym_help._is_value(repeats):
        repeats = g.op("Constant", value_t=torch.LongTensor(repeats))
    repeats_const = sym_help._maybe_get_const(repeats, 'is')

    if self.isCompleteTensor() and not sym_help._is_value(repeats_const):
        shape = self.type().sizes()
        pad = len(repeats_const) - len(shape)
        if pad > 0:
            # Left-pad the rank with 1s so len(shape) == len(repeats).
            self = view(g, self, [1] * pad + shape)
    return g.op("Tile", self, repeats)