My timer wont work in unity

In my game, you can spawn a cube by clicking. After 10 seconds I want the cube to disappear. That’s what I’m trying to do in my code but it won’t work:

using System.Collections; using System.Collections.Generic; using UnityEngine; using UnityEngine.UI;  public class circleThing : MonoBehaviour {     public GameObject circle;     public GameObject block;     public float time;     void Start() {          Vector3 temp = Camera.main.ScreenToWorldPoint(Input.mousePosition);          temp.z = 5f;          transform.position = temp;          if (Input.GetMouseButtonDown(0)) {              StartCoroutine(timer());              Instantiate(block, temp, Quaternion.identity);          }          if (time >= 10) {              Destroy(block);          }     }     void Update()     {            }      IEnumerator timer(){          while (true)         {             Debug.Log(time);             yield return new WaitForSeconds(1);             time++;         }     } } 

What am I doing wrong? I get the error: Destroying assets is not permitted to avoid data loss.

Fragment shader not working as expected

I am porting the following shader into Godot. The results seem not correct though. Apart from the wrong colours there seems to be no change with time. What is the problem with it?

shader_type canvas_item;  const float DELTA = 0.00001; const float TAU = 6.28318530718; const float NOISE_TEXTURE_SIZE = 256.0; const float NOISE_TEXTURE_PIXEL_COUNT = (NOISE_TEXTURE_SIZE * NOISE_TEXTURE_SIZE);  uniform vec4 vColor: color;  // MAIN CONTROLLER UNIFORMS uniform float intensity = 1.;       // overall effect intensity, 0-1 (no upper limit) uniform float rngSeed = .0;         // seed offset (changes configuration around) uniform sampler2D noiseTexture;     // noise texture sampler  //TUNING uniform float lineSpeed = .01;          // line speed uniform float lineDrift = .1;           // horizontal line drifting uniform float lineResolution = 1.;      // line resolution uniform float lineVertShift = .0;       // wave phase offset of horizontal lines uniform float lineShift = .004;         // horizontal shift uniform float jumbleness = .2;          // amount of "block" glitchiness uniform float jumbleResolution = .2;    // resolution of blocks uniform float jumbleShift = .15;        // texture shift by blocks   uniform float jumbleSpeed = 1.;         // speed of block variation uniform float dispersion = .0025;       // color channel horizontal dispersion uniform float channelShift = .004;      // horizontal RGB shift uniform float noiseLevel = .5;          // level of noise uniform float shakiness = .5;           // horizontal shakiness //  float rand(vec2 co){     return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453); }  vec4 extractRed(vec4 col){     return vec4(col.r, 0., 0., col.a); }  vec4 extractGreen(vec4 col){     return vec4(0., col.g, 0., col.a); }  vec4 extractBlue(vec4 col){     return vec4(0., 0., col.b, col.a); }  // Replacement for the mirror address mode, hopefully nobody needs filtering. vec2 mirror(vec2 v) {     return abs((fract((v * 0.5) + 0.5) * 2.0) - 1.0); }  vec2 downsample(vec2 v, vec2 res) {         // Division by zero protected by uniform getters.     return floor(v * res) / res; }  // Fetches four random values from an RGBA noise texture vec4 whiteNoise(vec2 coord, vec2 texelOffset, vec2 resolution) {     vec2 offset = downsample(vec2(rngSeed * NOISE_TEXTURE_SIZE, rngSeed) + texelOffset, vec2(NOISE_TEXTURE_SIZE));     vec2 ratio = resolution / vec2(NOISE_TEXTURE_SIZE);     return texture(noiseTexture, (coord * ratio) + offset);  }  // Fetch noise texture texel based on single offset in the [0-1] range vec4 random(float dataOffset) {     vec2 halfTexelSize = vec2((0.5 / NOISE_TEXTURE_SIZE));     float offset = rngSeed + dataOffset;         return texture(noiseTexture, vec2(offset * NOISE_TEXTURE_SIZE, offset) + halfTexelSize);  }  // Jumble coord generation vec2 jumble(vec2 coord, float time, vec2 resolution){     // Static branch.     if ((jumbleShift * jumbleness * jumbleResolution) < DELTA) {         return vec2(0.0);     }              vec2 gridCoords = (coord * jumbleResolution) / (NOISE_TEXTURE_SIZE * 0.0245);     float jumbleTime = mod(floor(time * 0.02 * jumbleSpeed), NOISE_TEXTURE_PIXEL_COUNT);     vec2 offset = random(jumbleTime / NOISE_TEXTURE_PIXEL_COUNT).ga * jumbleResolution;     vec4 cellRandomValues = whiteNoise(gridCoords, vec2(jumbleResolution * -10.0) + offset, resolution);     return (cellRandomValues.ra - 0.5) * jumbleShift * floor(min(0.99999, cellRandomValues.b) + jumbleness); }  // Horizontal line offset generation float lineOffset(vec2 coord, vec2 uv, float time, vec2 resolution) {     // Static branch.     if (lineShift < DELTA) {         return 0.0;     }          // Wave offsets     vec2 waveHeights = vec2(50.0 * lineResolution, 25.0 * lineResolution);         vec4 lineRandom = whiteNoise(downsample(uv.yy, waveHeights), vec2(0.0), resolution);     float driftTime = uv.y * resolution.y * 2.778;          // XY: big waves, ZW: drift waves     vec4 waveTimes = (vec4(downsample(lineRandom.ra * TAU, waveHeights) * 80.0, driftTime + 2.0, (driftTime * 0.1) + 1.0) + (time * lineSpeed)) + (lineVertShift * TAU);     vec4 waveLineOffsets = vec4(sin(waveTimes.x), cos(waveTimes.y), sin(waveTimes.z), cos(waveTimes.w));     waveLineOffsets.xy *= ((whiteNoise(waveTimes.xy, vec2(0.0), resolution).gb - 0.5) * shakiness) + 1.0; *= lineDrift;     return dot(waveLineOffsets, vec4(1.0)); }  void fragment() {     vec3 randomValues = vec3(rand(vec2(TIME, 0.0)), rand(vec2(TIME, 0.0)), rand(vec2(TIME, 0.0)));     vec2 resolution = 1.0 / SCREEN_PIXEL_SIZE;     vec2 uv = FRAGCOORD.xy / (1.0 / SCREEN_PIXEL_SIZE).xy;          // Sample random high-frequency noise     vec4 randomHiFreq = whiteNoise(uv, randomValues.xy, resolution);          // Apply line offsets     vec2 offsetCoords = uv;     offsetCoords.x += ((((2.0 * randomValues.z) - 1.0) * shakiness * lineSpeed) + lineOffset(offsetCoords, uv, TIME, resolution)) * lineShift * intensity;          // Apply jumbles     offsetCoords += jumble(offsetCoords, TIME, resolution) * intensity * intensity * 0.25;              // Channel split     vec2 shiftFactors = (channelShift + (randomHiFreq.rg * dispersion)) * intensity;     vec4 outColour;          // Static branch.     if (((channelShift + dispersion) * intensity) < DELTA) {         outColour = texture(SCREEN_TEXTURE, mirror(offsetCoords));     } else {         outColour = extractRed(texture(SCREEN_TEXTURE, mirror(offsetCoords + vec2(shiftFactors.r, 0.0)))) + extractBlue(texture(SCREEN_TEXTURE, mirror(offsetCoords + vec2(-shiftFactors.g, 0.0)))) + extractGreen(texture(SCREEN_TEXTURE, mirror(offsetCoords)));     }          // Add noise         outColour.rgb *= (vec3(.55, .5, .4) * randomHiFreq.gab * intensity * noiseLevel) + 1.0;              // COLOR = vColor * outColour;     COLOR = outColour; } 

enter image description here

Why does Unity show dark regions on a model with animation created using Blender?

I sculpted a model in blender, and I want to import that to Unity. When I import just the model(without animation), Unity shows as is. However, when I attach armature to the body and automatically assign weights to it with the intention of creating an animation, Unity import shows dark regions all over the model. All the face normals look fine. I am not sure what the issue is.

Model in Blender: Model in Blender:

Weights: Weights

Dark patches in Unity: Dark patches in Unity

Elegantly transition from 4-grid to 6-grid and back

I find in general map creation that I prefer hex grids for natural environments, but square grids for interior and urban/constructed environments.

Is there a smooth or elegant way to transition between these that doesn’t require a scene break? Most importantly, is there an intuitive way?

As long as they don’t differ too vastly, the ratio of sizes between the two cell types is largely irrelevant to me. I’d also happily use several rows of transitional tiles if their borders can be procedurally determined. I also have no inherent preference for up-point or side-point hexes…

I’d show some things I’ve already tried, but they are genuinely all varieties of the same eldritch mess…

If player is not holding the ladder then it should not jump twice

I am using Unity2d and I am stuck with a problem. In my game I gave the player a double jump ability. I want to the player to jump twice only if they hold the ladder.

The problem is that if the player jumps through the ladder without holding the ladder, it jumps twice (as if the user presses jump key twice) I want the player to only double if player holding the ladder and then presses jump. If the player just jumps through the ladder it should not jump twice.

Below is the player script:

[SerializeField]  [SerializeField] float _xSpeed = 1f; [SerializeField] float _ySpeed = 1f; [SerializeField] float _jumpForce = 1f; [SerializeField] float _distance; [SerializeField] LayerMask _ladderLayer; private float _horizontalMovement; private float _verticalMovement; private Rigidbody2D _rb; private bool _isClimbing; private bool _isMovingHorizontal = true;    [SerializeField] Transform _groundPos; [SerializeField] float _checkRadius; [SerializeField] LayerMask _groundLayer; private bool _isGrounded;    // ExtraJump private int _extraJumps; [SerializeField] int _extraJumpValue = 1;   void Start() {     _rb = GetComponent<Rigidbody2D>();     _extraJumps = _extraJumpValue;  }   void Update() {     _horizontalMovement = Input.GetAxis("Horizontal");     _verticalMovement = Input.GetAxis("Vertical");        }   void FixedUpdate() {     if(_isMovingHorizontal)     {         _rb.velocity = new Vector2( _horizontalMovement * _xSpeed,_rb.velocity.y);     }            _isGrounded = Physics2D.OverlapCircle(_groundPos.position,_checkRadius,_groundLayer);       if(_isGrounded == true)     {         _extraJumps = _extraJumpValue;              }     if(Input.GetKeyDown(KeyCode.Space) && _extraJumps > 0)     {         _rb.velocity = new Vector2(_rb.velocity.x,_jumpForce);         _extraJumps --;              }     else if(Input.GetKeyDown(KeyCode.Space) && _extraJumps == 0 && _isGrounded == true)     {                  _rb.velocity = new Vector2(_rb.velocity.x,_jumpForce);     }       RaycastHit2D hitLadder = Physics2D.Raycast(transform.position,Vector2.up,_distance,_ladderLayer);      if(hitLadder.collider == true)      {         if(Input.GetKey(KeyCode.W))         {             _isClimbing = true;             _rb.gravityScale = 0;         }         else         if(Input.GetKey(KeyCode.Space))          {             _isClimbing = false;                              }            }                     if(_isClimbing == true && hitLadder.collider == true)     {                     _rb.velocity = new Vector2(_rb.velocity.x,_verticalMovement *          _ySpeed);              }     else     {         _rb.gravityScale = 1;     }       } 

How to colour blend between two materials?

I’ve been trying to get a material (with an image texture) to animate towards a colour mix with transparency, but either I’m attempting something the wrong way, or unable to find the right keyword to find about similar examples.

What I’m trying to accomplish is to make a 3D character blush; I have the face area on it’s own material, with textures set, but that’s where my problem is. If it was animating between two colours, I would simply use Color.Lerp, but since textures are included, that makes me think I have to somehow have a colour mix option for this.

I’ve also tried to just use the built-in animator to see if Unity would blend between the colours of two different materials, but after all it turned out to be an instant change on the new keyframe; which leaves me to think so far my only exit could be to manually create several keyframes, and create several materials to get it close to acceptable.

Although my knowledge about it is close to zero, is this after all only possible with a custom fragment shader? I don’t know if there’s a suitable package, or built-in Unity feature that makes something like this possible, or if at all I’m approaching this problem the right way…

Update: This is the shader I’m using right now, from 魔王. It might be a little confusing since inputs are all abbreviated, so here are some example files that are being used.


Godot Engine: Why is baking light making my scene darker?

Apparently I don’t understand real time lighting. I have an interior scene with some windows. The light is from an OmniLight near the ceiling, default environmental light from outside, and a desk lamp with emission. Without baking light, the scene looks like this:

enter image description here

Consistent with the documentation for emission, the desk lamp is not affecting the surrounding objects. I want to bake the light to see the lamp’s effect and to support low end hardware.

I followed the baked lightmaps tutorial and set the BakedLightmap’s extents to encompass the entire room. After baking, the scene looks like this:

enter image description here

I can see the lamp’s light, as expected, but now everything is too dark. It is unclear to me from Godot’s documentation if scenes include indirect light without baking. I have tried this with the OmniLight set to bake "all" and only indirect light.

What am I missing here?

How do I stop gyroscope-controlled camera from jittering when holding phone still?

I have here a simplified version of my gyro-controlled camera with a sensitivity modification (a side effect of increasing sensitivity is that the jitteriness is exacerbated).

public class GyroControl : MonoBehaviour{  private Transform _rawGyroRotation; Vector3 gyroAdjust; [SerializeField] private float _smoothing = 0.1f;  void Start() {     Input.gyro.enabled = true;     Application.targetFrameRate = 60;      _rawGyroRotation = new GameObject("GyroRaw").transform;     _rawGyroRotation.position = transform.position;     _rawGyroRotation.rotation = transform.rotation;  }  private void Update() {     _rawGyroRotation.rotation = Input.gyro.attitude;      gyroAdjust = _rawGyroRotation.rotation.eulerAngles * 2; //increase rotation sensitivity     transform.rotation = Quaternion.Euler(gyroAdjust);      transform.rotation = Quaternion.Slerp(transform.rotation, _rawGyroRotation.rotation, _smoothing);  }} 

When in motion, the jittering isn’t noticeable. But when you hold the phone still, there’s what I assume to be just analogue noise that causes jittering. I would really appreciate any help or advice on how to add a filter or something to reduce the jittering for this kind of controller.