Unity problem: my camera goes up at the start of the game - C#

I've followed a YouTube tutorial (https://www.youtube.com/watch?v=c1FYp1oOFIs&list=PLD_vBJjpCwJtrHIW1SS5_BNRk6KZJZ7_d&index=4),
and the problem is that when I start the game my camera moves up when it shouldn't.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

namespace BARR3TT
{
    public class CameraHandler : MonoBehaviour
    {
        public Transform targetTransform;
        public Transform cameraTransform;
        public Transform cameraPivotTransform;
        private Transform myTransform;
        private Vector3 cameraTransformPosition;
        private LayerMask ignoreLayers;
        private Vector3 cameraFollowVelocity = Vector3.zero;

        public static CameraHandler singleton;

        public float lookSpeed = 0.1f;
        public float followSpeed = 0.1f;
        public float pivotSpeed = 0.03f;

        private float targetPosition;
        private float defaultPosition;
        private float lookAngle;
        private float pivotAngle;
        public float minimumPivot = -35;
        public float maximumPivot = 35;

        private float cameraSphereRadius = 0.2f;
        public float cameraColisionOffset = 0.2f;
        public float minimumColisionOffset = 0.2f;

        private void Awake()
        {
            singleton = this;
            myTransform = transform;
            defaultPosition = cameraTransform.localPosition.z;
            ignoreLayers = ~(1 << 8 | 1 << 9 << 10);
        }

        public void FollowTarget(float delta)
        {
            Vector3 targetPosition = Vector3.SmoothDamp(myTransform.position, targetTransform.position, ref cameraFollowVelocity, delta / followSpeed);
            myTransform.position = targetPosition;

            HandleCameraCollision(delta);
        }

        public void HandleCameraRotation(float delta, float mouseXInput, float mouseYInput)
        {
            lookAngle += (mouseXInput * lookSpeed) / delta;
            pivotAngle -= (mouseYInput * pivotSpeed) / delta;
            pivotAngle = Mathf.Clamp(pivotAngle, minimumPivot, maximumPivot);

            Vector3 rotation = Vector3.zero;
            rotation.y = lookAngle;
            Quaternion targetRotation = Quaternion.Euler(rotation);
            myTransform.rotation = targetRotation;

            rotation = Vector3.zero;
            rotation.x = pivotAngle;
            targetRotation = Quaternion.Euler(rotation);
            cameraPivotTransform.localRotation = targetRotation;
        }

        private void HandleCameraCollision(float delta)
        {
            targetPosition = defaultPosition;
            RaycastHit hit;
            Vector3 direction = cameraTransform.position - cameraPivotTransform.position;
            direction.Normalize();

            if (Physics.SphereCast(cameraPivotTransform.position, cameraSphereRadius, direction, out hit, Mathf.Abs(targetPosition)))
            {
                float dis = Vector3.Distance(cameraPivotTransform.position, hit.point);
                targetPosition = -(dis - cameraColisionOffset);
            }

            if (Mathf.Abs(targetPosition) < minimumColisionOffset)
            {
                targetPosition = -minimumColisionOffset;
            }

            cameraTransformPosition.z = Mathf.Lerp(cameraTransform.localPosition.z, targetPosition, delta / 0.2f);
            cameraTransform.localPosition = cameraTransformPosition;
        }
    }
}
I've checked every line and I don't see any mistakes; the transforms are where they should be. I can kind of fix it by adding some rotation on the X axis of the main camera, but it's still weird.

You need to check whether your camera has any other script attached to it, or whether it is a child of some other object. I think that is where the error comes from.
If the camera holder or the camera pivot changes its rotation, the main camera will change with it, because a child transform inherits its parent's rotation.
Also, your code is fairly complicated and has no comments, which is why people are reluctant to read through it, analyze it and answer you.
If you are just beginning with coding, I recommend building a good foundation first and only then moving on to difficult tutorials like the one you followed. Don't just follow tutorials step by step.
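As a hedged sketch of one thing worth ruling out (not a confirmed cause): if the camera holder or pivot starts the scene with a non-zero rotation, the first call to HandleCameraRotation snaps it back to the accumulated lookAngle and pivotAngle, which both start at 0, and that can look like the camera jumping on the first frame. Seeding those angles from the rig's starting orientation, for example in a Start method added to the question's CameraHandler, keeps the first frame consistent:

// Hypothetical addition to the CameraHandler above: seed the accumulated
// angles from the rig's initial orientation so the first rotation update
// does not snap the camera to a different pose.
private void Start()
{
    lookAngle = myTransform.eulerAngles.y;                // yaw of the camera holder
    pivotAngle = cameraPivotTransform.localEulerAngles.x; // pitch of the pivot
    if (pivotAngle > 180f) pivotAngle -= 360f;            // map 0..360 to -180..180 before clamping
}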

Related

Attempting to Add CameraCollisionHandler to my 3rd person controller (Unity)

I'm new to the world of game design and Unity, and while following a tutorial I found, I ran into an issue where my camera does not stay at the proper height and drops down to a Y coordinate of 0. My guess is that it has something to do with the fact that when I set its position I'm overwriting the old one (including the Y coordinate). All feedback is greatly appreciated! My code is below...
using UnityEngine;

public class CameraManager : MonoBehaviour
{
    InputManager inputManager;

    public Transform targetTransform; //Target Object to be Followed...
    public Transform cameraTransform; //Transform of the camera object.
    public Transform cameraPivot;     //Object used to pivot.
    private float defaultPosition;
    private Vector3 cameraFollowVelocity = Vector3.zero;
    private Vector3 cameraVectorPosition;

    public LayerMask collisionLayers; //Layers Camera can collide with.
    public float cameraCollisionRadius = 0.2f;
    public float cameraCollisionOffset = 0.2f; //Offset camera should have when colliding with the env...
    public float minCollisionOffSet = 0.2f;

    public float cameraFollowSpeed = 0.2f;
    public float cameraLookSpeed = 2;
    public float cameraPivotSpeed = 2;

    public float lookAngle;  // Left + Right
    public float pivotAngle; // Up + Down
    public float minPivotAngle = -35;
    public float maxPivotAngle = 35;

    private void Awake()
    {
        inputManager = FindObjectOfType<InputManager>();
        targetTransform = FindObjectOfType<PlayerManager>().transform;
        cameraTransform = Camera.main.transform;
        defaultPosition = cameraTransform.localPosition.z;
    }

    private void FollowTarget()
    {
        Vector3 targetPos = Vector3.SmoothDamp
            (transform.position, targetTransform.position, ref cameraFollowVelocity, cameraFollowSpeed);
        transform.position = targetPos;
    }

    private void RotateCamera()
    {
        Vector3 rotation;
        Quaternion targetRotation;

        lookAngle = lookAngle + (inputManager.cameraInputX * cameraLookSpeed);
        pivotAngle = pivotAngle - (inputManager.cameraInputY * cameraPivotSpeed);
        pivotAngle = Mathf.Clamp(pivotAngle, minPivotAngle, maxPivotAngle);

        rotation = Vector3.zero;
        rotation.y = lookAngle;
        targetRotation = Quaternion.Euler(rotation);
        transform.rotation = targetRotation;

        rotation = Vector3.zero;
        rotation.x = pivotAngle;
        targetRotation = Quaternion.Euler(rotation);
        cameraPivot.localRotation = targetRotation;
    }

    private void HandleCameraCollision()
    {
        float targetPosition = defaultPosition;
        RaycastHit hit;
        Vector3 direction = cameraTransform.position - cameraPivot.position;
        direction.Normalize();

        if (Physics.SphereCast
            (cameraPivot.transform.position, cameraCollisionRadius, direction, out hit, Mathf.Abs(targetPosition), collisionLayers))
        {
            float distance = Vector3.Distance(cameraPivot.position, hit.point);
            targetPosition = -(distance - cameraCollisionOffset);
        }

        if (Mathf.Abs(targetPosition) < minCollisionOffSet)
        {
            targetPosition = targetPosition - minCollisionOffSet;
        }

        cameraVectorPosition.z = Mathf.Lerp(cameraTransform.localPosition.z, targetPosition, 0.2f);
        cameraTransform.localPosition = cameraVectorPosition;
        //cameraTransform.localPosition.Set(cameraVectorPosition);
    }

    public void HandleAllCameraMovement()
    {
        FollowTarget();
        RotateCamera();
        HandleCameraCollision();
    }
}
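A hedged illustration of the guess in the question (that assigning the smoothed position overwrites the Y coordinate): if the rig is meant to sit at a fixed height relative to the followed target, the follow step could re-apply an explicit vertical offset after smoothing. The cameraHeightOffset field below is hypothetical and not part of the original CameraManager:

// Hypothetical variant of FollowTarget: converge on a point a fixed height
// above the target instead of on the target's exact position.
public float cameraHeightOffset = 2f; // assumed value, tune in the Inspector

private void FollowTarget()
{
    Vector3 desired = targetTransform.position + Vector3.up * cameraHeightOffset;
    Vector3 targetPos = Vector3.SmoothDamp(
        transform.position, desired, ref cameraFollowVelocity, cameraFollowSpeed);
    transform.position = targetPos;
}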

rb.AddForce() is launching my player in the opposite direction of the Vector2 value I assigned

I'm making a game where you click and drag with the mouse, then release to launch the player. But sometimes the player gets launched in the opposite direction of where it should go. I made a debug output to show you the different values. Here is the output:
In that image, for example, you can see that the Vector2 of force * power is positive on the Y axis, but the player launched downwards, and the same happens vice versa. I think it's also worth noting that this happens inconsistently for some reason. Here is my code:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class Movement : MonoBehaviour
{
    public GameObject player;
    public float power = 10f;
    public Rigidbody2D rb;
    public float maxSpeed;
    public Vector2 minPower;
    public Vector2 maxPower;

    TragectoryLine tl;
    Camera cam;

    public Vector2 force;
    public Vector3 startPoint;
    public Vector3 endPoint;
    public Vector3 currentPoint;
    public Vector3 startPointMouse;
    public bool isPulling = false;
    float distance;

    private void Start()
    {
        cam = Camera.main;
        tl = GetComponent<TragectoryLine>();
    }

    private void Update()
    {
        if (Input.GetMouseButtonDown(0))
        {
            startPointMouse = cam.ScreenToWorldPoint(Input.mousePosition);
            startPointMouse.z = 15;
        }

        if (Input.GetMouseButton(0))
        {
            startPoint = player.transform.position;
            startPoint.z = 15;
            isPulling = true;

            Vector3 currentPoint = cam.ScreenToWorldPoint(Input.mousePosition);
            currentPoint.z = 15;
            tl.RenderLine(startPoint, currentPoint);
        }

        if (Input.GetMouseButtonUp(0))
        {
            endPoint = cam.ScreenToWorldPoint(Input.mousePosition);
            endPoint.z = 15;
            isPulling = false;
            tl.EndLine();

            distance = startPointMouse.magnitude - endPoint.magnitude;
            if (distance < 0)
            {
                distance = -distance;
            }
            if (distance >= 1)
            {
                rb.AddForce(force * power, ForceMode2D.Impulse);
            }

            force = new Vector2(Mathf.Clamp(startPoint.x - endPoint.x, minPower.x, maxPower.x), Mathf.Clamp(startPoint.y - endPoint.y, minPower.y, maxPower.y));

            Debug.Log("distance" + distance);
            Debug.Log("start" + startPoint);
            Debug.Log("end" + endPoint);
            Debug.Log("force" + force);
            Debug.Log("force * power" + force * power);
        }
    }

    private void FixedUpdate()
    {
        rb.velocity = Vector3.ClampMagnitude(rb.velocity, maxSpeed);
    }
}
Here I added the force using rb.AddForce(force * power, ForceMode2D.Impulse); when the force * power value was positive on the Y axis. So why did it go in the opposite direction?
This was working perfectly fine before I tried implementing a feature where the player has to move the mouse a certain distance or else it won't launch. I have tried removing it, but it doesn't seem to make a difference. I think I changed something in the code that broke it, but I can't figure out what! Please help!
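A hedged observation rather than a confirmed answer: in the mouse-up branch above, rb.AddForce(force * power, ...) runs before force is recalculated from the current drag, so the impulse still uses the force left over from the previous launch. If that is the culprit, computing force before applying it would look roughly like this (same fields as the question's Movement class; only the ordering changes):

// Sketch of the mouse-up branch with force computed before it is applied.
if (Input.GetMouseButtonUp(0))
{
    endPoint = cam.ScreenToWorldPoint(Input.mousePosition);
    endPoint.z = 15;
    isPulling = false;
    tl.EndLine();

    // Compute the launch force from the current drag first...
    force = new Vector2(
        Mathf.Clamp(startPoint.x - endPoint.x, minPower.x, maxPower.x),
        Mathf.Clamp(startPoint.y - endPoint.y, minPower.y, maxPower.y));

    // ...then decide whether the drag was long enough and apply it.
    distance = Mathf.Abs(startPointMouse.magnitude - endPoint.magnitude);
    if (distance >= 1)
    {
        rb.AddForce(force * power, ForceMode2D.Impulse);
    }
}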

Configurable Joint: yDrive mode - Brackeys multiplayer FPS

As you may know, the mode field for the configurable joint's spring drive has been removed, and I discovered this while following the Brackeys tutorial on a multiplayer FPS (part 5). There was a comment under the video saying that you should ignore this, as Brackeys sets it manually in the code later. However, when I did that there were no errors, but the jump button did nothing (yes, I checked the inputs).
My code is below:
using UnityEngine;

[RequireComponent(typeof(PlayerMotor))]
[RequireComponent(typeof(ConfigurableJoint))]
public class PlayerController : MonoBehaviour
{
    private Vector3 _movHorizontal;
    private Vector3 _movVertical;
    private Vector3 _velocity;

    [SerializeField]
    private float speed = 5f;
    [SerializeField]
    private float lookSensitivity = 5f;
    [SerializeField]
    private float thrusterForce = 1000f;

    [Header("Spring Settings:")]
    [SerializeField]
    private JointDriveMode jointMode = JointDriveMode.Position;
    [SerializeField]
    private float jointSpring = 20f;
    [SerializeField]
    private float joinMaxForce = 40f;

    private PlayerMotor motor;
    private ConfigurableJoint joint;

    void Start ()
    {
        motor = GetComponent<PlayerMotor>();
        joint = GetComponent<ConfigurableJoint>();
        SetJointSettings(jointSpring);
    }

    void Update ()
    {
        //Calc movement velocity as Vector 3D
        float _xMov = Input.GetAxisRaw("Horizontal");
        float _zMov = Input.GetAxisRaw("Vertical");
        _movHorizontal = transform.right * _xMov;
        _movVertical = transform.forward * _zMov;

        //Final movement vector
        _velocity = (_movHorizontal + _movVertical).normalized * speed;

        //Apply movement
        motor.Move(_velocity);

        //Calculate rotation as a 3d vector (turning around)
        float _yrot = Input.GetAxisRaw("Mouse X");
        Vector3 _rotation = new Vector3 (0f, _yrot, 0f) * lookSensitivity;

        //Apply rotation
        motor.Rotate(_rotation);

        //Calculate rotation as a 3d vector (turning around)
        float _xrot = Input.GetAxisRaw("Mouse Y");
        Vector3 _cameraRotation = new Vector3 (_xrot, 0f, 0f) * lookSensitivity;

        //Apply rotation
        motor.RotateCamera(_cameraRotation);

        //Calculate thruster force based on player input
        Vector3 _thrusterForce = new Vector3 (0, 0, 0);
        if (Input.GetButton("Jump"))
        {
            _thrusterForce = Vector3.up * thrusterForce;
            SetJointSettings(0f);
        }
        else
        {
            SetJointSettings(jointSpring);
        }

        //Apply thruster force
        motor.ApplyThruster(_thrusterForce);
    }

    private void SetJointSettings(float _jointSpring)
    {
        joint.yDrive = new JointDrive
        {
            mode = jointMode,
            positionSpring = _jointSpring,
            maximumForce = joinMaxForce
        };
    }
}
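A hedged aside on the removed drive mode, not a confirmed fix for the jump problem: in Unity versions where JointDriveMode is obsolete, the mode assignment is ignored and the drive is controlled purely by positionSpring, positionDamper and maximumForce, so SetJointSettings can drop the field entirely. A sketch using the same names as the class above:

// Sketch: the same joint settings without the obsolete JointDriveMode field.
private void SetJointSettings(float _jointSpring)
{
    joint.yDrive = new JointDrive
    {
        positionSpring = _jointSpring,
        maximumForce = joinMaxForce
    };
}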

Clamping rotation/quaternion

Still working on VR interactions, I want to be able to rotate objects, but I'm facing an issue.
For instance, I want to open/close the upper part of a laptop using my hands in VR. What I'm doing to achieve this is that I placed the forward like this:
I'm creating a plane using position, forward and up, then getting the closest point on the plane corresponding to my VR controller, then using transform.LookAt.
This is working fine, but I want to be able to clamp the rotation so I cannot rotate it too far (see the end of the video).
I've been trying everything, using eulerAngles and Quaternion, but I'm unable to do it.
I made some helpers (text to show the localEulerAngles, and a transform to LookAt so I don't have to use the VR headset, as it's getting pretty tedious).
Here is a video showing what's going on: https://www.youtube.com/watch?v=UfN97OpYElk
And here's my code:
using UnityEngine;

public class JVRLookAtRotation : MonoBehaviour, IJVRControllerInteract
{
    [SerializeField] private Transform toRotate;
    [SerializeField] private Vector3 minRotation;
    [SerializeField] private Vector3 maxRotation;

    [Header("Rotation contraints")]
    [SerializeField] private bool lockX;
    [SerializeField] private bool lockY;
    [SerializeField] private bool lockZ;

    private JVRController _jvrController;
    private bool _isGrabbed;
    private Vector3 _targetPosition;
    private Vector3 _tmp;

    public Transform followTransform;

    private void LateUpdate()
    {
        /*
        if (!_isGrabbed) return;

        if (_jvrController.Grip + _jvrController.Trigger < Rules.GrabbingThreshold)
        {
            _isGrabbed = false;
            _jvrController.StopGrabbing();
            _jvrController = null;
            return;
        }
        */

        Vector3 up = toRotate.up;
        Vector3 forward = toRotate.forward;
        Vector3 pos0 = toRotate.position;
        Vector3 pos1 = pos0 + up;
        Vector3 pos2 = pos0 + forward;

        Plane p = new Plane(pos0, pos1, pos2);
        // Using followTransform just to no have to use VR, otherwise it's the controller pos
        _targetPosition = p.ClosestPointOnPlane(followTransform.position);

        toRotate.LookAt(_targetPosition, up);

        /*
        _tmp = toRotate.localEulerAngles;
        _tmp.x = Mathf.Clamp(WrapAngle(_tmp.x), minRotation.x, maxRotation.x);
        _tmp.y = WrapAngle(_tmp.y);
        _tmp.z = WrapAngle(_tmp.z);
        toRotate.localRotation = Quaternion.Euler(_tmp);
        */
    }

    public void JVRControllerInteract(JVRController jvrController)
    {
        if (_isGrabbed) return;
        if (!(jvrController.Grip + jvrController.Trigger > Rules.GrabbingThreshold)) return;

        _jvrController = jvrController;
        _jvrController.SetGrabbedObject(this);
        _isGrabbed = true;
    }

    private static float WrapAngle(float angle)
    {
        angle %= 360;
        if (angle > 180)
            return angle - 360;
        return angle;
    }

    private static float UnwrapAngle(float angle)
    {
        if (angle >= 0)
            return angle;
        angle = -angle % 360;
        return 360 - angle;
    }
}
Suppose the monitor's parent transform is the body/keyboard of the laptop, with the local axes of the parent shown below:
To describe the range of motion, you can define a "center of rotation" vector (e.g., the grey vector labeled C) that is local to the parent, and an angle (e.g., 110 degrees, between each purple vector and the grey vector). For instance:
[SerializeField] private Vector3 LocalRotationRangeCenter = new Vector3(0f, 0.94f, 0.342f);
[SerializeField] private float RotationRangeExtent = 110f;
Then you can take the forward direction it "wants" to face, find the signed angle between the world direction of RotationRangeCenter and that direction, and clamp it to ±RotationRangeExtent:
Vector3 worldRotationRangeCenter = toRotate.parent.TransformDirection(RotationRangeCenter);
Vector3 targetForward = _targetPosition - toRotate.position;
float targetAngle = Vector3.SignedAngle(worldRotationRangeCenter, targetForward,
toRotate.right);
float clampedAngle = Mathf.Clamp(targetAngle, -RotationRangeExtent, RotationRangeExtent);
Then, find the direction that corresponds to that angle. Finally, rotate the monitor so that its forward aligns with the clamped forward and its right doesn't change. You can use a cross product to find what the monitor's up would be, then use Quaternion.LookRotation to find the corresponding rotation:
Vector3 clampedForward = Quaternion.AngleAxis(clampedAngle, toRotate.right)
* worldRotationRangeCenter;
toRotate.rotation = Quaternion.LookRotation(clampedForward,
Vector3.Cross(clampedForward, toRotate.right));
If someone tries to drag the monitor too far beyond the "boundaries", it will teleport from one limit to the other. If that's not the desired behavior, you might consider interpolating from SignedAngle(worldRotationRangeCenter, targetForward, toRotate.right) toward clampedAngle, for a smooth movement between the limits:
private float angleChangeLimit = 90f; // max angular speed
// ...
Vector3 worldRotationRangeCenter = toRotate.parent.TransformDirection(RotationRangeCenter);
Vector3 targetForward = _targetPosition - toRotate.position;
float targetAngle = Vector3.SignedAngle(worldRotationRangeCenter, targetForward,
toRotate.right);
float clampedAngle = Mathf.Clamp(targetAngle, -RotationRangeExtent, RotationRangeExtent);
float currentAngle = Vector3.SignedAngle(worldRotationRangeCenter, toRotate.forward,
toRotate.right);
clampedAngle = Mathf.MoveTowards(currentAngle, clampedAngle,
angleChangeLimit * Time.deltaTime);
Vector3 clampedForward = Quaternion.AngleAxis(clampedAngle, toRotate.right)
* worldRotationRangeCenter;
toRotate.rotation = Quaternion.LookRotation(clampedForward,
Vector3.Cross(clampedForward, toRotate.right));
@Ruzihm's answer worked with just a bit of tweaking! I couldn't have gotten there myself, honestly.
Here is the full code, updated for VR, if anyone is interested:
using UnityEngine;

public class JVRLookAtRotation : MonoBehaviour, IJVRControllerInteract
{
    [SerializeField] private Transform toRotate;
    [SerializeField] private Vector3 minRotationDelta;
    [SerializeField] private Vector3 maxRotationDelta;

    private JVRController _jvrController;
    private bool _isGrabbed;
    private Vector3 _targetPosition;

    // No clue where does this come from
    private Vector3 _localRotationRangeCenter = new Vector3(0, 0.999f, 0.044f);

    private void LateUpdate()
    {
        if (!_isGrabbed) return;

        if (_jvrController.Grip + _jvrController.Trigger < Rules.GrabbingThreshold)
        {
            _isGrabbed = false;
            _jvrController.StopGrabbing();
            _jvrController = null;
            return;
        }

        Vector3 up = toRotate.up;
        Vector3 forward = toRotate.forward;
        Vector3 right = toRotate.right;
        Vector3 rotatePosition = toRotate.position;
        Vector3 pos1 = rotatePosition + up;
        Vector3 pos2 = rotatePosition + forward;

        Plane p = new Plane(rotatePosition, pos1, pos2);
        _targetPosition = p.ClosestPointOnPlane(_jvrController.CurrentPositionWorld);

        Vector3 worldRotationRangeCenter = toRotate.parent.TransformDirection(_localRotationRangeCenter);
        Vector3 targetForward = _targetPosition - rotatePosition;
        float targetAngle = Vector3.SignedAngle(worldRotationRangeCenter, targetForward, right);
        float clampedAngle = Mathf.Clamp(targetAngle, minRotationDelta.x, maxRotationDelta.x);
        Vector3 clampedForward = Quaternion.AngleAxis(clampedAngle, right) * worldRotationRangeCenter;

        toRotate.rotation = Quaternion.LookRotation(clampedForward, Vector3.Cross(clampedForward, right));
    }

    public void JVRControllerInteract(JVRController jvrController)
    {
        if (_isGrabbed) return;
        if (!(jvrController.Grip + jvrController.Trigger > Rules.GrabbingThreshold)) return;

        _jvrController = jvrController;
        _jvrController.SetGrabbedObject(this);
        _isGrabbed = true;
    }
}
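A hedged aside on the "No clue where does this come from" comment: _localRotationRangeCenter appears to play the role of the answer's LocalRotationRangeCenter, i.e. a unit direction in the parent's local space marking the middle of the allowed arc. (0, 0.999, 0.044) is roughly local up tilted about 2.5 degrees toward local forward, so it could be derived from a tilt angle instead of hard-coded. The helper below is hypothetical and not part of either script:

// Hypothetical helper: tilt local up toward local forward by a given angle
// (about the local right axis) to build the range-centre direction.
private static Vector3 RangeCenterFromTilt(float tiltDegrees)
{
    return Quaternion.AngleAxis(tiltDegrees, Vector3.right) * Vector3.up;
}
// RangeCenterFromTilt(2.5f) ≈ (0, 0.999, 0.044)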

How to completely prevent the player from going offscreen in Unity?

For the entire day, I've been trying to find a good solution to completely stop the player from going offscreen without hard-coding values.
I have this script called PlayerController, and all it does so far is allow the player to move along the X axis. It also has an additional function that clamps the player's movement on the X axis. Here it is:
using UnityEngine;

public class PlayerController : MonoBehaviour
{
    public const float MAX_SPEED = 5.0f;

    // Update is called once per frame
    void Update()
    {
        transform.Translate(Input.GetAxis("Horizontal") * MAX_SPEED * Time.deltaTime, 0.0f, 0.0f);
        clampPlayerMovement();
    }

    void clampPlayerMovement()
    {
        Vector3 pos = Camera.main.WorldToViewportPoint(transform.position);
        pos.x = Mathf.Clamp01(pos.x);
        transform.position = Camera.main.ViewportToWorldPoint(pos);
    }
}
The problem with this script is that it doesn't completely stop the player from going offscreen (half of the player's body still goes offscreen).
So this is what I tried next.
using UnityEngine;

public class PlayerController : MonoBehaviour
{
    public const float MAX_SPEED = 5.0f;
    private float xLeft;
    private float xRight;

    void Start()
    {
        float pivotX = GetComponent<SpriteRenderer>().sprite.pivot.x;
        float pixelsPerunit = GetComponent<SpriteRenderer>().sprite.pixelsPerUnit;
        float textureWidth = GetComponent<SpriteRenderer>().sprite.texture.width;

        //Units on the left from the sprite's pivot.
        xLeft = pivotX / pixelsPerunit;
        //Units on the right from the sprite's pivot.
        xRight = (textureWidth - pivotX) / pixelsPerunit;
    }

    // Update is called once per frame
    void Update()
    {
        transform.Translate(Input.GetAxis("Horizontal") * MAX_SPEED * Time.deltaTime, 0.0f, 0.0f);
        clampPlayersMovement();
    }

    void clampPlayersMovement()
    {
        Vector3 pos = transform.position;
        Vector3 posMin = transform.position;
        Vector3 posMax = transform.position;

        posMin.x = posMin.x - xLeft;
        posMax.x = posMax.x + xRight;

        pos = Camera.main.WorldToViewportPoint(pos);
        posMin = Camera.main.WorldToViewportPoint(posMin);
        posMax = Camera.main.WorldToViewportPoint(posMax);

        pos.x = Mathf.Clamp(pos.x, posMin.x, posMax.x);

        transform.position = Camera.main.ViewportToWorldPoint(pos);
    }
}
Unfortunately, this code is no good. In fact, it is even worse, because it does not stop the player from going offscreen at all.
So at this point I'm stuck between a rock and a hard place. Any suggestions would be vastly appreciated.
After a long search I finally found an answer:
using UnityEngine;

public class PlayerController : MonoBehaviour
{
    public const float MAX_SPEED = 5.0f;
    private float halfPlayerSizeX;

    void Start()
    {
        halfPlayerSizeX = GetComponent<SpriteRenderer>().bounds.size.x / 2;
    }

    // Update is called once per frame
    void Update()
    {
        transform.Translate(Input.GetAxis("Horizontal") * MAX_SPEED * Time.deltaTime, 0.0f, 0.0f);
        clampPlayerMovement();
    }

    void clampPlayerMovement()
    {
        Vector3 position = transform.position;
        float distance = transform.position.z - Camera.main.transform.position.z;
        float leftBorder = Camera.main.ViewportToWorldPoint(new Vector3(0, 0, distance)).x + halfPlayerSizeX;
        float rightBorder = Camera.main.ViewportToWorldPoint(new Vector3(1, 0, distance)).x - halfPlayerSizeX;
        position.x = Mathf.Clamp(position.x, leftBorder, rightBorder);
        transform.position = position;
    }
}
The only thing that I don't get is why I need to take the difference between the Z positions of the game object and the camera. Why not the X position?
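A hedged aside on that last question: Camera.ViewportToWorldPoint treats the z component of the point you pass in as the distance from the camera along its viewing direction, not as a world coordinate. With the usual 2D setup where the camera looks along the Z axis at the scene, that distance is the Z offset between the player and the camera, which is why the X positions are not what matters there. A minimal sketch under that assumption:

using UnityEngine;

// Minimal sketch: print the world-space left edge of the screen at the
// player's depth, assuming a camera looking along the Z axis.
public class ViewportDepthExample : MonoBehaviour
{
    public Transform player; // hypothetical reference, assigned in the Inspector

    private void Start()
    {
        float depth = player.position.z - Camera.main.transform.position.z;
        Vector3 leftEdge = Camera.main.ViewportToWorldPoint(new Vector3(0f, 0.5f, depth));
        Debug.Log("Left screen edge at the player's depth: " + leftEdge);
    }
}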
